/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2020 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an always,to or always,tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
     fields.  */
  GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
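
/* For illustration: a combined construct such as
   "#pragma omp parallel for" gimplifies to an ORT_COMBINED_PARALLEL
   region enclosing an ORT_WORKSHARE region for the loop, while an
   OpenACC data construct gets ORT_ACC_DATA, i.e.
   ORT_ACC | ORT_TARGET_DATA.  */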

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  int defaultmap[4];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);

/* Shorter alias for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
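
/* A typical usage sketch (illustrative): callers bracket gimplification
   with a context pair, e.g.

     push_gimplify_context ();
     gimplify_stmt (&stmt, &seq);
     pop_gimplify_context (NULL);

   passing NULL so that any temporaries created in between are recorded
   in the function's local_decls rather than in an enclosing
   GIMPLE_BIND.  */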

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the top element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the top element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
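
/* For instance (illustrative): when optimizing, two formal-temporary
   requests for the same side-effect-free value, say A + B, hit the same
   hash table slot and therefore return the same temporary; at -O0, or
   when the value has side effects, each request gets a fresh one.  */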

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the bindings of function FN.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else
		    flag = GOVD_PRIVATE;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
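
/* For example (illustrative): if a front-end reuses a single PLUS_EXPR
   node in both arms of a COND_EXPR, the top-down walk marks it
   TREE_VISITED at the first arm; on reaching the second arm the mark is
   already set, so the node is duplicated there, and each copy can then
   be gimplified in-place without corrupting the other.  */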

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
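
/* A sketch of the effect (illustrative): for a GNU statement expression
   used as an initializer, say

     int x = ({ int i = f (); i + 1; });

   the BIND_EXPR wrapper has type int.  voidify_wrapper_expr walks down
   to the final expression "i + 1", replaces it with an assignment to a
   fresh "retval" temporary, gives every wrapper void type, and returns
   the temporary so the caller can use it in place of the wrapper's
   value.  */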

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
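
/* In GIMPLE dump form the pair looks roughly like (illustrative):

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);

   where saved_stack.1 is the "saved_stack" temporary created above.  */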

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending
   on the POISON flag, the shadow memory of variable DECL.  The call is
   inserted at the position identified by iterator IT; the BEFORE flag
   selects whether it goes before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
   flag, either poisons or unpoisons DECL.  The created statement is
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort a pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate an IFN_ASAN_MARK internal call for each variable in
   VARIABLES, poisoning or unpoisoning it depending on the POISON flag.
   The created statements are appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in
	 order to prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t));
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
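
/* Illustrative shape of the result when cleanups are needed: the
   GIMPLE_BIND body becomes

     saved_stack.1 = __builtin_stack_save ();
     try
       {
	 ... gimplified body ...
       }
     finally
       {
	 __builtin_stack_restore (saved_stack.1);
	 buf = {CLOBBER};
       }

   where "buf" stands for an address-taken local going out of scope, and
   the stack save/restore pair appears only if the body allocated VLAs
   whose space can be reclaimed.  */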

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, the early return is predicted
     to be not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
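
/* For example (illustrative): in a function returning int by value,

     return a + b;

   gimplifies to roughly

     D.1234 = a + b;
     return D.1234;

   where D.1234 (name illustrative) is the temporary cached in
   gimplify_ctxp->return_temp and reused by every return statement in
   the function.  */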

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
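
/* For instance (illustrative): for "char a[n];" this emits roughly

     a.1 = __builtin_alloca_with_align (D.1234, ...);

   and sets DECL_VALUE_EXPR (a) = *a.1, so later uses of "a" are
   rewritten as the indirection while debug info can still locate the
   object (temporary names are illustrative).  */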

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      poly_uint64 size;
      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && maybe_gt (size,
			   (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
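
/* Sketch (illustrative): LOOP_EXPR <body> lowers to

     start:
       ... gimplified body ...
       goto start;
     exit:

   where the trailing "exit" label is emitted only if the body contained
   an EXIT_EXPR that jumps to it.  */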

/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}

1869 /* Callback for walk_gimple_seq. */
1870
1871 static tree
1872 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1873 struct walk_stmt_info *wi)
1874 {
1875 gimple *stmt = gsi_stmt (*gsi_p);
1876
1877 *handled_ops_p = true;
1878 switch (gimple_code (stmt))
1879 {
1880 case GIMPLE_TRY:
1881 /* A compiler-generated cleanup or a user-written try block.
1882 If it's empty, don't dive into it--that would result in
1883 worse location info. */
1884 if (gimple_try_eval (stmt) == NULL)
1885 {
1886 wi->info = stmt;
1887 return integer_zero_node;
1888 }
1889 /* Fall through. */
1890 case GIMPLE_BIND:
1891 case GIMPLE_CATCH:
1892 case GIMPLE_EH_FILTER:
1893 case GIMPLE_TRANSACTION:
1894 /* Walk the sub-statements. */
1895 *handled_ops_p = false;
1896 break;
1897
1898 case GIMPLE_DEBUG:
1899 /* Ignore these. We may generate them before declarations that
1900 are never executed. If there's something to warn about,
1901 there will be non-debug stmts too, and we'll catch those. */
1902 break;
1903
1904 case GIMPLE_CALL:
1905 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1906 {
1907 *handled_ops_p = false;
1908 break;
1909 }
1910 /* Fall through. */
1911 default:
1912 /* Save the first "real" statement (not a decl/lexical scope/...). */
1913 wi->info = stmt;
1914 return integer_zero_node;
1915 }
1916 return NULL_TREE;
1917 }
1918
1919 /* Possibly warn about unreachable statements between switch's controlling
1920 expression and the first case. SEQ is the body of a switch expression. */
1921
1922 static void
1923 maybe_warn_switch_unreachable (gimple_seq seq)
1924 {
1925 if (!warn_switch_unreachable
1926 /* This warning doesn't play well with Fortran when optimizations
1927 are on. */
1928 || lang_GNU_Fortran ()
1929 || seq == NULL)
1930 return;
1931
1932 struct walk_stmt_info wi;
1933 memset (&wi, 0, sizeof (wi));
1934 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1935 gimple *stmt = (gimple *) wi.info;
1936
1937 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1938 {
1939 if (gimple_code (stmt) == GIMPLE_GOTO
1940 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1941 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1942 /* Don't warn for compiler-generated gotos. These occur
1943 in Duff's devices, for example. */;
1944 else
1945 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1946 "statement will never be executed");
1947 }
1948 }
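
/* Editor's illustration (assumption, not from the original source):

     switch (x)
       {
	 int i;
	 i = 1;		<- "statement will never be executed": it sits
       case 0:		   between the controlling expression and the
	 break;		   first case label
       }
*/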
1949
1950
1951 /* A label entry that pairs label and a location. */
1952 struct label_entry
1953 {
1954 tree label;
1955 location_t loc;
1956 };
1957
1958 /* Find LABEL in vector of label entries VEC. */
1959
1960 static struct label_entry *
1961 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1962 {
1963 unsigned int i;
1964 struct label_entry *l;
1965
1966 FOR_EACH_VEC_ELT (*vec, i, l)
1967 if (l->label == label)
1968 return l;
1969 return NULL;
1970 }
1971
1972 /* Return true if LABEL, a LABEL_DECL, represents a case label
1973 in a vector of labels CASES. */
1974
1975 static bool
1976 case_label_p (const vec<tree> *cases, tree label)
1977 {
1978 unsigned int i;
1979 tree l;
1980
1981 FOR_EACH_VEC_ELT (*cases, i, l)
1982 if (CASE_LABEL (l) == label)
1983 return true;
1984 return false;
1985 }
1986
1987 /* Find the last nondebug statement in a scope STMT. */
1988
1989 static gimple *
1990 last_stmt_in_scope (gimple *stmt)
1991 {
1992 if (!stmt)
1993 return NULL;
1994
1995 switch (gimple_code (stmt))
1996 {
1997 case GIMPLE_BIND:
1998 {
1999 gbind *bind = as_a <gbind *> (stmt);
2000 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2001 return last_stmt_in_scope (stmt);
2002 }
2003
2004 case GIMPLE_TRY:
2005 {
2006 gtry *try_stmt = as_a <gtry *> (stmt);
2007 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2008 gimple *last_eval = last_stmt_in_scope (stmt);
2009 if (gimple_stmt_may_fallthru (last_eval)
2010 && (last_eval == NULL
2011 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2012 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2013 {
2014 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2015 return last_stmt_in_scope (stmt);
2016 }
2017 else
2018 return last_eval;
2019 }
2020
2021 case GIMPLE_DEBUG:
2022 gcc_unreachable ();
2023
2024 default:
2025 return stmt;
2026 }
2027 }
2028
2029 /* Collect interesting labels in LABELS and return the statement preceding
2030 another case label, or a user-defined label. Store a location useful
2031 to give warnings at *PREVLOC (usually the location of the returned
2032 statement or of its surrounding scope). */
2033
2034 static gimple *
2035 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2036 auto_vec <struct label_entry> *labels,
2037 location_t *prevloc)
2038 {
2039 gimple *prev = NULL;
2040
2041 *prevloc = UNKNOWN_LOCATION;
2042 do
2043 {
2044 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2045 {
2046 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2047 which starts on a GIMPLE_SWITCH and ends with a break label.
2048 Handle that as a single statement that can fall through. */
2049 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2050 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2051 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2052 if (last
2053 && gimple_code (first) == GIMPLE_SWITCH
2054 && gimple_code (last) == GIMPLE_LABEL)
2055 {
2056 tree label = gimple_label_label (as_a <glabel *> (last));
2057 if (SWITCH_BREAK_LABEL_P (label))
2058 {
2059 prev = bind;
2060 gsi_next (gsi_p);
2061 continue;
2062 }
2063 }
2064 }
2065 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2066 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2067 {
2068 /* Nested scope. Only look at the last statement of
2069 the innermost scope. */
2070 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2071 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2072 if (last)
2073 {
2074 prev = last;
2075 /* It might be a label without a location. Use the
2076 location of the scope then. */
2077 if (!gimple_has_location (prev))
2078 *prevloc = bind_loc;
2079 }
2080 gsi_next (gsi_p);
2081 continue;
2082 }
2083
2084 /* Ifs are tricky. */
2085 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2086 {
2087 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2088 tree false_lab = gimple_cond_false_label (cond_stmt);
2089 location_t if_loc = gimple_location (cond_stmt);
2090
2091 /* If we have e.g.
2092 if (i > 1) goto <D.2259>; else goto D;
2093 we can't do much with the else-branch. */
2094 if (!DECL_ARTIFICIAL (false_lab))
2095 break;
2096
2097 /* Go on until the false label, then one step back. */
2098 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2099 {
2100 gimple *stmt = gsi_stmt (*gsi_p);
2101 if (gimple_code (stmt) == GIMPLE_LABEL
2102 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2103 break;
2104 }
2105
2106 /* Not found? Oops. */
2107 if (gsi_end_p (*gsi_p))
2108 break;
2109
2110 struct label_entry l = { false_lab, if_loc };
2111 labels->safe_push (l);
2112
2113 /* Go to the last statement of the then branch. */
2114 gsi_prev (gsi_p);
2115
2116 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2117 <D.1759>:
2118 <stmt>;
2119 goto <D.1761>;
2120 <D.1760>:
2121 */
2122 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2123 && !gimple_has_location (gsi_stmt (*gsi_p)))
2124 {
2125 /* Look at the statement before, it might be
2126 attribute fallthrough, in which case don't warn. */
2127 gsi_prev (gsi_p);
2128 bool fallthru_before_dest
2129 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2130 gsi_next (gsi_p);
2131 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2132 if (!fallthru_before_dest)
2133 {
2134 struct label_entry l = { goto_dest, if_loc };
2135 labels->safe_push (l);
2136 }
2137 }
2138 /* And move back. */
2139 gsi_next (gsi_p);
2140 }
2141
2142 /* Remember the last statement. Skip labels that are of no interest
2143 to us. */
2144 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2145 {
2146 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2147 if (find_label_entry (labels, label))
2148 prev = gsi_stmt (*gsi_p);
2149 }
2150 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2151 ;
2152 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2153 ;
2154 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2155 prev = gsi_stmt (*gsi_p);
2156 gsi_next (gsi_p);
2157 }
2158 while (!gsi_end_p (*gsi_p)
2159 /* Stop if we find a case or a user-defined label. */
2160 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2161 || !gimple_has_location (gsi_stmt (*gsi_p))));
2162
2163 if (prev && gimple_has_location (prev))
2164 *prevloc = gimple_location (prev);
2165 return prev;
2166 }
2167
2168 /* Return true if the switch fallthrough warning should occur. LABEL is
2169 the label statement that we're falling through to. */
2170
2171 static bool
2172 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2173 {
2174 gimple_stmt_iterator gsi = *gsi_p;
2175
2176 /* Don't warn if the label is marked with a "falls through" comment. */
2177 if (FALLTHROUGH_LABEL_P (label))
2178 return false;
2179
2180 /* Don't warn for non-case labels followed by a statement:
2181 case 0:
2182 foo ();
2183 label:
2184 bar ();
2185 as these are likely intentional. */
2186 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2187 {
2188 tree l;
2189 while (!gsi_end_p (gsi)
2190 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2191 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2192 && !case_label_p (&gimplify_ctxp->case_labels, l))
2193 gsi_next_nondebug (&gsi);
2194 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2195 return false;
2196 }
2197
2198 /* Don't warn for terminated branches, i.e. when the subsequent case label
2199 immediately breaks. */
2200 gsi = *gsi_p;
2201
2202 /* Skip all immediately following labels. */
2203 while (!gsi_end_p (gsi)
2204 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2205 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2206 gsi_next_nondebug (&gsi);
2207
2208 /* { ... something; default:; } */
2209 if (gsi_end_p (gsi)
2210 /* { ... something; default: break; } or
2211 { ... something; default: goto L; } */
2212 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2213 /* { ... something; default: return; } */
2214 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2215 return false;
2216
2217 return true;
2218 }
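
/* Editor's illustration of the heuristics above (not from the source):

     switch (x)
       {
       case 0:
	 foo ();	<- may warn: control can fall into "case 1"
       case 1:
	 bar ();	<- no warning: the branch fallen into below
       case 2:		   consists only of an immediate break
	 break;
       }
*/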
2219
2220 /* Callback for walk_gimple_seq. */
2221
2222 static tree
2223 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2224 struct walk_stmt_info *)
2225 {
2226 gimple *stmt = gsi_stmt (*gsi_p);
2227
2228 *handled_ops_p = true;
2229 switch (gimple_code (stmt))
2230 {
2231 case GIMPLE_TRY:
2232 case GIMPLE_BIND:
2233 case GIMPLE_CATCH:
2234 case GIMPLE_EH_FILTER:
2235 case GIMPLE_TRANSACTION:
2236 /* Walk the sub-statements. */
2237 *handled_ops_p = false;
2238 break;
2239
2240 /* Find a sequence of the form:
2241
2242 GIMPLE_LABEL
2243 [...]
2244 <may fallthru stmt>
2245 GIMPLE_LABEL
2246
2247 and possibly warn. */
2248 case GIMPLE_LABEL:
2249 {
2250 /* Found a label. Skip all immediately following labels. */
2251 while (!gsi_end_p (*gsi_p)
2252 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2253 gsi_next_nondebug (gsi_p);
2254
2255 /* There might be no more statements. */
2256 if (gsi_end_p (*gsi_p))
2257 return integer_zero_node;
2258
2259 /* Vector of labels that fall through. */
2260 auto_vec <struct label_entry> labels;
2261 location_t prevloc;
2262 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2263
2264 /* There might be no more statements. */
2265 if (gsi_end_p (*gsi_p))
2266 return integer_zero_node;
2267
2268 gimple *next = gsi_stmt (*gsi_p);
2269 tree label;
2270 /* If what follows is a label, then we may have a fallthrough. */
2271 if (gimple_code (next) == GIMPLE_LABEL
2272 && gimple_has_location (next)
2273 && (label = gimple_label_label (as_a <glabel *> (next)))
2274 && prev != NULL)
2275 {
2276 struct label_entry *l;
2277 bool warned_p = false;
2278 auto_diagnostic_group d;
2279 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2280 /* Quiet. */;
2281 else if (gimple_code (prev) == GIMPLE_LABEL
2282 && (label = gimple_label_label (as_a <glabel *> (prev)))
2283 && (l = find_label_entry (&labels, label)))
2284 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2285 "this statement may fall through");
2286 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2287 /* Try to be clever and don't warn when the statement
2288 can't actually fall through. */
2289 && gimple_stmt_may_fallthru (prev)
2290 && prevloc != UNKNOWN_LOCATION)
2291 warned_p = warning_at (prevloc,
2292 OPT_Wimplicit_fallthrough_,
2293 "this statement may fall through");
2294 if (warned_p)
2295 inform (gimple_location (next), "here");
2296
2297 /* Mark this label as processed so as to prevent multiple
2298 warnings in nested switches. */
2299 FALLTHROUGH_LABEL_P (label) = true;
2300
2301 /* So that next warn_implicit_fallthrough_r will start looking for
2302 a new sequence starting with this label. */
2303 gsi_prev (gsi_p);
2304 }
2305 }
2306 break;
2307 default:
2308 break;
2309 }
2310 return NULL_TREE;
2311 }
2312
2313 /* Warn when a switch case falls through. */
2314
2315 static void
2316 maybe_warn_implicit_fallthrough (gimple_seq seq)
2317 {
2318 if (!warn_implicit_fallthrough)
2319 return;
2320
2321 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2322 if (!(lang_GNU_C ()
2323 || lang_GNU_CXX ()
2324 || lang_GNU_OBJC ()))
2325 return;
2326
2327 struct walk_stmt_info wi;
2328 memset (&wi, 0, sizeof (wi));
2329 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2330 }
2331
2332 /* Callback for walk_gimple_seq. */
2333
2334 static tree
2335 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2336 struct walk_stmt_info *wi)
2337 {
2338 gimple *stmt = gsi_stmt (*gsi_p);
2339
2340 *handled_ops_p = true;
2341 switch (gimple_code (stmt))
2342 {
2343 case GIMPLE_TRY:
2344 case GIMPLE_BIND:
2345 case GIMPLE_CATCH:
2346 case GIMPLE_EH_FILTER:
2347 case GIMPLE_TRANSACTION:
2348 /* Walk the sub-statements. */
2349 *handled_ops_p = false;
2350 break;
2351 case GIMPLE_CALL:
2352 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2353 {
2354 gsi_remove (gsi_p, true);
2355 if (gsi_end_p (*gsi_p))
2356 {
2357 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2358 return integer_zero_node;
2359 }
2360
2361 bool found = false;
2362 location_t loc = gimple_location (stmt);
2363
2364 gimple_stmt_iterator gsi2 = *gsi_p;
2365 stmt = gsi_stmt (gsi2);
2366 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2367 {
2368 /* Go on until the artificial label. */
2369 tree goto_dest = gimple_goto_dest (stmt);
2370 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2371 {
2372 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2373 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2374 == goto_dest)
2375 break;
2376 }
2377
2378 /* Not found? Stop. */
2379 if (gsi_end_p (gsi2))
2380 break;
2381
2382 /* Look one past it. */
2383 gsi_next (&gsi2);
2384 }
2385
2386 /* We're looking for a case label or default label here. */
2387 while (!gsi_end_p (gsi2))
2388 {
2389 stmt = gsi_stmt (gsi2);
2390 if (gimple_code (stmt) == GIMPLE_LABEL)
2391 {
2392 tree label = gimple_label_label (as_a <glabel *> (stmt));
2393 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2394 {
2395 found = true;
2396 break;
2397 }
2398 }
2399 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2400 ;
2401 else if (!is_gimple_debug (stmt))
2402 /* Anything else is not expected. */
2403 break;
2404 gsi_next (&gsi2);
2405 }
2406 if (!found)
2407 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2408 "a case label or default label");
2409 }
2410 break;
2411 default:
2412 break;
2413 }
2414 return NULL_TREE;
2415 }
2416
2417 /* Expand all FALLTHROUGH () calls in SEQ. */
2418
2419 static void
2420 expand_FALLTHROUGH (gimple_seq *seq_p)
2421 {
2422 struct walk_stmt_info wi;
2423 location_t loc;
2424 memset (&wi, 0, sizeof (wi));
2425 wi.info = (void *) &loc;
2426 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2427 if (wi.callback_result == integer_zero_node)
2428 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2429 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2430 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2431 "a case label or default label");
2432 }
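
/* Editor's note (illustration only): the IFN_FALLTHROUGH markers
   removed above come from code such as

     case 0:
       foo ();
       __attribute__ ((fallthrough));	(or C++17 [[fallthrough]];)
     case 1:
       ...

   and the pedwarn fires when no case or default label follows the
   marker, e.g. for a [[fallthrough]]; that ends the switch body.  */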
2433
2434 \f
2435 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2436 branch to. */
2437
2438 static enum gimplify_status
2439 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2440 {
2441 tree switch_expr = *expr_p;
2442 gimple_seq switch_body_seq = NULL;
2443 enum gimplify_status ret;
2444 tree index_type = TREE_TYPE (switch_expr);
2445 if (index_type == NULL_TREE)
2446 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2447
2448 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2449 fb_rvalue);
2450 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2451 return ret;
2452
2453 if (SWITCH_BODY (switch_expr))
2454 {
2455 vec<tree> labels;
2456 vec<tree> saved_labels;
2457 hash_set<tree> *saved_live_switch_vars = NULL;
2458 tree default_case = NULL_TREE;
2459 gswitch *switch_stmt;
2460
2461 /* Save old labels, get new ones from body, then restore the old
2462 labels. Save all the things from the switch body to append after. */
2463 saved_labels = gimplify_ctxp->case_labels;
2464 gimplify_ctxp->case_labels.create (8);
2465
2466 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2467 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2468 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2469 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2470 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2471 else
2472 gimplify_ctxp->live_switch_vars = NULL;
2473
2474 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2475 gimplify_ctxp->in_switch_expr = true;
2476
2477 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2478
2479 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2480 maybe_warn_switch_unreachable (switch_body_seq);
2481 maybe_warn_implicit_fallthrough (switch_body_seq);
2482 /* Only do this for the outermost GIMPLE_SWITCH. */
2483 if (!gimplify_ctxp->in_switch_expr)
2484 expand_FALLTHROUGH (&switch_body_seq);
2485
2486 labels = gimplify_ctxp->case_labels;
2487 gimplify_ctxp->case_labels = saved_labels;
2488
2489 if (gimplify_ctxp->live_switch_vars)
2490 {
2491 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2492 delete gimplify_ctxp->live_switch_vars;
2493 }
2494 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2495
2496 preprocess_case_label_vec_for_gimple (labels, index_type,
2497 &default_case);
2498
2499 bool add_bind = false;
2500 if (!default_case)
2501 {
2502 glabel *new_default;
2503
2504 default_case
2505 = build_case_label (NULL_TREE, NULL_TREE,
2506 create_artificial_label (UNKNOWN_LOCATION));
2507 if (old_in_switch_expr)
2508 {
2509 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2510 add_bind = true;
2511 }
2512 new_default = gimple_build_label (CASE_LABEL (default_case));
2513 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2514 }
2515 else if (old_in_switch_expr)
2516 {
2517 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2518 if (last && gimple_code (last) == GIMPLE_LABEL)
2519 {
2520 tree label = gimple_label_label (as_a <glabel *> (last));
2521 if (SWITCH_BREAK_LABEL_P (label))
2522 add_bind = true;
2523 }
2524 }
2525
2526 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2527 default_case, labels);
2528 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2529 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2530 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2531 so that we can easily find the start and end of the switch
2532 statement. */
2533 if (add_bind)
2534 {
2535 gimple_seq bind_body = NULL;
2536 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2537 gimple_seq_add_seq (&bind_body, switch_body_seq);
2538 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2539 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2540 gimplify_seq_add_stmt (pre_p, bind);
2541 }
2542 else
2543 {
2544 gimplify_seq_add_stmt (pre_p, switch_stmt);
2545 gimplify_seq_add_seq (pre_p, switch_body_seq);
2546 }
2547 labels.release ();
2548 }
2549 else
2550 gcc_unreachable ();
2551
2552 return GS_ALL_DONE;
2553 }
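
/* Editor's sketch (assumption) of the result: SWITCH_EXPR <cond, body>
   becomes a GIMPLE_SWITCH followed by the gimplified body,

     switch (cond) <default: D.3, case 0: D.1, case 5: D.2>
     D.1: ...
     D.2: ...
     D.3:

   with the whole thing wrapped in a GIMPLE_BIND when the break-label
   trick described above applies.  */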
2554
2555 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2556
2557 static enum gimplify_status
2558 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2559 {
2560 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2561 == current_function_decl);
2562
2563 tree label = LABEL_EXPR_LABEL (*expr_p);
2564 glabel *label_stmt = gimple_build_label (label);
2565 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2566 gimplify_seq_add_stmt (pre_p, label_stmt);
2567
2568 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2569 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2570 NOT_TAKEN));
2571 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2572 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2573 TAKEN));
2574
2575 return GS_ALL_DONE;
2576 }
2577
2578 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2579
2580 static enum gimplify_status
2581 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2582 {
2583 struct gimplify_ctx *ctxp;
2584 glabel *label_stmt;
2585
2586 /* Invalid programs can play Duff's Device type games with, for example,
2587 #pragma omp parallel. At least in the C front end, we don't
2588 detect such invalid branches until after gimplification, in the
2589 diagnose_omp_blocks pass. */
2590 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2591 if (ctxp->case_labels.exists ())
2592 break;
2593
2594 tree label = CASE_LABEL (*expr_p);
2595 label_stmt = gimple_build_label (label);
2596 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2597 ctxp->case_labels.safe_push (*expr_p);
2598 gimplify_seq_add_stmt (pre_p, label_stmt);
2599
2600 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2601 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2602 NOT_TAKEN));
2603 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2604 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2605 TAKEN));
2606
2607 return GS_ALL_DONE;
2608 }
2609
2610 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2611 if necessary. */
2612
2613 tree
2614 build_and_jump (tree *label_p)
2615 {
2616 if (label_p == NULL)
2617 /* If there's nowhere to jump, just fall through. */
2618 return NULL_TREE;
2619
2620 if (*label_p == NULL_TREE)
2621 {
2622 tree label = create_artificial_label (UNKNOWN_LOCATION);
2623 *label_p = label;
2624 }
2625
2626 return build1 (GOTO_EXPR, void_type_node, *label_p);
2627 }
2628
2629 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2630 This also involves building a label to jump to and communicating it to
2631 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2632
2633 static enum gimplify_status
2634 gimplify_exit_expr (tree *expr_p)
2635 {
2636 tree cond = TREE_OPERAND (*expr_p, 0);
2637 tree expr;
2638
2639 expr = build_and_jump (&gimplify_ctxp->exit_label);
2640 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2641 *expr_p = expr;
2642
2643 return GS_OK;
2644 }
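
/* For illustration (editor's addition): EXIT_EXPR <cond> is rewritten
   here to

     if (cond) goto exit_label; else ;

   where exit_label is the shared gimplify_ctxp->exit_label consumed by
   gimplify_loop_expr.  */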
2645
2646 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2647 different from its canonical type, wrap the whole thing inside a
2648 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2649 type.
2650
2651 The canonical type of a COMPONENT_REF is the type of the field being
2652 referenced--unless the field is a bit-field which can be read directly
2653 in a smaller mode, in which case the canonical type is the
2654 sign-appropriate type corresponding to that mode. */
2655
2656 static void
2657 canonicalize_component_ref (tree *expr_p)
2658 {
2659 tree expr = *expr_p;
2660 tree type;
2661
2662 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2663
2664 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2665 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2666 else
2667 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2668
2669 /* One could argue that all the stuff below is not necessary for
2670 the non-bitfield case and declare it a FE error if type
2671 adjustment would be needed. */
2672 if (TREE_TYPE (expr) != type)
2673 {
2674 #ifdef ENABLE_TYPES_CHECKING
2675 tree old_type = TREE_TYPE (expr);
2676 #endif
2677 int type_quals;
2678
2679 /* We need to preserve qualifiers and propagate them from
2680 operand 0. */
2681 type_quals = TYPE_QUALS (type)
2682 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2683 if (TYPE_QUALS (type) != type_quals)
2684 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2685
2686 /* Set the type of the COMPONENT_REF to the underlying type. */
2687 TREE_TYPE (expr) = type;
2688
2689 #ifdef ENABLE_TYPES_CHECKING
2690 /* It is now a FE error, if the conversion from the canonical
2691 type to the original expression type is not useless. */
2692 gcc_assert (useless_type_conversion_p (old_type, type));
2693 #endif
2694 }
2695 }
2696
2697 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2698 to foo, embed that change in the ADDR_EXPR by converting
2699 T array[U];
2700 (T *)&array
2701 ==>
2702 &array[L]
2703 where L is the lower bound. For simplicity, only do this for constant
2704 lower bound.
2705 The constraint is that the type of &array[L] is trivially convertible
2706 to T *. */
2707
2708 static void
2709 canonicalize_addr_expr (tree *expr_p)
2710 {
2711 tree expr = *expr_p;
2712 tree addr_expr = TREE_OPERAND (expr, 0);
2713 tree datype, ddatype, pddatype;
2714
2715 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2716 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2717 || TREE_CODE (addr_expr) != ADDR_EXPR)
2718 return;
2719
2720 /* The addr_expr type should be a pointer to an array. */
2721 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2722 if (TREE_CODE (datype) != ARRAY_TYPE)
2723 return;
2724
2725 /* The pointer to element type shall be trivially convertible to
2726 the expression pointer type. */
2727 ddatype = TREE_TYPE (datype);
2728 pddatype = build_pointer_type (ddatype);
2729 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2730 pddatype))
2731 return;
2732
2733 /* The lower bound and element sizes must be constant. */
2734 if (!TYPE_SIZE_UNIT (ddatype)
2735 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2736 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2737 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2738 return;
2739
2740 /* All checks succeeded. Build a new node to merge the cast. */
2741 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2742 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2743 NULL_TREE, NULL_TREE);
2744 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2745
2746 /* We can have stripped a required restrict qualifier above. */
2747 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2748 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2749 }
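
/* Editor's illustration (not from the original):

     int a[10];
     ... (int *) &a ...	   becomes   ... &a[0] ...

   which is valid because int *, the pointer-to-element type, is
   trivially convertible to the cast type.  */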
2750
2751 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2752 underneath as appropriate. */
2753
2754 static enum gimplify_status
2755 gimplify_conversion (tree *expr_p)
2756 {
2757 location_t loc = EXPR_LOCATION (*expr_p);
2758 gcc_assert (CONVERT_EXPR_P (*expr_p));
2759
2760 /* Then strip away all but the outermost conversion. */
2761 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2762
2763 /* And remove the outermost conversion if it's useless. */
2764 if (tree_ssa_useless_type_conversion (*expr_p))
2765 *expr_p = TREE_OPERAND (*expr_p, 0);
2766
2767 /* If we still have a conversion at the toplevel,
2768 then canonicalize some constructs. */
2769 if (CONVERT_EXPR_P (*expr_p))
2770 {
2771 tree sub = TREE_OPERAND (*expr_p, 0);
2772
2773 /* If a NOP conversion is changing the type of a COMPONENT_REF
2774 expression, then canonicalize its type now in order to expose more
2775 redundant conversions. */
2776 if (TREE_CODE (sub) == COMPONENT_REF)
2777 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2778
2779 /* If a NOP conversion is changing a pointer to array of foo
2780 to a pointer to foo, embed that change in the ADDR_EXPR. */
2781 else if (TREE_CODE (sub) == ADDR_EXPR)
2782 canonicalize_addr_expr (expr_p);
2783 }
2784
2785 /* If we have a conversion to a non-register type, force the
2786 use of a VIEW_CONVERT_EXPR instead. */
2787 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2788 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2789 TREE_OPERAND (*expr_p, 0));
2790
2791 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2792 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2793 TREE_SET_CODE (*expr_p, NOP_EXPR);
2794
2795 return GS_OK;
2796 }
2797
2798 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2799 DECL_VALUE_EXPR, and it's worth re-examining things. */
2800
2801 static enum gimplify_status
2802 gimplify_var_or_parm_decl (tree *expr_p)
2803 {
2804 tree decl = *expr_p;
2805
2806 /* ??? If this is a local variable, and it has not been seen in any
2807 outer BIND_EXPR, then it's probably the result of a duplicate
2808 declaration, for which we've already issued an error. It would
2809 be really nice if the front end wouldn't leak these at all.
2810 Currently the only known culprit is C++ destructors, as seen
2811 in g++.old-deja/g++.jason/binding.C. */
2812 if (VAR_P (decl)
2813 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2814 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2815 && decl_function_context (decl) == current_function_decl)
2816 {
2817 gcc_assert (seen_error ());
2818 return GS_ERROR;
2819 }
2820
2821 /* When within an OMP context, notice uses of variables. */
2822 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2823 return GS_ALL_DONE;
2824
2825 /* If the decl is an alias for another expression, substitute it now. */
2826 if (DECL_HAS_VALUE_EXPR_P (decl))
2827 {
2828 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2829 return GS_OK;
2830 }
2831
2832 return GS_ALL_DONE;
2833 }
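
/* Editor's illustration (assumption): the DECL_VALUE_EXPR substitution
   above is what makes VLAs work -- after gimplify_vla_decl, a use of

     a[i]	(with "int a[n];")

   is rewritten here to (*a.1)[i], exposing the underlying pointer.  */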
2834
2835 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2836
2837 static void
2838 recalculate_side_effects (tree t)
2839 {
2840 enum tree_code code = TREE_CODE (t);
2841 int len = TREE_OPERAND_LENGTH (t);
2842 int i;
2843
2844 switch (TREE_CODE_CLASS (code))
2845 {
2846 case tcc_expression:
2847 switch (code)
2848 {
2849 case INIT_EXPR:
2850 case MODIFY_EXPR:
2851 case VA_ARG_EXPR:
2852 case PREDECREMENT_EXPR:
2853 case PREINCREMENT_EXPR:
2854 case POSTDECREMENT_EXPR:
2855 case POSTINCREMENT_EXPR:
2856 /* All of these have side-effects, no matter what their
2857 operands are. */
2858 return;
2859
2860 default:
2861 break;
2862 }
2863 /* Fall through. */
2864
2865 case tcc_comparison: /* a comparison expression */
2866 case tcc_unary: /* a unary arithmetic expression */
2867 case tcc_binary: /* a binary arithmetic expression */
2868 case tcc_reference: /* a reference */
2869 case tcc_vl_exp: /* a function call */
2870 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2871 for (i = 0; i < len; ++i)
2872 {
2873 tree op = TREE_OPERAND (t, i);
2874 if (op && TREE_SIDE_EFFECTS (op))
2875 TREE_SIDE_EFFECTS (t) = 1;
2876 }
2877 break;
2878
2879 case tcc_constant:
2880 /* No side-effects. */
2881 return;
2882
2883 default:
2884 gcc_unreachable ();
2885 }
2886 }
2887
2888 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2889 node *EXPR_P.
2890
2891 compound_lval
2892 : min_lval '[' val ']'
2893 | min_lval '.' ID
2894 | compound_lval '[' val ']'
2895 | compound_lval '.' ID
2896
2897 This is not part of the original SIMPLE definition, which separates
2898 array and member references, but it seems reasonable to handle them
2899 together. Also, this way we don't run into problems with union
2900 aliasing; gcc requires that for accesses through a union to alias, the
2901 union reference must be explicit, which was not always the case when we
2902 were splitting up array and member refs.
2903
2904 PRE_P points to the sequence where side effects that must happen before
2905 *EXPR_P should be stored.
2906
2907 POST_P points to the sequence where side effects that must happen after
2908 *EXPR_P should be stored. */
2909
2910 static enum gimplify_status
2911 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2912 fallback_t fallback)
2913 {
2914 tree *p;
2915 enum gimplify_status ret = GS_ALL_DONE, tret;
2916 int i;
2917 location_t loc = EXPR_LOCATION (*expr_p);
2918 tree expr = *expr_p;
2919
2920 /* Create a stack of the subexpressions so later we can walk them in
2921 order from inner to outer. */
2922 auto_vec<tree, 10> expr_stack;
2923
2924 /* We can handle anything that get_inner_reference can deal with. */
2925 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2926 {
2927 restart:
2928 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2929 if (TREE_CODE (*p) == INDIRECT_REF)
2930 *p = fold_indirect_ref_loc (loc, *p);
2931
2932 if (handled_component_p (*p))
2933 ;
2934 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2935 additional COMPONENT_REFs. */
2936 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2937 && gimplify_var_or_parm_decl (p) == GS_OK)
2938 goto restart;
2939 else
2940 break;
2941
2942 expr_stack.safe_push (*p);
2943 }
2944
2945 gcc_assert (expr_stack.length ());
2946
2947 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2948 walked through and P points to the innermost expression.
2949
2950 Java requires that we elaborate nodes in source order. That
2951 means we must gimplify the inner expression followed by each of
2952 the indices, in order. But we can't gimplify the inner
2953 expression until we deal with any variable bounds, sizes, or
2954 positions in order to deal with PLACEHOLDER_EXPRs.
2955
2956 So we do this in three steps. First we deal with the annotations
2957 for any variables in the components, then we gimplify the base,
2958 then we gimplify any indices, from left to right. */
2959 for (i = expr_stack.length () - 1; i >= 0; i--)
2960 {
2961 tree t = expr_stack[i];
2962
2963 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2964 {
2965 /* Gimplify the low bound and element type size and put them into
2966 the ARRAY_REF. If these values are set, they have already been
2967 gimplified. */
2968 if (TREE_OPERAND (t, 2) == NULL_TREE)
2969 {
2970 tree low = unshare_expr (array_ref_low_bound (t));
2971 if (!is_gimple_min_invariant (low))
2972 {
2973 TREE_OPERAND (t, 2) = low;
2974 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2975 post_p, is_gimple_reg,
2976 fb_rvalue);
2977 ret = MIN (ret, tret);
2978 }
2979 }
2980 else
2981 {
2982 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2983 is_gimple_reg, fb_rvalue);
2984 ret = MIN (ret, tret);
2985 }
2986
2987 if (TREE_OPERAND (t, 3) == NULL_TREE)
2988 {
2989 tree elmt_size = array_ref_element_size (t);
2990 if (!is_gimple_min_invariant (elmt_size))
2991 {
2992 elmt_size = unshare_expr (elmt_size);
2993 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2994 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2995
2996 /* Divide the element size by the alignment of the element
2997 type (above). */
2998 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
2999 elmt_size, factor);
3000
3001 TREE_OPERAND (t, 3) = elmt_size;
3002 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
3003 post_p, is_gimple_reg,
3004 fb_rvalue);
3005 ret = MIN (ret, tret);
3006 }
3007 }
3008 else
3009 {
3010 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3011 is_gimple_reg, fb_rvalue);
3012 ret = MIN (ret, tret);
3013 }
3014 }
3015 else if (TREE_CODE (t) == COMPONENT_REF)
3016 {
3017 /* Set the field offset into T and gimplify it. */
3018 if (TREE_OPERAND (t, 2) == NULL_TREE)
3019 {
3020 tree offset = component_ref_field_offset (t);
3021 if (!is_gimple_min_invariant (offset))
3022 {
3023 offset = unshare_expr (offset);
3024 tree field = TREE_OPERAND (t, 1);
3025 tree factor
3026 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3027
3028 /* Divide the offset by its alignment. */
3029 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3030 offset, factor);
3031
3032 TREE_OPERAND (t, 2) = offset;
3033 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
3034 post_p, is_gimple_reg,
3035 fb_rvalue);
3036 ret = MIN (ret, tret);
3037 }
3038 }
3039 else
3040 {
3041 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3042 is_gimple_reg, fb_rvalue);
3043 ret = MIN (ret, tret);
3044 }
3045 }
3046 }
3047
3048 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3049 so as to match the min_lval predicate. Failure to do so may result
3050 in the creation of large aggregate temporaries. */
3051 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3052 fallback | fb_lvalue);
3053 ret = MIN (ret, tret);
3054
3055 /* And finally, the indices and operands of ARRAY_REF. During this
3056 loop we also remove any useless conversions. */
3057 for (; expr_stack.length () > 0; )
3058 {
3059 tree t = expr_stack.pop ();
3060
3061 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3062 {
3063 /* Gimplify the dimension. */
3064 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3065 {
3066 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3067 is_gimple_val, fb_rvalue);
3068 ret = MIN (ret, tret);
3069 }
3070 }
3071
3072 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3073
3074 /* The innermost expression P may have originally had
3075 TREE_SIDE_EFFECTS set which would have caused all the outer
3076 expressions in *EXPR_P leading to P to also have had
3077 TREE_SIDE_EFFECTS set. */
3078 recalculate_side_effects (t);
3079 }
3080
3081 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3082 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3083 {
3084 canonicalize_component_ref (expr_p);
3085 }
3086
3087 expr_stack.release ();
3088
3089 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3090
3091 return ret;
3092 }
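
/* Editor's illustration (not in the original): for *EXPR_P = a.f[i].g,
   the walk above pushes

     expr_stack = { COMPONENT_REF .g, ARRAY_REF [i], COMPONENT_REF .f }

   (outermost first), leaves P pointing at "a", and the three steps then
   gimplify any variable offsets/sizes, the base "a", and finally the
   index "i".  */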
3093
3094 /* Gimplify the self modifying expression pointed to by EXPR_P
3095 (++, --, +=, -=).
3096
3097 PRE_P points to the list where side effects that must happen before
3098 *EXPR_P should be stored.
3099
3100 POST_P points to the list where side effects that must happen after
3101 *EXPR_P should be stored.
3102
3103 WANT_VALUE is nonzero iff we want to use the value of this expression
3104 in another expression.
3105
3106 ARITH_TYPE is the type the computation should be performed in. */
3107
3108 enum gimplify_status
3109 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3110 bool want_value, tree arith_type)
3111 {
3112 enum tree_code code;
3113 tree lhs, lvalue, rhs, t1;
3114 gimple_seq post = NULL, *orig_post_p = post_p;
3115 bool postfix;
3116 enum tree_code arith_code;
3117 enum gimplify_status ret;
3118 location_t loc = EXPR_LOCATION (*expr_p);
3119
3120 code = TREE_CODE (*expr_p);
3121
3122 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3123 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3124
3125 /* Prefix or postfix? */
3126 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3127 /* Faster to treat as prefix if result is not used. */
3128 postfix = want_value;
3129 else
3130 postfix = false;
3131
3132 /* For postfix, make sure the inner expression's post side effects
3133 are executed after side effects from this expression. */
3134 if (postfix)
3135 post_p = &post;
3136
3137 /* Add or subtract? */
3138 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3139 arith_code = PLUS_EXPR;
3140 else
3141 arith_code = MINUS_EXPR;
3142
3143 /* Gimplify the LHS into a GIMPLE lvalue. */
3144 lvalue = TREE_OPERAND (*expr_p, 0);
3145 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3146 if (ret == GS_ERROR)
3147 return ret;
3148
3149 /* Extract the operands to the arithmetic operation. */
3150 lhs = lvalue;
3151 rhs = TREE_OPERAND (*expr_p, 1);
3152
3153 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3154 that both as the result value and in the post queue operation. */
3155 if (postfix)
3156 {
3157 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3158 if (ret == GS_ERROR)
3159 return ret;
3160
3161 lhs = get_initialized_tmp_var (lhs, pre_p);
3162 }
3163
3164 /* For pointer increment or decrement, use POINTER_PLUS_EXPR. */
3165 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3166 {
3167 rhs = convert_to_ptrofftype_loc (loc, rhs);
3168 if (arith_code == MINUS_EXPR)
3169 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3170 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3171 }
3172 else
3173 t1 = fold_convert (TREE_TYPE (*expr_p),
3174 fold_build2 (arith_code, arith_type,
3175 fold_convert (arith_type, lhs),
3176 fold_convert (arith_type, rhs)));
3177
3178 if (postfix)
3179 {
3180 gimplify_assign (lvalue, t1, pre_p);
3181 gimplify_seq_add_seq (orig_post_p, post);
3182 *expr_p = lhs;
3183 return GS_ALL_DONE;
3184 }
3185 else
3186 {
3187 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3188 return GS_OK;
3189 }
3190 }
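
/* Editor's illustration (assumption): "b = a++;" is lowered above to

     t = a;		(LHS saved because the value is wanted)
     a = t + 1;
     b = t;

   while a bare "a++;" takes the cheaper prefix route, "a = a + 1;".  */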
3191
3192 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3193
3194 static void
3195 maybe_with_size_expr (tree *expr_p)
3196 {
3197 tree expr = *expr_p;
3198 tree type = TREE_TYPE (expr);
3199 tree size;
3200
3201 /* If we've already wrapped this or the type is error_mark_node, we can't do
3202 anything. */
3203 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3204 || type == error_mark_node)
3205 return;
3206
3207 /* If the size isn't known or is a constant, we have nothing to do. */
3208 size = TYPE_SIZE_UNIT (type);
3209 if (!size || poly_int_tree_p (size))
3210 return;
3211
3212 /* Otherwise, make a WITH_SIZE_EXPR. */
3213 size = unshare_expr (size);
3214 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3215 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3216 }
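
/* Editor's note (illustration): for an expression whose type has a
   non-constant TYPE_SIZE_UNIT "sz" (a variably modified type, e.g.
   from GNU C or Ada), the result is

     WITH_SIZE_EXPR <expr, sz'>

   where sz' has any PLACEHOLDER_EXPRs substituted, so later passes can
   still recover the object size.  */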
3217
3218 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3219 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3220 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3221 gimplified to an SSA name. */
3222
3223 enum gimplify_status
3224 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3225 bool allow_ssa)
3226 {
3227 bool (*test) (tree);
3228 fallback_t fb;
3229
3230 /* In general, we allow lvalues for function arguments to avoid
3231 extra overhead of copying large aggregates out of even larger
3232 aggregates into temporaries only to copy the temporaries to
3233 the argument list. Make optimizers happy by pulling out to
3234 temporaries those types that fit in registers. */
3235 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3236 test = is_gimple_val, fb = fb_rvalue;
3237 else
3238 {
3239 test = is_gimple_lvalue, fb = fb_either;
3240 /* Also strip a TARGET_EXPR that would force an extra copy. */
3241 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3242 {
3243 tree init = TARGET_EXPR_INITIAL (*arg_p);
3244 if (init
3245 && !VOID_TYPE_P (TREE_TYPE (init)))
3246 *arg_p = init;
3247 }
3248 }
3249
3250 /* If this is a variable sized type, we must remember the size. */
3251 maybe_with_size_expr (arg_p);
3252
3253 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3254 /* Make sure arguments have the same location as the function call
3255 itself. */
3256 protected_set_expr_location (*arg_p, call_location);
3257
3258 /* There is a sequence point before a function call. Side effects in
3259 the argument list must occur before the actual call. So, when
3260 gimplifying arguments, force gimplify_expr to use an internal
3261 post queue which is then appended to the end of PRE_P. */
3262 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3263 }
3264
3265 /* Don't fold inside offloading or taskreg regions: it can break code by
3266 adding decl references that weren't in the source. We'll do it during
3267 the omplower pass instead. */
3268
3269 static bool
3270 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3271 {
3272 struct gimplify_omp_ctx *ctx;
3273 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3274 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3275 return false;
3276 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3277 return false;
3278 /* Delay folding of builtins until the IL is in consistent state
3279 so the diagnostic machinery can do a better job. */
3280 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3281 return false;
3282 return fold_stmt (gsi);
3283 }
3284
3285 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3286 WANT_VALUE is true if the result of the call is desired. */
3287
3288 static enum gimplify_status
3289 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3290 {
3291 tree fndecl, parms, p, fnptrtype;
3292 enum gimplify_status ret;
3293 int i, nargs;
3294 gcall *call;
3295 bool builtin_va_start_p = false;
3296 location_t loc = EXPR_LOCATION (*expr_p);
3297
3298 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3299
3300 /* For reliable diagnostics during inlining, it is necessary that
3301 every call_expr be annotated with file and line. */
3302 if (! EXPR_HAS_LOCATION (*expr_p))
3303 SET_EXPR_LOCATION (*expr_p, input_location);
3304
3305 /* Gimplify internal functions created in the FEs. */
3306 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3307 {
3308 if (want_value)
3309 return GS_ALL_DONE;
3310
3311 nargs = call_expr_nargs (*expr_p);
3312 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3313 auto_vec<tree> vargs (nargs);
3314
3315 for (i = 0; i < nargs; i++)
3316 {
3317 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3318 EXPR_LOCATION (*expr_p));
3319 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3320 }
3321
3322 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3323 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3324 gimplify_seq_add_stmt (pre_p, call);
3325 return GS_ALL_DONE;
3326 }
3327
3328 /* This may be a call to a builtin function.
3329
3330 Builtin function calls may be transformed into different
3331 (and more efficient) builtin function calls under certain
3332 circumstances. Unfortunately, gimplification can muck things
3333 up enough that the builtin expanders are not aware that certain
3334 transformations are still valid.
3335
3336 So we attempt transformation/gimplification of the call before
3337 we gimplify the CALL_EXPR. At this time we do not manage to
3338 transform all calls in the same manner as the expanders do, but
3339 we do transform most of them. */
3340 fndecl = get_callee_fndecl (*expr_p);
3341 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3342 switch (DECL_FUNCTION_CODE (fndecl))
3343 {
3344 CASE_BUILT_IN_ALLOCA:
3345 /* If the call has been built for a variable-sized object, then we
3346 want to restore the stack level when the enclosing BIND_EXPR is
3347 exited to reclaim the allocated space; otherwise, we precisely
3348 need to do the opposite and preserve the latest stack level. */
3349 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3350 gimplify_ctxp->save_stack = true;
3351 else
3352 gimplify_ctxp->keep_stack = true;
3353 break;
3354
3355 case BUILT_IN_VA_START:
3356 {
3357 builtin_va_start_p = TRUE;
3358 if (call_expr_nargs (*expr_p) < 2)
3359 {
3360 error ("too few arguments to function %<va_start%>");
3361 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3362 return GS_OK;
3363 }
3364
3365 if (fold_builtin_next_arg (*expr_p, true))
3366 {
3367 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3368 return GS_OK;
3369 }
3370 break;
3371 }
3372
3373 case BUILT_IN_EH_RETURN:
3374 cfun->calls_eh_return = true;
3375 break;
3376
3377 default:
3378 ;
3379 }
3380 if (fndecl && fndecl_built_in_p (fndecl))
3381 {
3382 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3383 if (new_tree && new_tree != *expr_p)
3384 {
3385 /* There was a transformation of this call which computes the
3386 same value, but in a more efficient way. Return and try
3387 again. */
3388 *expr_p = new_tree;
3389 return GS_OK;
3390 }
3391 }
3392
3393 /* Remember the original function pointer type. */
3394 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3395
3396 if (flag_openmp
3397 && fndecl
3398 && cfun
3399 && (cfun->curr_properties & PROP_gimple_any) == 0)
3400 {
3401 tree variant = omp_resolve_declare_variant (fndecl);
3402 if (variant != fndecl)
3403 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3404 }
3405
3406 /* There is a sequence point before the call, so any side effects in
3407 the calling expression must occur before the actual call. Force
3408 gimplify_expr to use an internal post queue. */
3409 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3410 is_gimple_call_addr, fb_rvalue);
3411
3412 nargs = call_expr_nargs (*expr_p);
3413
3414 /* Get argument types for verification. */
3415 fndecl = get_callee_fndecl (*expr_p);
3416 parms = NULL_TREE;
3417 if (fndecl)
3418 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3419 else
3420 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3421
3422 if (fndecl && DECL_ARGUMENTS (fndecl))
3423 p = DECL_ARGUMENTS (fndecl);
3424 else if (parms)
3425 p = parms;
3426 else
3427 p = NULL_TREE;
3428 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3429 ;
3430
3431 /* If the last argument is __builtin_va_arg_pack () and it is not
3432 passed as a named argument, decrease the number of CALL_EXPR
3433 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3434 if (!p
3435 && i < nargs
3436 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3437 {
3438 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3439 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3440
3441 if (last_arg_fndecl
3442 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3443 {
3444 tree call = *expr_p;
3445
3446 --nargs;
3447 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3448 CALL_EXPR_FN (call),
3449 nargs, CALL_EXPR_ARGP (call));
3450
3451 /* Copy all CALL_EXPR flags, location and block, except
3452 CALL_EXPR_VA_ARG_PACK flag. */
3453 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3454 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3455 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3456 = CALL_EXPR_RETURN_SLOT_OPT (call);
3457 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3458 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3459
3460 /* Set CALL_EXPR_VA_ARG_PACK. */
3461 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3462 }
3463 }
3464
3465 /* If the call returns twice then after building the CFG the call
3466 argument computations will no longer dominate the call because
3467 we add an abnormal incoming edge to the call. So do not use SSA
3468 vars there. */
3469 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3470
3471 /* Gimplify the function arguments. */
3472 if (nargs > 0)
3473 {
3474 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3475 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3476 PUSH_ARGS_REVERSED ? i-- : i++)
3477 {
3478 enum gimplify_status t;
3479
3480 /* Avoid gimplifying the second argument to va_start, which needs to
3481 be the plain PARM_DECL. */
3482 if ((i != 1) || !builtin_va_start_p)
3483 {
3484 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3485 EXPR_LOCATION (*expr_p), ! returns_twice);
3486
3487 if (t == GS_ERROR)
3488 ret = GS_ERROR;
3489 }
3490 }
3491 }
3492
3493 /* Gimplify the static chain. */
3494 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3495 {
3496 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3497 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3498 else
3499 {
3500 enum gimplify_status t;
3501 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3502 EXPR_LOCATION (*expr_p), ! returns_twice);
3503 if (t == GS_ERROR)
3504 ret = GS_ERROR;
3505 }
3506 }
3507
3508 /* Verify the function result. */
3509 if (want_value && fndecl
3510 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3511 {
3512 error_at (loc, "using result of function returning %<void%>");
3513 ret = GS_ERROR;
3514 }
3515
3516 /* Try this again in case gimplification exposed something. */
3517 if (ret != GS_ERROR)
3518 {
3519 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3520
3521 if (new_tree && new_tree != *expr_p)
3522 {
3523 /* There was a transformation of this call which computes the
3524 same value, but in a more efficient way. Return and try
3525 again. */
3526 *expr_p = new_tree;
3527 return GS_OK;
3528 }
3529 }
3530 else
3531 {
3532 *expr_p = error_mark_node;
3533 return GS_ERROR;
3534 }
3535
3536 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
3537 call expression itself. This allows us to eliminate redundant or useless
3538 calls to "const" functions. */
3539 if (TREE_CODE (*expr_p) == CALL_EXPR)
3540 {
3541 int flags = call_expr_flags (*expr_p);
3542 if (flags & (ECF_CONST | ECF_PURE)
3543 /* An infinite loop is considered a side effect. */
3544 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3545 TREE_SIDE_EFFECTS (*expr_p) = 0;
3546 }
3547
3548 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3549 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3550 form and delegate the creation of a GIMPLE_CALL to
3551 gimplify_modify_expr. This is always possible because when
3552 WANT_VALUE is true, the caller wants the result of this call into
3553 a temporary, which means that we will emit an INIT_EXPR in
3554 internal_get_tmp_var which will then be handled by
3555 gimplify_modify_expr. */
3556 if (!want_value)
3557 {
3558 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3559 have to do is replicate it as a GIMPLE_CALL tuple. */
3560 gimple_stmt_iterator gsi;
3561 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3562 notice_special_calls (call);
3563 gimplify_seq_add_stmt (pre_p, call);
3564 gsi = gsi_last (*pre_p);
3565 maybe_fold_stmt (&gsi);
3566 *expr_p = NULL_TREE;
3567 }
3568 else
3569 /* Remember the original function type. */
3570 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3571 CALL_EXPR_FN (*expr_p));
3572
3573 return ret;
3574 }
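
/* Editor's illustration (assumption): gimplifying the statement

     foo (bar (x), y);

   (no value wanted) yields

     t = bar (x);	(argument side effects precede the call)
     foo (t, y);	(emitted directly as a GIMPLE_CALL)

   whereas for "z = foo (...)" the gimplified CALL_EXPR stays in *EXPR_P
   and gimplify_modify_expr later builds the GIMPLE_CALL with an LHS.  */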
3575
3576 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3577 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3578
3579 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3580 condition is true or false, respectively. If null, we should generate
3581 our own to skip over the evaluation of this specific expression.
3582
3583 LOCUS is the source location of the COND_EXPR.
3584
3585 This function is the tree equivalent of do_jump.
3586
3587 shortcut_cond_r should only be called by shortcut_cond_expr. */
3588
3589 static tree
3590 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3591 location_t locus)
3592 {
3593 tree local_label = NULL_TREE;
3594 tree t, expr = NULL;
3595
3596 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3597 retain the shortcut semantics. Just insert the gotos here;
3598 shortcut_cond_expr will append the real blocks later. */
3599 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3600 {
3601 location_t new_locus;
3602
3603 /* Turn if (a && b) into
3604
3605 if (a); else goto no;
3606 if (b) goto yes; else goto no;
3607 (no:) */
3608
3609 if (false_label_p == NULL)
3610 false_label_p = &local_label;
3611
3612 /* Keep the original source location on the first 'if'. */
3613 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3614 append_to_statement_list (t, &expr);
3615
3616 /* Set the source location of the && on the second 'if'. */
3617 new_locus = rexpr_location (pred, locus);
3618 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3619 new_locus);
3620 append_to_statement_list (t, &expr);
3621 }
3622 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3623 {
3624 location_t new_locus;
3625
3626 /* Turn if (a || b) into
3627
3628 if (a) goto yes;
3629 if (b) goto yes; else goto no;
3630 (yes:) */
3631
3632 if (true_label_p == NULL)
3633 true_label_p = &local_label;
3634
3635 /* Keep the original source location on the first 'if'. */
3636 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3637 append_to_statement_list (t, &expr);
3638
3639 /* Set the source location of the || on the second 'if'. */
3640 new_locus = rexpr_location (pred, locus);
3641 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3642 new_locus);
3643 append_to_statement_list (t, &expr);
3644 }
3645 else if (TREE_CODE (pred) == COND_EXPR
3646 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3647 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3648 {
3649 location_t new_locus;
3650
3651 /* As long as we're messing with gotos, turn if (a ? b : c) into
3652 if (a)
3653 if (b) goto yes; else goto no;
3654 else
3655 if (c) goto yes; else goto no;
3656
3657 Don't do this if one of the arms has void type, which can happen
3658 in C++ when the arm is throw. */
3659
3660 /* Keep the original source location on the first 'if'. Set the source
3661 location of the ? on the second 'if'. */
3662 new_locus = rexpr_location (pred, locus);
3663 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3664 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3665 false_label_p, locus),
3666 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3667 false_label_p, new_locus));
3668 }
3669 else
3670 {
3671 expr = build3 (COND_EXPR, void_type_node, pred,
3672 build_and_jump (true_label_p),
3673 build_and_jump (false_label_p));
3674 SET_EXPR_LOCATION (expr, locus);
3675 }
3676
3677 if (local_label)
3678 {
3679 t = build1 (LABEL_EXPR, void_type_node, local_label);
3680 append_to_statement_list (t, &expr);
3681 }
3682
3683 return expr;
3684 }
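/* For example (editor's illustration, assuming plain C input): called on
   the predicate `a && b' with *TRUE_LABEL_P == yes and FALSE_LABEL_P ==
   NULL, shortcut_cond_r creates a local label and builds roughly

     if (a) ; else goto <D.1>;
     if (b) goto yes; else goto <D.1>;
     <D.1>:

   matching the TRUTH_ANDIF_EXPR case above.  */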
3685
3686 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3687 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3688 statement, if it is the last one. Otherwise, return NULL. */
3689
3690 static tree
3691 find_goto (tree expr)
3692 {
3693 if (!expr)
3694 return NULL_TREE;
3695
3696 if (TREE_CODE (expr) == GOTO_EXPR)
3697 return expr;
3698
3699 if (TREE_CODE (expr) != STATEMENT_LIST)
3700 return NULL_TREE;
3701
3702 tree_stmt_iterator i = tsi_start (expr);
3703
3704 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3705 tsi_next (&i);
3706
3707 if (!tsi_one_before_end_p (i))
3708 return NULL_TREE;
3709
3710 return find_goto (tsi_stmt (i));
3711 }
3712
3713 /* Same as find_goto, except that it returns NULL if the destination
3714 is not a LABEL_DECL. */
3715
3716 static inline tree
3717 find_goto_label (tree expr)
3718 {
3719 tree dest = find_goto (expr);
3720 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3721 return dest;
3722 return NULL_TREE;
3723 }
3724
3725 /* Given a conditional expression EXPR with short-circuit boolean
3726 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3727 predicate apart into the equivalent sequence of conditionals. */
3728
3729 static tree
3730 shortcut_cond_expr (tree expr)
3731 {
3732 tree pred = TREE_OPERAND (expr, 0);
3733 tree then_ = TREE_OPERAND (expr, 1);
3734 tree else_ = TREE_OPERAND (expr, 2);
3735 tree true_label, false_label, end_label, t;
3736 tree *true_label_p;
3737 tree *false_label_p;
3738 bool emit_end, emit_false, jump_over_else;
3739 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3740 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3741
3742 /* First do simple transformations. */
3743 if (!else_se)
3744 {
3745 /* If there is no 'else', turn
3746 if (a && b) then c
3747 into
3748 if (a) if (b) then c. */
3749 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3750 {
3751 /* Keep the original source location on the first 'if'. */
3752 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3753 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3754 /* Set the source location of the && on the second 'if'. */
3755 if (rexpr_has_location (pred))
3756 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3757 then_ = shortcut_cond_expr (expr);
3758 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3759 pred = TREE_OPERAND (pred, 0);
3760 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3761 SET_EXPR_LOCATION (expr, locus);
3762 }
3763 }
3764
3765 if (!then_se)
3766 {
3767 /* If there is no 'then', turn
3768 if (a || b); else d
3769 into
3770 if (a); else if (b); else d. */
3771 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3772 {
3773 /* Keep the original source location on the first 'if'. */
3774 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3775 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3776 /* Set the source location of the || on the second 'if'. */
3777 if (rexpr_has_location (pred))
3778 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3779 else_ = shortcut_cond_expr (expr);
3780 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3781 pred = TREE_OPERAND (pred, 0);
3782 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3783 SET_EXPR_LOCATION (expr, locus);
3784 }
3785 }
3786
3787 /* If we're done, great. */
3788 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3789 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3790 return expr;
3791
3792 /* Otherwise we need to mess with gotos. Change
3793 if (a) c; else d;
3794 to
3795 if (a); else goto no;
3796 c; goto end;
3797 no: d; end:
3798 and recursively gimplify the condition. */
3799
3800 true_label = false_label = end_label = NULL_TREE;
3801
3802 /* If our arms just jump somewhere, hijack those labels so we don't
3803 generate jumps to jumps. */
3804
3805 if (tree then_goto = find_goto_label (then_))
3806 {
3807 true_label = GOTO_DESTINATION (then_goto);
3808 then_ = NULL;
3809 then_se = false;
3810 }
3811
3812 if (tree else_goto = find_goto_label (else_))
3813 {
3814 false_label = GOTO_DESTINATION (else_goto);
3815 else_ = NULL;
3816 else_se = false;
3817 }
3818
3819 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3820 if (true_label)
3821 true_label_p = &true_label;
3822 else
3823 true_label_p = NULL;
3824
3825 /* The 'else' branch also needs a label if it contains interesting code. */
3826 if (false_label || else_se)
3827 false_label_p = &false_label;
3828 else
3829 false_label_p = NULL;
3830
3831 /* If there was nothing else in our arms, just forward the label(s). */
3832 if (!then_se && !else_se)
3833 return shortcut_cond_r (pred, true_label_p, false_label_p,
3834 EXPR_LOC_OR_LOC (expr, input_location));
3835
3836 /* If our last subexpression already has a terminal label, reuse it. */
3837 if (else_se)
3838 t = expr_last (else_);
3839 else if (then_se)
3840 t = expr_last (then_);
3841 else
3842 t = NULL;
3843 if (t && TREE_CODE (t) == LABEL_EXPR)
3844 end_label = LABEL_EXPR_LABEL (t);
3845
3846 /* If we don't care about jumping to the 'else' branch, jump to the end
3847 if the condition is false. */
3848 if (!false_label_p)
3849 false_label_p = &end_label;
3850
3851 /* We only want to emit these labels if we aren't hijacking them. */
3852 emit_end = (end_label == NULL_TREE);
3853 emit_false = (false_label == NULL_TREE);
3854
3855 /* We only emit the jump over the else clause if we have to--if the
3856 then clause may fall through. Otherwise we can wind up with a
3857 useless jump and a useless label at the end of gimplified code,
3858 which will cause us to think that this conditional as a whole
3859 falls through even if it doesn't. If we then inline a function
3860 which ends with such a condition, that can cause us to issue an
3861 inappropriate warning about control reaching the end of a
3862 non-void function. */
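  /* E.g. (editor's sketch): in `if (p) return x; else q = 1;' the then arm
     cannot fall through, so no jump over the else arm is emitted and no
     end label is created.  */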
3863 jump_over_else = block_may_fallthru (then_);
3864
3865 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3866 EXPR_LOC_OR_LOC (expr, input_location));
3867
3868 expr = NULL;
3869 append_to_statement_list (pred, &expr);
3870
3871 append_to_statement_list (then_, &expr);
3872 if (else_se)
3873 {
3874 if (jump_over_else)
3875 {
3876 tree last = expr_last (expr);
3877 t = build_and_jump (&end_label);
3878 if (rexpr_has_location (last))
3879 SET_EXPR_LOCATION (t, rexpr_location (last));
3880 append_to_statement_list (t, &expr);
3881 }
3882 if (emit_false)
3883 {
3884 t = build1 (LABEL_EXPR, void_type_node, false_label);
3885 append_to_statement_list (t, &expr);
3886 }
3887 append_to_statement_list (else_, &expr);
3888 }
3889 if (emit_end && end_label)
3890 {
3891 t = build1 (LABEL_EXPR, void_type_node, end_label);
3892 append_to_statement_list (t, &expr);
3893 }
3894
3895 return expr;
3896 }
3897
3898 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3899
3900 tree
3901 gimple_boolify (tree expr)
3902 {
3903 tree type = TREE_TYPE (expr);
3904 location_t loc = EXPR_LOCATION (expr);
3905
3906 if (TREE_CODE (expr) == NE_EXPR
3907 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3908 && integer_zerop (TREE_OPERAND (expr, 1)))
3909 {
3910 tree call = TREE_OPERAND (expr, 0);
3911 tree fn = get_callee_fndecl (call);
3912
3913 /* For __builtin_expect ((long) (x), y) recurse into x as well
3914 if x is truth_value_p. */
3915 if (fn
3916 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
3917 && call_expr_nargs (call) == 2)
3918 {
3919 tree arg = CALL_EXPR_ARG (call, 0);
3920 if (arg)
3921 {
3922 if (TREE_CODE (arg) == NOP_EXPR
3923 && TREE_TYPE (arg) == TREE_TYPE (call))
3924 arg = TREE_OPERAND (arg, 0);
3925 if (truth_value_p (TREE_CODE (arg)))
3926 {
3927 arg = gimple_boolify (arg);
3928 CALL_EXPR_ARG (call, 0)
3929 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3930 }
3931 }
3932 }
3933 }
3934
3935 switch (TREE_CODE (expr))
3936 {
3937 case TRUTH_AND_EXPR:
3938 case TRUTH_OR_EXPR:
3939 case TRUTH_XOR_EXPR:
3940 case TRUTH_ANDIF_EXPR:
3941 case TRUTH_ORIF_EXPR:
3942 /* Also boolify the arguments of truth exprs. */
3943 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3944 /* FALLTHRU */
3945
3946 case TRUTH_NOT_EXPR:
3947 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3948
3949 /* These expressions always produce boolean results. */
3950 if (TREE_CODE (type) != BOOLEAN_TYPE)
3951 TREE_TYPE (expr) = boolean_type_node;
3952 return expr;
3953
3954 case ANNOTATE_EXPR:
3955 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3956 {
3957 case annot_expr_ivdep_kind:
3958 case annot_expr_unroll_kind:
3959 case annot_expr_no_vector_kind:
3960 case annot_expr_vector_kind:
3961 case annot_expr_parallel_kind:
3962 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3963 if (TREE_CODE (type) != BOOLEAN_TYPE)
3964 TREE_TYPE (expr) = boolean_type_node;
3965 return expr;
3966 default:
3967 gcc_unreachable ();
3968 }
3969
3970 default:
3971 if (COMPARISON_CLASS_P (expr))
3972 {
3973 /* These expressions always produce boolean results. */
3974 if (TREE_CODE (type) != BOOLEAN_TYPE)
3975 TREE_TYPE (expr) = boolean_type_node;
3976 return expr;
3977 }
3978 /* Other expressions that get here must have boolean values, but
3979 might need to be converted to the appropriate mode. */
3980 if (TREE_CODE (type) == BOOLEAN_TYPE)
3981 return expr;
3982 return fold_convert_loc (loc, boolean_type_node, expr);
3983 }
3984 }
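/* For instance (editor's illustration): for C input `if (n & 1) ...' the
   condition reaches gimple_boolify with integer type and falls through to
   the final fold_convert_loc, becoming roughly `(bool) (n & 1)', while a
   comparison like `a < b' merely has its type set to boolean_type_node.  */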
3985
3986 /* Given a conditional expression *EXPR_P without side effects, gimplify
3987 its operands. New statements are inserted into PRE_P. */
3988
3989 static enum gimplify_status
3990 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3991 {
3992 tree expr = *expr_p, cond;
3993 enum gimplify_status ret, tret;
3994 enum tree_code code;
3995
3996 cond = gimple_boolify (COND_EXPR_COND (expr));
3997
3998 /* We need to handle && and || specially, as their gimplification
3999 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
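      /* E.g. (editor's note): for `t = (a && b) ? x : y;' gimplifying the
         shortcut form of `a && b' would itself produce a value-yielding
         COND_EXPR and land back here; using TRUTH_AND_EXPR instead breaks
         the cycle.  */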
4000 code = TREE_CODE (cond);
4001 if (code == TRUTH_ANDIF_EXPR)
4002 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4003 else if (code == TRUTH_ORIF_EXPR)
4004 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4005 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
4006 COND_EXPR_COND (*expr_p) = cond;
4007
4008 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4009 is_gimple_val, fb_rvalue);
4010 ret = MIN (ret, tret);
4011 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4012 is_gimple_val, fb_rvalue);
4013
4014 return MIN (ret, tret);
4015 }
4016
4017 /* Return true if evaluating EXPR could trap.
4018 EXPR is GENERIC, while tree_could_trap_p can be called
4019 only on GIMPLE. */
4020
4021 bool
4022 generic_expr_could_trap_p (tree expr)
4023 {
4024 unsigned i, n;
4025
4026 if (!expr || is_gimple_val (expr))
4027 return false;
4028
4029 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4030 return true;
4031
4032 n = TREE_OPERAND_LENGTH (expr);
4033 for (i = 0; i < n; i++)
4034 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4035 return true;
4036
4037 return false;
4038 }
4039
4040 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4041 into
4042
4043 if (p)                 if (p)
4044   t1 = a;                a;
4045 else            or     else
4046   t1 = b;                b;
4047 t1;
4048
4049 The second form is used when *EXPR_P is of type void.
4050
4051 PRE_P points to the list where side effects that must happen before
4052 *EXPR_P should be stored. */
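/* Concretely (editor's sketch): `x = p ? f () : 0;' becomes

     if (p) iftmp = f (); else iftmp = 0;
     x = iftmp;

   with `iftmp' created by create_tmp_var below when the type is not
   addressable.  */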
4053
4054 static enum gimplify_status
4055 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4056 {
4057 tree expr = *expr_p;
4058 tree type = TREE_TYPE (expr);
4059 location_t loc = EXPR_LOCATION (expr);
4060 tree tmp, arm1, arm2;
4061 enum gimplify_status ret;
4062 tree label_true, label_false, label_cont;
4063 bool have_then_clause_p, have_else_clause_p;
4064 gcond *cond_stmt;
4065 enum tree_code pred_code;
4066 gimple_seq seq = NULL;
4067
4068 /* If this COND_EXPR has a value, copy the values into a temporary within
4069 the arms. */
4070 if (!VOID_TYPE_P (type))
4071 {
4072 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4073 tree result;
4074
4075 /* If either an rvalue is ok or we do not require an lvalue, create the
4076 temporary. But we cannot do that if the type is addressable. */
4077 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4078 && !TREE_ADDRESSABLE (type))
4079 {
4080 if (gimplify_ctxp->allow_rhs_cond_expr
4081 /* If either branch has side effects or could trap, it can't be
4082 evaluated unconditionally. */
4083 && !TREE_SIDE_EFFECTS (then_)
4084 && !generic_expr_could_trap_p (then_)
4085 && !TREE_SIDE_EFFECTS (else_)
4086 && !generic_expr_could_trap_p (else_))
4087 return gimplify_pure_cond_expr (expr_p, pre_p);
4088
4089 tmp = create_tmp_var (type, "iftmp");
4090 result = tmp;
4091 }
4092
4093 /* Otherwise, only create and copy references to the values. */
4094 else
4095 {
4096 type = build_pointer_type (type);
4097
4098 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4099 then_ = build_fold_addr_expr_loc (loc, then_);
4100
4101 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4102 else_ = build_fold_addr_expr_loc (loc, else_);
4103
4104 expr
4105 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4106
4107 tmp = create_tmp_var (type, "iftmp");
4108 result = build_simple_mem_ref_loc (loc, tmp);
4109 }
4110
4111 /* Build the new then clause, `tmp = then_;'. But don't build the
4112 assignment if the value is void; in C++ that happens when the arm is a throw. */
4113 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4114 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4115
4116 /* Similarly, build the new else clause, `tmp = else_;'. */
4117 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4118 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4119
4120 TREE_TYPE (expr) = void_type_node;
4121 recalculate_side_effects (expr);
4122
4123 /* Move the COND_EXPR to the prequeue. */
4124 gimplify_stmt (&expr, pre_p);
4125
4126 *expr_p = result;
4127 return GS_ALL_DONE;
4128 }
4129
4130 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4131 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4132 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4133 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4134
4135 /* Make sure the condition has BOOLEAN_TYPE. */
4136 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4137
4138 /* Break apart && and || conditions. */
4139 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4140 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4141 {
4142 expr = shortcut_cond_expr (expr);
4143
4144 if (expr != *expr_p)
4145 {
4146 *expr_p = expr;
4147
4148 /* We can't rely on gimplify_expr to re-gimplify the expanded
4149 form properly, as cleanups might cause the target labels to be
4150 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4151 set up a conditional context. */
4152 gimple_push_condition ();
4153 gimplify_stmt (expr_p, &seq);
4154 gimple_pop_condition (pre_p);
4155 gimple_seq_add_seq (pre_p, seq);
4156
4157 return GS_ALL_DONE;
4158 }
4159 }
4160
4161 /* Now do the normal gimplification. */
4162
4163 /* Gimplify condition. */
4164 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4165 is_gimple_condexpr_for_cond, fb_rvalue);
4166 if (ret == GS_ERROR)
4167 return GS_ERROR;
4168 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4169
4170 gimple_push_condition ();
4171
4172 have_then_clause_p = have_else_clause_p = false;
4173 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4174 if (label_true
4175 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4176 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4177 have different locations, otherwise we end up with incorrect
4178 location information on the branches. */
4179 && (optimize
4180 || !EXPR_HAS_LOCATION (expr)
4181 || !rexpr_has_location (label_true)
4182 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4183 {
4184 have_then_clause_p = true;
4185 label_true = GOTO_DESTINATION (label_true);
4186 }
4187 else
4188 label_true = create_artificial_label (UNKNOWN_LOCATION);
4189 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4190 if (label_false
4191 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4192 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4193 have different locations, otherwise we end up with incorrect
4194 location information on the branches. */
4195 && (optimize
4196 || !EXPR_HAS_LOCATION (expr)
4197 || !rexpr_has_location (label_false)
4198 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4199 {
4200 have_else_clause_p = true;
4201 label_false = GOTO_DESTINATION (label_false);
4202 }
4203 else
4204 label_false = create_artificial_label (UNKNOWN_LOCATION);
4205
4206 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4207 &arm2);
4208 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4209 label_false);
4210 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4211 gimplify_seq_add_stmt (&seq, cond_stmt);
4212 gimple_stmt_iterator gsi = gsi_last (seq);
4213 maybe_fold_stmt (&gsi);
4214
4215 label_cont = NULL_TREE;
4216 if (!have_then_clause_p)
4217 {
4218 /* For if (...) {} else { code; } put label_true after
4219 the else block. */
4220 if (TREE_OPERAND (expr, 1) == NULL_TREE
4221 && !have_else_clause_p
4222 && TREE_OPERAND (expr, 2) != NULL_TREE)
4223 label_cont = label_true;
4224 else
4225 {
4226 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4227 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4228 /* For if (...) { code; } else {} or
4229 if (...) { code; } else goto label; or
4230 if (...) { code; return; } else { ... }
4231 label_cont isn't needed. */
4232 if (!have_else_clause_p
4233 && TREE_OPERAND (expr, 2) != NULL_TREE
4234 && gimple_seq_may_fallthru (seq))
4235 {
4236 gimple *g;
4237 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4238
4239 g = gimple_build_goto (label_cont);
4240
4241 /* GIMPLE_COND's are very low level; they have embedded
4242 gotos. This particular embedded goto should not be marked
4243 with the location of the original COND_EXPR, as it would
4244 correspond to the COND_EXPR's condition, not the ELSE or the
4245 THEN arms. To avoid marking it with the wrong location, flag
4246 it as "no location". */
4247 gimple_set_do_not_emit_location (g);
4248
4249 gimplify_seq_add_stmt (&seq, g);
4250 }
4251 }
4252 }
4253 if (!have_else_clause_p)
4254 {
4255 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4256 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4257 }
4258 if (label_cont)
4259 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4260
4261 gimple_pop_condition (pre_p);
4262 gimple_seq_add_seq (pre_p, seq);
4263
4264 if (ret == GS_ERROR)
4265 ; /* Do nothing. */
4266 else if (have_then_clause_p || have_else_clause_p)
4267 ret = GS_ALL_DONE;
4268 else
4269 {
4270 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4271 expr = TREE_OPERAND (expr, 0);
4272 gimplify_stmt (&expr, pre_p);
4273 }
4274
4275 *expr_p = NULL;
4276 return ret;
4277 }
4278
4279 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4280 to be marked addressable.
4281
4282 We cannot rely on such an expression being directly markable if a temporary
4283 has been created by the gimplification. In this case, we create another
4284 temporary and initialize it with a copy, which will become a store after we
4285 mark it addressable. This can happen if the front-end passed us something
4286 that it could not mark addressable yet, like a Fortran pass-by-reference
4287 parameter (int) floatvar. */
4288
4289 static void
4290 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4291 {
4292 while (handled_component_p (*expr_p))
4293 expr_p = &TREE_OPERAND (*expr_p, 0);
4294 if (is_gimple_reg (*expr_p))
4295 {
4296 /* Do not allow an SSA name as the temporary. */
4297 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4298 DECL_NOT_GIMPLE_REG_P (var) = 1;
4299 *expr_p = var;
4300 }
4301 }
4302
4303 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4304 a call to __builtin_memcpy. */
4305
4306 static enum gimplify_status
4307 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4308 gimple_seq *seq_p)
4309 {
4310 tree t, to, to_ptr, from, from_ptr;
4311 gcall *gs;
4312 location_t loc = EXPR_LOCATION (*expr_p);
4313
4314 to = TREE_OPERAND (*expr_p, 0);
4315 from = TREE_OPERAND (*expr_p, 1);
4316
4317 /* Mark the RHS addressable. Beware that it may not be possible to do so
4318 directly if a temporary has been created by the gimplification. */
4319 prepare_gimple_addressable (&from, seq_p);
4320
4321 mark_addressable (from);
4322 from_ptr = build_fold_addr_expr_loc (loc, from);
4323 gimplify_arg (&from_ptr, seq_p, loc);
4324
4325 mark_addressable (to);
4326 to_ptr = build_fold_addr_expr_loc (loc, to);
4327 gimplify_arg (&to_ptr, seq_p, loc);
4328
4329 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4330
4331 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4332
4333 if (want_value)
4334 {
4335 /* tmp = memcpy() */
4336 t = create_tmp_var (TREE_TYPE (to_ptr));
4337 gimple_call_set_lhs (gs, t);
4338 gimplify_seq_add_stmt (seq_p, gs);
4339
4340 *expr_p = build_simple_mem_ref (t);
4341 return GS_ALL_DONE;
4342 }
4343
4344 gimplify_seq_add_stmt (seq_p, gs);
4345 *expr_p = NULL;
4346 return GS_ALL_DONE;
4347 }
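/* E.g. (editor's illustration): for an aggregate copy `a = b;' whose size
   is not a suitable constant, this emits roughly

     __builtin_memcpy (&a, &b, SIZE);

   where SIZE is the size operand passed in by the caller (typically from
   a WITH_SIZE_EXPR).  */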
4348
4349 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4350 a call to __builtin_memset. In this case we know that the RHS is
4351 a CONSTRUCTOR with an empty element list. */
4352
4353 static enum gimplify_status
4354 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4355 gimple_seq *seq_p)
4356 {
4357 tree t, from, to, to_ptr;
4358 gcall *gs;
4359 location_t loc = EXPR_LOCATION (*expr_p);
4360
4361 /* Assert our assumptions, to abort instead of producing wrong code
4362 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4363 not be immediately exposed. */
4364 from = TREE_OPERAND (*expr_p, 1);
4365 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4366 from = TREE_OPERAND (from, 0);
4367
4368 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4369 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4370
4371 /* Now proceed. */
4372 to = TREE_OPERAND (*expr_p, 0);
4373
4374 to_ptr = build_fold_addr_expr_loc (loc, to);
4375 gimplify_arg (&to_ptr, seq_p, loc);
4376 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4377
4378 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4379
4380 if (want_value)
4381 {
4382 /* tmp = memset() */
4383 t = create_tmp_var (TREE_TYPE (to_ptr));
4384 gimple_call_set_lhs (gs, t);
4385 gimplify_seq_add_stmt (seq_p, gs);
4386
4387 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4388 return GS_ALL_DONE;
4389 }
4390
4391 gimplify_seq_add_stmt (seq_p, gs);
4392 *expr_p = NULL;
4393 return GS_ALL_DONE;
4394 }
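/* E.g. (editor's illustration): for `s = (struct S) {};' with an empty
   CONSTRUCTOR on the RHS, this emits roughly

     __builtin_memset (&s, 0, SIZE);

   with SIZE again supplied by the caller.  */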
4395
4396 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4397 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4398 assignment. Return non-null if we detect a potential overlap. */
4399
4400 struct gimplify_init_ctor_preeval_data
4401 {
4402 /* The base decl of the lhs object. May be NULL, in which case we
4403 have to assume the lhs is indirect. */
4404 tree lhs_base_decl;
4405
4406 /* The alias set of the lhs object. */
4407 alias_set_type lhs_alias_set;
4408 };
4409
4410 static tree
4411 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4412 {
4413 struct gimplify_init_ctor_preeval_data *data
4414 = (struct gimplify_init_ctor_preeval_data *) xdata;
4415 tree t = *tp;
4416
4417 /* If we find the base object, obviously we have overlap. */
4418 if (data->lhs_base_decl == t)
4419 return t;
4420
4421 /* If the constructor component is indirect, determine if we have a
4422 potential overlap with the lhs. The only bits of information we
4423 have to go on at this point are addressability and alias sets. */
4424 if ((INDIRECT_REF_P (t)
4425 || TREE_CODE (t) == MEM_REF)
4426 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4427 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4428 return t;
4429
4430 /* If the constructor component is a call, determine if it can hide a
4431 potential overlap with the lhs through an INDIRECT_REF like above.
4432 ??? Ugh - this is completely broken. In fact this whole analysis
4433 doesn't look conservative. */
4434 if (TREE_CODE (t) == CALL_EXPR)
4435 {
4436 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4437
4438 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4439 if (POINTER_TYPE_P (TREE_VALUE (type))
4440 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4441 && alias_sets_conflict_p (data->lhs_alias_set,
4442 get_alias_set
4443 (TREE_TYPE (TREE_VALUE (type)))))
4444 return t;
4445 }
4446
4447 if (IS_TYPE_OR_DECL_P (t))
4448 *walk_subtrees = 0;
4449 return NULL;
4450 }
4451
4452 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4453 force values that overlap with the lhs (as described by *DATA)
4454 into temporaries. */
4455
4456 static void
4457 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4458 struct gimplify_init_ctor_preeval_data *data)
4459 {
4460 enum gimplify_status one;
4461
4462 /* If the value is constant, then there's nothing to pre-evaluate. */
4463 if (TREE_CONSTANT (*expr_p))
4464 {
4465 /* Ensure it does not have side effects, it might contain a reference to
4466 the object we're initializing. */
4467 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4468 return;
4469 }
4470
4471 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4472 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4473 return;
4474
4475 /* Recurse for nested constructors. */
4476 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4477 {
4478 unsigned HOST_WIDE_INT ix;
4479 constructor_elt *ce;
4480 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4481
4482 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4483 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4484
4485 return;
4486 }
4487
4488 /* If this is a variable sized type, we must remember the size. */
4489 maybe_with_size_expr (expr_p);
4490
4491 /* Gimplify the constructor element to something appropriate for the rhs
4492 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4493 the gimplifier will consider this a store to memory. Doing this
4494 gimplification now means that we won't have to deal with complicated
4495 language-specific trees, nor trees like SAVE_EXPR that can induce
4496 exponential search behavior. */
4497 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4498 if (one == GS_ERROR)
4499 {
4500 *expr_p = NULL;
4501 return;
4502 }
4503
4504 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4505 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4506 always be true for all scalars, since is_gimple_mem_rhs insists on a
4507 temporary variable for them. */
4508 if (DECL_P (*expr_p))
4509 return;
4510
4511 /* If this is of variable size, we have no choice but to assume it doesn't
4512 overlap since we can't make a temporary for it. */
4513 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4514 return;
4515
4516 /* Otherwise, we must search for overlap ... */
4517 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4518 return;
4519
4520 /* ... and if found, force the value into a temporary. */
4521 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4522 }
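/* For instance (editor's sketch, C99): in `p = (struct pair) { .a = p.b };'
   the read of `p.b' overlaps the lhs `p', so it is forced into a temporary
   here before the member stores are emitted.  */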
4523
4524 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4525 a RANGE_EXPR in a CONSTRUCTOR for an array.
4526
4527 var = lower;
4528 loop_entry:
4529 object[var] = value;
4530 if (var == upper)
4531 goto loop_exit;
4532 var = var + 1;
4533 goto loop_entry;
4534 loop_exit:
4535
4536 We increment var _after_ the loop exit check because we might otherwise
4537 fail if upper == TYPE_MAX_VALUE (type for upper).
4538
4539 Note that we never have to deal with SAVE_EXPRs here, because this has
4540 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4541
4542 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4543 gimple_seq *, bool);
4544
4545 static void
4546 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4547 tree value, tree array_elt_type,
4548 gimple_seq *pre_p, bool cleared)
4549 {
4550 tree loop_entry_label, loop_exit_label, fall_thru_label;
4551 tree var, var_type, cref, tmp;
4552
4553 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4554 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4555 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4556
4557 /* Create and initialize the index variable. */
4558 var_type = TREE_TYPE (upper);
4559 var = create_tmp_var (var_type);
4560 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4561
4562 /* Add the loop entry label. */
4563 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4564
4565 /* Build the reference. */
4566 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4567 var, NULL_TREE, NULL_TREE);
4568
4569 /* If the value is a CONSTRUCTOR, just call gimplify_init_ctor_eval to do
4570 the store. Otherwise just assign value to the reference. */
4571
4572 if (TREE_CODE (value) == CONSTRUCTOR)
4573 /* NB we might have to call ourselves recursively through
4574 gimplify_init_ctor_eval if the value is a constructor. */
4575 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4576 pre_p, cleared);
4577 else
4578 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4579
4580 /* We exit the loop when the index var is equal to the upper bound. */
4581 gimplify_seq_add_stmt (pre_p,
4582 gimple_build_cond (EQ_EXPR, var, upper,
4583 loop_exit_label, fall_thru_label));
4584
4585 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4586
4587 /* Otherwise, increment the index var... */
4588 tmp = build2 (PLUS_EXPR, var_type, var,
4589 fold_convert (var_type, integer_one_node));
4590 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4591
4592 /* ...and jump back to the loop entry. */
4593 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4594
4595 /* Add the loop exit label. */
4596 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4597 }
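/* E.g. (GNU C, editor's illustration): `int a[8] = { [2 ... 5] = v };'
   arrives with a RANGE_EXPR index of 2..5 and is initialized by the loop
   above instead of four separate stores.  */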
4598
4599 /* Return true if FDECL is accessing a field that is zero sized. */
4600
4601 static bool
4602 zero_sized_field_decl (const_tree fdecl)
4603 {
4604 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4605 && integer_zerop (DECL_SIZE (fdecl)))
4606 return true;
4607 return false;
4608 }
4609
4610 /* Return true if TYPE is zero sized. */
4611
4612 static bool
4613 zero_sized_type (const_tree type)
4614 {
4615 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4616 && integer_zerop (TYPE_SIZE (type)))
4617 return true;
4618 return false;
4619 }
4620
4621 /* A subroutine of gimplify_init_constructor. Generate individual
4622 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4623 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4624 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4625 zeroed first. */
4626
4627 static void
4628 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4629 gimple_seq *pre_p, bool cleared)
4630 {
4631 tree array_elt_type = NULL;
4632 unsigned HOST_WIDE_INT ix;
4633 tree purpose, value;
4634
4635 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4636 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4637
4638 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4639 {
4640 tree cref;
4641
4642 /* NULL values are created above for gimplification errors. */
4643 if (value == NULL)
4644 continue;
4645
4646 if (cleared && initializer_zerop (value))
4647 continue;
4648
4649 /* ??? Here's to hoping the front end fills in all of the indices,
4650 so we don't have to figure out what's missing ourselves. */
4651 gcc_assert (purpose);
4652
4653 /* Skip zero-sized fields, unless value has side-effects. This can
4654 happen with calls to functions returning a zero-sized type, which
4655 we shouldn't discard. As a number of downstream passes don't
4656 expect sets of zero-sized fields, we rely on the gimplification of
4657 the MODIFY_EXPR we make below to drop the assignment statement. */
4658 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4659 continue;
4660
4661 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4662 whole range. */
4663 if (TREE_CODE (purpose) == RANGE_EXPR)
4664 {
4665 tree lower = TREE_OPERAND (purpose, 0);
4666 tree upper = TREE_OPERAND (purpose, 1);
4667
4668 /* If the lower bound is equal to upper, just treat it as if
4669 upper was the index. */
4670 if (simple_cst_equal (lower, upper))
4671 purpose = upper;
4672 else
4673 {
4674 gimplify_init_ctor_eval_range (object, lower, upper, value,
4675 array_elt_type, pre_p, cleared);
4676 continue;
4677 }
4678 }
4679
4680 if (array_elt_type)
4681 {
4682 /* Do not use bitsizetype for ARRAY_REF indices. */
4683 if (TYPE_DOMAIN (TREE_TYPE (object)))
4684 purpose
4685 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4686 purpose);
4687 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4688 purpose, NULL_TREE, NULL_TREE);
4689 }
4690 else
4691 {
4692 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4693 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4694 unshare_expr (object), purpose, NULL_TREE);
4695 }
4696
4697 if (TREE_CODE (value) == CONSTRUCTOR
4698 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4699 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4700 pre_p, cleared);
4701 else
4702 {
4703 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4704 gimplify_and_add (init, pre_p);
4705 ggc_free (init);
4706 }
4707 }
4708 }
4709
4710 /* Return the appropriate RHS predicate for this LHS. */
4711
4712 gimple_predicate
4713 rhs_predicate_for (tree lhs)
4714 {
4715 if (is_gimple_reg (lhs))
4716 return is_gimple_reg_rhs_or_call;
4717 else
4718 return is_gimple_mem_rhs_or_call;
4719 }
4720
4721 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4722 before the LHS has been gimplified. */
4723
4724 static gimple_predicate
4725 initial_rhs_predicate_for (tree lhs)
4726 {
4727 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4728 return is_gimple_reg_rhs_or_call;
4729 else
4730 return is_gimple_mem_rhs_or_call;
4731 }
4732
4733 /* Gimplify a C99 compound literal expression. This just means adding
4734 the DECL_EXPR before the current statement and using its anonymous
4735 decl instead. */
4736
4737 static enum gimplify_status
4738 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4739 bool (*gimple_test_f) (tree),
4740 fallback_t fallback)
4741 {
4742 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4743 tree decl = DECL_EXPR_DECL (decl_s);
4744 tree init = DECL_INITIAL (decl);
4745 /* Mark the decl as addressable if the compound literal
4746 expression is addressable now, otherwise it is marked too late
4747 after we gimplify the initialization expression. */
4748 if (TREE_ADDRESSABLE (*expr_p))
4749 TREE_ADDRESSABLE (decl) = 1;
4750 /* Otherwise, if we don't need an lvalue and have a literal, substitute
4751 it directly. Check if it matches the gimple predicate, as
4752 otherwise we'd generate a new temporary, and we can as well just
4753 use the decl we already have. */
4754 else if (!TREE_ADDRESSABLE (decl)
4755 && !TREE_THIS_VOLATILE (decl)
4756 && init
4757 && (fallback & fb_lvalue) == 0
4758 && gimple_test_f (init))
4759 {
4760 *expr_p = init;
4761 return GS_OK;
4762 }
4763
4764 /* If the decl is not addressable, then it is being used in some
4765 expression or on the right hand side of a statement, and it can
4766 be put into a readonly data section. */
4767 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4768 TREE_READONLY (decl) = 1;
4769
4770 /* This decl isn't mentioned in the enclosing block, so add it to the
4771 list of temps. FIXME it seems a bit of a kludge to say that
4772 anonymous artificial vars aren't pushed, but everything else is. */
4773 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4774 gimple_add_tmp_var (decl);
4775
4776 gimplify_and_add (decl_s, pre_p);
4777 *expr_p = decl;
4778 return GS_OK;
4779 }
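/* E.g. (editor's sketch): for `int *p = (int []) { 1, 2, 3 };' the
   anonymous array decl's DECL_EXPR is gimplified here and *EXPR_P becomes
   that decl, whose address then initializes `p'.  */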
4780
4781 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4782 return a new CONSTRUCTOR if something changed. */
4783
4784 static tree
4785 optimize_compound_literals_in_ctor (tree orig_ctor)
4786 {
4787 tree ctor = orig_ctor;
4788 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4789 unsigned int idx, num = vec_safe_length (elts);
4790
4791 for (idx = 0; idx < num; idx++)
4792 {
4793 tree value = (*elts)[idx].value;
4794 tree newval = value;
4795 if (TREE_CODE (value) == CONSTRUCTOR)
4796 newval = optimize_compound_literals_in_ctor (value);
4797 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4798 {
4799 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4800 tree decl = DECL_EXPR_DECL (decl_s);
4801 tree init = DECL_INITIAL (decl);
4802
4803 if (!TREE_ADDRESSABLE (value)
4804 && !TREE_ADDRESSABLE (decl)
4805 && init
4806 && TREE_CODE (init) == CONSTRUCTOR)
4807 newval = optimize_compound_literals_in_ctor (init);
4808 }
4809 if (newval == value)
4810 continue;
4811
4812 if (ctor == orig_ctor)
4813 {
4814 ctor = copy_node (orig_ctor);
4815 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4816 elts = CONSTRUCTOR_ELTS (ctor);
4817 }
4818 (*elts)[idx].value = newval;
4819 }
4820 return ctor;
4821 }
4822
4823 /* A subroutine of gimplify_modify_expr. Break out elements of a
4824 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4825
4826 Note that we still need to clear any elements that don't have explicit
4827 initializers, so if not all elements are initialized we keep the
4828 original MODIFY_EXPR, we just remove all of the constructor elements.
4829
4830 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4831 GS_ERROR if we would have to create a temporary when gimplifying
4832 this constructor. Otherwise, return GS_OK.
4833
4834 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4835
4836 static enum gimplify_status
4837 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4838 bool want_value, bool notify_temp_creation)
4839 {
4840 tree object, ctor, type;
4841 enum gimplify_status ret;
4842 vec<constructor_elt, va_gc> *elts;
4843
4844 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4845
4846 if (!notify_temp_creation)
4847 {
4848 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4849 is_gimple_lvalue, fb_lvalue);
4850 if (ret == GS_ERROR)
4851 return ret;
4852 }
4853
4854 object = TREE_OPERAND (*expr_p, 0);
4855 ctor = TREE_OPERAND (*expr_p, 1)
4856 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4857 type = TREE_TYPE (ctor);
4858 elts = CONSTRUCTOR_ELTS (ctor);
4859 ret = GS_ALL_DONE;
4860
4861 switch (TREE_CODE (type))
4862 {
4863 case RECORD_TYPE:
4864 case UNION_TYPE:
4865 case QUAL_UNION_TYPE:
4866 case ARRAY_TYPE:
4867 {
4868 struct gimplify_init_ctor_preeval_data preeval_data;
4869 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4870 HOST_WIDE_INT num_unique_nonzero_elements;
4871 bool cleared, complete_p, valid_const_initializer;
4872 /* Use readonly data for initializers of this or smaller size
4873 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4874 ratio. */
4875 const HOST_WIDE_INT min_unique_size = 64;
4876 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4877 is smaller than this, use readonly data. */
4878 const int unique_nonzero_ratio = 8;
4879
4880 /* Aggregate types must lower constructors to initialization of
4881 individual elements. The exception is that a CONSTRUCTOR node
4882 with no elements indicates zero-initialization of the whole. */
4883 if (vec_safe_is_empty (elts))
4884 {
4885 if (notify_temp_creation)
4886 return GS_OK;
4887 break;
4888 }
4889
4890 /* Fetch information about the constructor to direct later processing.
4891 We might want to make static versions of it in various cases, and
4892 can only do so if it is known to be a valid constant initializer. */
4893 valid_const_initializer
4894 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4895 &num_unique_nonzero_elements,
4896 &num_ctor_elements, &complete_p);
4897
4898 /* If a const aggregate variable is being initialized, then it
4899 should never be a loss to promote the variable to be static. */
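	 /* E.g. (editor's sketch): a local `const int t[4] = { 1, 2, 3, 4 };'
	    is promoted here to a TREE_STATIC variable initialized from
	    readonly data instead of being built up with per-element
	    stores.  */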
4900 if (valid_const_initializer
4901 && num_nonzero_elements > 1
4902 && TREE_READONLY (object)
4903 && VAR_P (object)
4904 && !DECL_REGISTER (object)
4905 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4906 /* For ctors that have many repeated nonzero elements
4907 represented through RANGE_EXPRs, prefer initializing
4908 those through runtime loops over copies of large amounts
4909 of data from readonly data section. */
4910 && (num_unique_nonzero_elements
4911 > num_nonzero_elements / unique_nonzero_ratio
4912 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4913 <= (unsigned HOST_WIDE_INT) min_unique_size)))
4914 {
4915 if (notify_temp_creation)
4916 return GS_ERROR;
4917 DECL_INITIAL (object) = ctor;
4918 TREE_STATIC (object) = 1;
4919 if (!DECL_NAME (object))
4920 DECL_NAME (object) = create_tmp_var_name ("C");
4921 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4922
4923 /* ??? C++ doesn't automatically append a .<number> to the
4924 assembler name, and even when it does, it looks at FE private
4925 data structures to figure out what that number should be,
4926 which are not set for this variable. I suppose this is
4927 important for local statics for inline functions, which aren't
4928 "local" in the object file sense. So in order to get a unique
4929 TU-local symbol, we must invoke the lhd version now. */
4930 lhd_set_decl_assembler_name (object);
4931
4932 *expr_p = NULL_TREE;
4933 break;
4934 }
4935
4936 /* If there are "lots" of initialized elements, even discounting
4937 those that are not address constants (and thus *must* be
4938 computed at runtime), then partition the constructor into
4939 constant and non-constant parts. Block copy the constant
4940 parts in, then generate code for the non-constant parts. */
4941 /* TODO. There's code in cp/typeck.c to do this. */
4942
4943 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4944 /* store_constructor will ignore the clearing of variable-sized
4945 objects. Initializers for such objects must explicitly set
4946 every field that needs to be set. */
4947 cleared = false;
4948 else if (!complete_p)
4949 /* If the constructor isn't complete, clear the whole object
4950 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4951
4952 ??? This ought not to be needed. For any element not present
4953 in the initializer, we should simply set them to zero. Except
4954 we'd need to *find* the elements that are not present, and that
4955 requires trickery to avoid quadratic compile-time behavior in
4956 large cases or excessive memory use in small cases. */
4957 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
4958 else if (num_ctor_elements - num_nonzero_elements
4959 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4960 && num_nonzero_elements < num_ctor_elements / 4)
4961 /* If there are "lots" of zeros, it's more efficient to clear
4962 the memory and then set the nonzero elements. */
4963 cleared = true;
4964 else
4965 cleared = false;
4966
4967 /* If there are "lots" of initialized elements, and all of them
4968 are valid address constants, then the entire initializer can
4969 be dropped to memory, and then memcpy'd out. Don't do this
4970 for sparse arrays, though, as it's more efficient to follow
4971 the standard CONSTRUCTOR behavior of memset followed by
4972 individual element initialization. Also don't do this for small
4973 all-zero initializers (which aren't big enough to merit
4974 clearing), and don't try to make bitwise copies of
4975 TREE_ADDRESSABLE types. */
4976 if (valid_const_initializer
4977 && complete_p
4978 && !(cleared || num_nonzero_elements == 0)
4979 && !TREE_ADDRESSABLE (type))
4980 {
4981 HOST_WIDE_INT size = int_size_in_bytes (type);
4982 unsigned int align;
4983
4984 /* ??? We can still get unbounded array types, at least
4985 from the C++ front end. This seems wrong, but attempt
4986 to work around it for now. */
4987 if (size < 0)
4988 {
4989 size = int_size_in_bytes (TREE_TYPE (object));
4990 if (size >= 0)
4991 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4992 }
4993
4994 /* Find the maximum alignment we can assume for the object. */
4995 /* ??? Make use of DECL_OFFSET_ALIGN. */
4996 if (DECL_P (object))
4997 align = DECL_ALIGN (object);
4998 else
4999 align = TYPE_ALIGN (type);
5000
5001 /* Do a block move either if the size is so small as to make
5002 each individual move a sub-unit move on average, or if it
5003 is so large as to make individual moves inefficient. */
5004 if (size > 0
5005 && num_nonzero_elements > 1
5006 /* For ctors that have many repeated nonzero elements
5007 represented through RANGE_EXPRs, prefer initializing
5008 those through runtime loops over copies of large amounts
5009 of data from readonly data section. */
5010 && (num_unique_nonzero_elements
5011 > num_nonzero_elements / unique_nonzero_ratio
5012 || size <= min_unique_size)
5013 && (size < num_nonzero_elements
5014 || !can_move_by_pieces (size, align)))
5015 {
5016 if (notify_temp_creation)
5017 return GS_ERROR;
5018
5019 walk_tree (&ctor, force_labels_r, NULL, NULL);
5020 ctor = tree_output_constant_def (ctor);
5021 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5022 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5023 TREE_OPERAND (*expr_p, 1) = ctor;
5024
5025 /* This is no longer an assignment of a CONSTRUCTOR, but
5026 we still may have processing to do on the LHS. So
5027 pretend we didn't do anything here to let that happen. */
5028 return GS_UNHANDLED;
5029 }
5030 }
5031
5032 /* If the target is volatile, we have non-zero elements and more than
5033 one field to assign, initialize the target from a temporary. */
5034 if (TREE_THIS_VOLATILE (object)
5035 && !TREE_ADDRESSABLE (type)
5036 && (num_nonzero_elements > 0 || !cleared)
5037 && vec_safe_length (elts) > 1)
5038 {
5039 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5040 TREE_OPERAND (*expr_p, 0) = temp;
5041 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5042 *expr_p,
5043 build2 (MODIFY_EXPR, void_type_node,
5044 object, temp));
5045 return GS_OK;
5046 }
5047
5048 if (notify_temp_creation)
5049 return GS_OK;
5050
5051 /* If there are nonzero elements and if needed, pre-evaluate to capture
5052 elements overlapping with the lhs into temporaries. We must do this
5053 before clearing to fetch the values before they are zeroed-out. */
5054 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5055 {
5056 preeval_data.lhs_base_decl = get_base_address (object);
5057 if (!DECL_P (preeval_data.lhs_base_decl))
5058 preeval_data.lhs_base_decl = NULL;
5059 preeval_data.lhs_alias_set = get_alias_set (object);
5060
5061 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5062 pre_p, post_p, &preeval_data);
5063 }
5064
5065 bool ctor_has_side_effects_p
5066 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5067
5068 if (cleared)
5069 {
5070 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5071 Note that we still have to gimplify, in order to handle the
5072 case of variable sized types. Avoid shared tree structures. */
5073 CONSTRUCTOR_ELTS (ctor) = NULL;
5074 TREE_SIDE_EFFECTS (ctor) = 0;
5075 object = unshare_expr (object);
5076 gimplify_stmt (expr_p, pre_p);
5077 }
5078
5079 /* If we have not block cleared the object, or if there are nonzero
5080 elements in the constructor, or if the constructor has side effects,
5081 add assignments to the individual scalar fields of the object. */
5082 if (!cleared
5083 || num_nonzero_elements > 0
5084 || ctor_has_side_effects_p)
5085 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5086
5087 *expr_p = NULL_TREE;
5088 }
5089 break;
5090
5091 case COMPLEX_TYPE:
5092 {
5093 tree r, i;
5094
5095 if (notify_temp_creation)
5096 return GS_OK;
5097
5098 /* Extract the real and imaginary parts out of the ctor. */
5099 gcc_assert (elts->length () == 2);
5100 r = (*elts)[0].value;
5101 i = (*elts)[1].value;
5102 if (r == NULL || i == NULL)
5103 {
5104 tree zero = build_zero_cst (TREE_TYPE (type));
5105 if (r == NULL)
5106 r = zero;
5107 if (i == NULL)
5108 i = zero;
5109 }
5110
5111 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5112 represent creation of a complex value. */
5113 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5114 {
5115 ctor = build_complex (type, r, i);
5116 TREE_OPERAND (*expr_p, 1) = ctor;
5117 }
5118 else
5119 {
5120 ctor = build2 (COMPLEX_EXPR, type, r, i);
5121 TREE_OPERAND (*expr_p, 1) = ctor;
5122 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5123 pre_p,
5124 post_p,
5125 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5126 fb_rvalue);
5127 }
5128 }
5129 break;
5130
5131 case VECTOR_TYPE:
5132 {
5133 unsigned HOST_WIDE_INT ix;
5134 constructor_elt *ce;
5135
5136 if (notify_temp_creation)
5137 return GS_OK;
5138
5139 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5140 if (TREE_CONSTANT (ctor))
5141 {
5142 bool constant_p = true;
5143 tree value;
5144
5145 /* Even when ctor is constant, it might contain non-*_CST
5146 elements, such as addresses or trapping values like
5147 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5148 in VECTOR_CST nodes. */
5149 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5150 if (!CONSTANT_CLASS_P (value))
5151 {
5152 constant_p = false;
5153 break;
5154 }
5155
5156 if (constant_p)
5157 {
5158 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5159 break;
5160 }
5161
5162 TREE_CONSTANT (ctor) = 0;
5163 }
5164
5165 /* Vector types use CONSTRUCTOR all the way through gimple
5166 compilation as a general initializer. */
5167 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5168 {
5169 enum gimplify_status tret;
5170 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5171 fb_rvalue);
5172 if (tret == GS_ERROR)
5173 ret = GS_ERROR;
5174 else if (TREE_STATIC (ctor)
5175 && !initializer_constant_valid_p (ce->value,
5176 TREE_TYPE (ce->value)))
5177 TREE_STATIC (ctor) = 0;
5178 }
5179 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5180 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5181 }
5182 break;
5183
5184 default:
5185 /* So how did we get a CONSTRUCTOR for a scalar type? */
5186 gcc_unreachable ();
5187 }
5188
5189 if (ret == GS_ERROR)
5190 return GS_ERROR;
5191 /* If we have gimplified both sides of the initializer but have
5192 not emitted an assignment, do so now. */
5193 if (*expr_p)
5194 {
5195 tree lhs = TREE_OPERAND (*expr_p, 0);
5196 tree rhs = TREE_OPERAND (*expr_p, 1);
5197 if (want_value && object == lhs)
5198 lhs = unshare_expr (lhs);
5199 gassign *init = gimple_build_assign (lhs, rhs);
5200 gimplify_seq_add_stmt (pre_p, init);
5201 }
5202 if (want_value)
5203 {
5204 *expr_p = object;
5205 return GS_OK;
5206 }
5207 else
5208 {
5209 *expr_p = NULL;
5210 return GS_ALL_DONE;
5211 }
5212 }
5213
5214 /* Given a pointer value T, return a simplified version of an
5215 indirection through T, or NULL_TREE if no simplification is
5216 possible. This may only be applied to the rhs of an expression.
5217 Note that the resulting type may be different from the type pointed
5218 to in the sense that it is still compatible from the langhooks
5219 point of view. */
5220
5221 static tree
5222 gimple_fold_indirect_ref_rhs (tree t)
5223 {
5224 return gimple_fold_indirect_ref (t);
5225 }
5226
5227 /* Subroutine of gimplify_modify_expr to do simplifications of
5228 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5229 something changes. */
5230
5231 static enum gimplify_status
5232 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5233 gimple_seq *pre_p, gimple_seq *post_p,
5234 bool want_value)
5235 {
5236 enum gimplify_status ret = GS_UNHANDLED;
5237 bool changed;
5238
5239 do
5240 {
5241 changed = false;
5242 switch (TREE_CODE (*from_p))
5243 {
5244 case VAR_DECL:
5245 /* If we're assigning from a read-only variable initialized with
5246 a constructor, do the direct assignment from the constructor,
5247 but only if neither source nor target are volatile since this
5248 latter assignment might end up being done on a per-field basis. */
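	  /* E.g. (editor's sketch): given `static const struct S cst = {...};'
	     an assignment `s = cst;' may have the CONSTRUCTOR substituted
	     for `cst' so gimplify_init_constructor can lower it, unless
	     that would force a new temporary.  */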
5249 if (DECL_INITIAL (*from_p)
5250 && TREE_READONLY (*from_p)
5251 && !TREE_THIS_VOLATILE (*from_p)
5252 && !TREE_THIS_VOLATILE (*to_p)
5253 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5254 {
5255 tree old_from = *from_p;
5256 enum gimplify_status subret;
5257
5258 /* Move the constructor into the RHS. */
5259 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5260
5261 /* Let's see if gimplify_init_constructor will need to put
5262 it in memory. */
5263 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5264 false, true);
5265 if (subret == GS_ERROR)
5266 {
5267 /* If so, revert the change. */
5268 *from_p = old_from;
5269 }
5270 else
5271 {
5272 ret = GS_OK;
5273 changed = true;
5274 }
5275 }
5276 break;
5277 case INDIRECT_REF:
5278 {
5279 /* If we have code like
5280
5281 *(const A*)(A*)&x
5282
5283 where the type of "x" is a (possibly cv-qualified variant
5284 of "A"), treat the entire expression as identical to "x".
5285 This kind of code arises in C++ when an object is bound
5286 to a const reference, and if "x" is a TARGET_EXPR we want
5287 to take advantage of the optimization below. */
5288 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5289 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5290 if (t)
5291 {
5292 if (TREE_THIS_VOLATILE (t) != volatile_p)
5293 {
5294 if (DECL_P (t))
5295 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5296 build_fold_addr_expr (t));
5297 if (REFERENCE_CLASS_P (t))
5298 TREE_THIS_VOLATILE (t) = volatile_p;
5299 }
5300 *from_p = t;
5301 ret = GS_OK;
5302 changed = true;
5303 }
5304 break;
5305 }
5306
5307 case TARGET_EXPR:
5308 {
5309 /* If we are initializing something from a TARGET_EXPR, strip the
5310 TARGET_EXPR and initialize it directly, if possible. This can't
5311 be done if the initializer is void, since that implies that the
5312 temporary is set in some non-trivial way.
5313
5314 ??? What about code that pulls out the temp and uses it
5315 elsewhere? I think that such code never uses the TARGET_EXPR as
5316 an initializer. If I'm wrong, we'll die because the temp won't
5317 have any RTL. In that case, I guess we'll need to replace
5318 references somehow. */
5319 tree init = TARGET_EXPR_INITIAL (*from_p);
5320
5321 if (init
5322 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5323 || !TARGET_EXPR_NO_ELIDE (*from_p))
5324 && !VOID_TYPE_P (TREE_TYPE (init)))
5325 {
5326 *from_p = init;
5327 ret = GS_OK;
5328 changed = true;
5329 }
5330 }
5331 break;
5332
5333 case COMPOUND_EXPR:
5334 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5335 caught. */
5336 gimplify_compound_expr (from_p, pre_p, true);
5337 ret = GS_OK;
5338 changed = true;
5339 break;
5340
5341 case CONSTRUCTOR:
5342 /* If we already made some changes, let the front end have a
5343 crack at this before we break it down. */
5344 if (ret != GS_UNHANDLED)
5345 break;
5346 /* If we're initializing from a CONSTRUCTOR, break this into
5347 individual MODIFY_EXPRs. */
5348 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5349 false);
5350
5351 case COND_EXPR:
5352 /* If we're assigning to a non-register type, push the assignment
5353 down into the branches. This is mandatory for ADDRESSABLE types,
5354 since we cannot generate temporaries for such, but it saves a
5355 copy in other cases as well. */
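/* Illustrative example (not in the original source): for an
   aggregate type A, the GENERIC assignment

     a = cond ? f () : g ()

   is rewritten below into the equivalent of

     if (cond) a = f (); else a = g ();

   avoiding a temporary of type A.  */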
5356 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5357 {
5358 /* This code should mirror the code in gimplify_cond_expr. */
5359 enum tree_code code = TREE_CODE (*expr_p);
5360 tree cond = *from_p;
5361 tree result = *to_p;
5362
5363 ret = gimplify_expr (&result, pre_p, post_p,
5364 is_gimple_lvalue, fb_lvalue);
5365 if (ret != GS_ERROR)
5366 ret = GS_OK;
5367
5368 /* If we are going to write RESULT more than once, clear
5369 TREE_READONLY flag, otherwise we might incorrectly promote
5370 the variable to static const and initialize it at compile
5371 time in one of the branches. */
5372 if (VAR_P (result)
5373 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5374 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5375 TREE_READONLY (result) = 0;
5376 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5377 TREE_OPERAND (cond, 1)
5378 = build2 (code, void_type_node, result,
5379 TREE_OPERAND (cond, 1));
5380 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5381 TREE_OPERAND (cond, 2)
5382 = build2 (code, void_type_node, unshare_expr (result),
5383 TREE_OPERAND (cond, 2));
5384
5385 TREE_TYPE (cond) = void_type_node;
5386 recalculate_side_effects (cond);
5387
5388 if (want_value)
5389 {
5390 gimplify_and_add (cond, pre_p);
5391 *expr_p = unshare_expr (result);
5392 }
5393 else
5394 *expr_p = cond;
5395 return ret;
5396 }
5397 break;
5398
5399 case CALL_EXPR:
5400 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5401 return slot so that we don't generate a temporary. */
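/* Hedged sketch (not in the original source): for

     struct S s = f ();

   where S is returned in memory, setting CALL_EXPR_RETURN_SLOT_OPT
   lets the call expand as f (&s) rather than f (&tmp); s = tmp.  */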
5402 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5403 && aggregate_value_p (*from_p, *from_p))
5404 {
5405 bool use_target;
5406
5407 if (!(rhs_predicate_for (*to_p))(*from_p))
5408 /* If we need a temporary, *to_p isn't accurate. */
5409 use_target = false;
5410 /* It's OK to use the return slot directly unless it's an NRV. */
5411 else if (TREE_CODE (*to_p) == RESULT_DECL
5412 && DECL_NAME (*to_p) == NULL_TREE
5413 && needs_to_live_in_memory (*to_p))
5414 use_target = true;
5415 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5416 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5417 /* Don't force regs into memory. */
5418 use_target = false;
5419 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5420 /* It's OK to use the target directly if it's being
5421 initialized. */
5422 use_target = true;
5423 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5424 != INTEGER_CST)
5425 /* Always use the target and thus RSO for variable-sized types.
5426 GIMPLE cannot deal with a variable-sized assignment
5427 embedded in a call statement. */
5428 use_target = true;
5429 else if (TREE_CODE (*to_p) != SSA_NAME
5430 && (!is_gimple_variable (*to_p)
5431 || needs_to_live_in_memory (*to_p)))
5432 /* Don't use the original target if it's already addressable;
5433 if its address escapes, and the called function uses the
5434 NRV optimization, a conforming program could see *to_p
5435 change before the called function returns; see c++/19317.
5436 When optimizing, the return_slot pass marks more functions
5437 as safe after we have escape info. */
5438 use_target = false;
5439 else
5440 use_target = true;
5441
5442 if (use_target)
5443 {
5444 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5445 mark_addressable (*to_p);
5446 }
5447 }
5448 break;
5449
5450 case WITH_SIZE_EXPR:
5451 /* Likewise for calls that return an aggregate of non-constant size,
5452 since we would not be able to generate a temporary at all. */
5453 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5454 {
5455 *from_p = TREE_OPERAND (*from_p, 0);
5456 /* We don't change ret in this case because the
5457 WITH_SIZE_EXPR might have been added in
5458 gimplify_modify_expr, so returning GS_OK would lead to an
5459 infinite loop. */
5460 changed = true;
5461 }
5462 break;
5463
5464 /* If we're initializing from a container, push the initialization
5465 inside it. */
5466 case CLEANUP_POINT_EXPR:
5467 case BIND_EXPR:
5468 case STATEMENT_LIST:
5469 {
5470 tree wrap = *from_p;
5471 tree t;
5472
5473 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5474 fb_lvalue);
5475 if (ret != GS_ERROR)
5476 ret = GS_OK;
5477
5478 t = voidify_wrapper_expr (wrap, *expr_p);
5479 gcc_assert (t == *expr_p);
5480
5481 if (want_value)
5482 {
5483 gimplify_and_add (wrap, pre_p);
5484 *expr_p = unshare_expr (*to_p);
5485 }
5486 else
5487 *expr_p = wrap;
5488 return GS_OK;
5489 }
5490
5491 case COMPOUND_LITERAL_EXPR:
5492 {
5493 tree complit = TREE_OPERAND (*expr_p, 1);
5494 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5495 tree decl = DECL_EXPR_DECL (decl_s);
5496 tree init = DECL_INITIAL (decl);
5497
5498 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5499 into struct T x = { 0, 1, 2 } if the address of the
5500 compound literal has never been taken. */
5501 if (!TREE_ADDRESSABLE (complit)
5502 && !TREE_ADDRESSABLE (decl)
5503 && init)
5504 {
5505 *expr_p = copy_node (*expr_p);
5506 TREE_OPERAND (*expr_p, 1) = init;
5507 return GS_OK;
5508 }
5509 }
5510
5511 default:
5512 break;
5513 }
5514 }
5515 while (changed);
5516
5517 return ret;
5518 }
5519
5520
5521 /* Return true if T looks like a valid GIMPLE statement. */
5522
5523 static bool
5524 is_gimple_stmt (tree t)
5525 {
5526 const enum tree_code code = TREE_CODE (t);
5527
5528 switch (code)
5529 {
5530 case NOP_EXPR:
5531 /* The only valid NOP_EXPR is the empty statement. */
5532 return IS_EMPTY_STMT (t);
5533
5534 case BIND_EXPR:
5535 case COND_EXPR:
5536 /* These are only valid if they're void. */
5537 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5538
5539 case SWITCH_EXPR:
5540 case GOTO_EXPR:
5541 case RETURN_EXPR:
5542 case LABEL_EXPR:
5543 case CASE_LABEL_EXPR:
5544 case TRY_CATCH_EXPR:
5545 case TRY_FINALLY_EXPR:
5546 case EH_FILTER_EXPR:
5547 case CATCH_EXPR:
5548 case ASM_EXPR:
5549 case STATEMENT_LIST:
5550 case OACC_PARALLEL:
5551 case OACC_KERNELS:
5552 case OACC_SERIAL:
5553 case OACC_DATA:
5554 case OACC_HOST_DATA:
5555 case OACC_DECLARE:
5556 case OACC_UPDATE:
5557 case OACC_ENTER_DATA:
5558 case OACC_EXIT_DATA:
5559 case OACC_CACHE:
5560 case OMP_PARALLEL:
5561 case OMP_FOR:
5562 case OMP_SIMD:
5563 case OMP_DISTRIBUTE:
5564 case OMP_LOOP:
5565 case OACC_LOOP:
5566 case OMP_SCAN:
5567 case OMP_SECTIONS:
5568 case OMP_SECTION:
5569 case OMP_SINGLE:
5570 case OMP_MASTER:
5571 case OMP_TASKGROUP:
5572 case OMP_ORDERED:
5573 case OMP_CRITICAL:
5574 case OMP_TASK:
5575 case OMP_TARGET:
5576 case OMP_TARGET_DATA:
5577 case OMP_TARGET_UPDATE:
5578 case OMP_TARGET_ENTER_DATA:
5579 case OMP_TARGET_EXIT_DATA:
5580 case OMP_TASKLOOP:
5581 case OMP_TEAMS:
5582 /* These are always void. */
5583 return true;
5584
5585 case CALL_EXPR:
5586 case MODIFY_EXPR:
5587 case PREDICT_EXPR:
5588 /* These are valid regardless of their type. */
5589 return true;
5590
5591 default:
5592 return false;
5593 }
5594 }
5595
5596
5597 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5598 a MODIFY_EXPR whose lhs is a REAL/IMAGPART_EXPR of a gimple register.
5599
5600 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5601 other, unmodified part of the complex object just before the total store.
5602 As a consequence, if the object is still uninitialized, an undefined value
5603 will be loaded into a register, which may result in a spurious exception
5604 if the register is floating-point and the value happens to be a signaling
5605 NaN for example. Then the fully-fledged complex operations lowering pass
5606 followed by a DCE pass are necessary in order to fix things up. */
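/* Illustrative sketch (not in the original source): for a complex
   variable z, the partial store

     __real__ z = r;

   becomes the total store

     tmp = __imag__ z;
     z = COMPLEX_EXPR <r, tmp>;

   so that z can be kept in registers.  */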
5607
5608 static enum gimplify_status
5609 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5610 bool want_value)
5611 {
5612 enum tree_code code, ocode;
5613 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5614
5615 lhs = TREE_OPERAND (*expr_p, 0);
5616 rhs = TREE_OPERAND (*expr_p, 1);
5617 code = TREE_CODE (lhs);
5618 lhs = TREE_OPERAND (lhs, 0);
5619
5620 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5621 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5622 TREE_NO_WARNING (other) = 1;
5623 other = get_formal_tmp_var (other, pre_p);
5624
5625 realpart = code == REALPART_EXPR ? rhs : other;
5626 imagpart = code == REALPART_EXPR ? other : rhs;
5627
5628 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5629 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5630 else
5631 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5632
5633 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5634 *expr_p = (want_value) ? rhs : NULL_TREE;
5635
5636 return GS_ALL_DONE;
5637 }
5638
5639 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5640
5641 modify_expr
5642 : varname '=' rhs
5643 | '*' ID '=' rhs
5644
5645 PRE_P points to the list where side effects that must happen before
5646 *EXPR_P should be stored.
5647
5648 POST_P points to the list where side effects that must happen after
5649 *EXPR_P should be stored.
5650
5651 WANT_VALUE is nonzero iff we want to use the value of this expression
5652 in another expression. */
5653
5654 static enum gimplify_status
5655 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5656 bool want_value)
5657 {
5658 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5659 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5660 enum gimplify_status ret = GS_UNHANDLED;
5661 gimple *assign;
5662 location_t loc = EXPR_LOCATION (*expr_p);
5663 gimple_stmt_iterator gsi;
5664
5665 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5666 || TREE_CODE (*expr_p) == INIT_EXPR);
5667
5668 /* Trying to simplify a clobber using normal logic doesn't work,
5669 so handle it here. */
5670 if (TREE_CLOBBER_P (*from_p))
5671 {
5672 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5673 if (ret == GS_ERROR)
5674 return ret;
5675 gcc_assert (!want_value);
5676 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5677 {
5678 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5679 pre_p, post_p);
5680 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5681 }
5682 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5683 *expr_p = NULL;
5684 return GS_ALL_DONE;
5685 }
5686
5687 /* Insert pointer conversions required by the middle-end that are not
5688 required by the frontend. This fixes middle-end type checking
5689 for, e.g., gcc.dg/redecl-6.c. */
5690 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5691 {
5692 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5693 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5694 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5695 }
5696
5697 /* See if any simplifications can be done based on what the RHS is. */
5698 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5699 want_value);
5700 if (ret != GS_UNHANDLED)
5701 return ret;
5702
5703 /* For zero sized types only gimplify the left hand side and right hand
5704 side as statements and throw away the assignment. Do this after
5705 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5706 types properly. */
5707 if (zero_sized_type (TREE_TYPE (*from_p))
5708 && !want_value
5709 /* Don't do this for calls that return addressable types, expand_call
5710 relies on those having a lhs. */
5711 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5712 && TREE_CODE (*from_p) == CALL_EXPR))
5713 {
5714 gimplify_stmt (from_p, pre_p);
5715 gimplify_stmt (to_p, pre_p);
5716 *expr_p = NULL_TREE;
5717 return GS_ALL_DONE;
5718 }
5719
5720 /* If the value being copied is of variable width, compute the length
5721 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5722 before gimplifying any of the operands so that we can resolve any
5723 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5724 the size of the expression to be copied, not of the destination, so
5725 that is what we must do here. */
5726 maybe_with_size_expr (from_p);
5727
5728 /* As a special case, we have to temporarily allow for assignments
5729 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5730 a toplevel statement, when gimplifying the GENERIC expression
5731 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5732 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5733
5734 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5735 prevent gimplify_expr from trying to create a new temporary for
5736 foo's LHS, we tell it that it should only gimplify until it
5737 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5738 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5739 and all we need to do here is set 'a' to be its LHS. */
5740
5741 /* Gimplify the RHS first for C++17 and bug 71104. */
5742 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5743 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5744 if (ret == GS_ERROR)
5745 return ret;
5746
5747 /* Then gimplify the LHS. */
5748 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5749 twice we have to make sure to gimplify into non-SSA as otherwise
5750 the abnormal edge added later will make those defs not dominate
5751 their uses.
5752 ??? Technically this applies only to the registers used in the
5753 resulting non-register *TO_P. */
5754 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5755 if (saved_into_ssa
5756 && TREE_CODE (*from_p) == CALL_EXPR
5757 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5758 gimplify_ctxp->into_ssa = false;
5759 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5760 gimplify_ctxp->into_ssa = saved_into_ssa;
5761 if (ret == GS_ERROR)
5762 return ret;
5763
5764 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5765 guess for the predicate was wrong. */
5766 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5767 if (final_pred != initial_pred)
5768 {
5769 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5770 if (ret == GS_ERROR)
5771 return ret;
5772 }
5773
5774 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
5775 size as an argument to the call. */
5776 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5777 {
5778 tree call = TREE_OPERAND (*from_p, 0);
5779 tree vlasize = TREE_OPERAND (*from_p, 1);
5780
5781 if (TREE_CODE (call) == CALL_EXPR
5782 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5783 {
5784 int nargs = call_expr_nargs (call);
5785 tree type = TREE_TYPE (call);
5786 tree ap = CALL_EXPR_ARG (call, 0);
5787 tree tag = CALL_EXPR_ARG (call, 1);
5788 tree aptag = CALL_EXPR_ARG (call, 2);
5789 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5790 IFN_VA_ARG, type,
5791 nargs + 1, ap, tag,
5792 aptag, vlasize);
5793 TREE_OPERAND (*from_p, 0) = newcall;
5794 }
5795 }
5796
5797 /* Now see if the above changed *from_p to something we handle specially. */
5798 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5799 want_value);
5800 if (ret != GS_UNHANDLED)
5801 return ret;
5802
5803 /* If we've got a variable-sized assignment between two lvalues (i.e. one
5804 that does not involve a call), then we can make things a bit more
5805 straightforward by converting the assignment to memcpy or memset. */
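/* E.g. (illustrative only): for WITH_SIZE_EXPR <src, sz> assigned to
   dst, an addressable source is lowered along the lines of

     memcpy (&dst, &src, sz);

   while a CONSTRUCTOR source (zero-initialization) is lowered to a
   memset of the destination.  */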
5806 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5807 {
5808 tree from = TREE_OPERAND (*from_p, 0);
5809 tree size = TREE_OPERAND (*from_p, 1);
5810
5811 if (TREE_CODE (from) == CONSTRUCTOR)
5812 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5813
5814 if (is_gimple_addressable (from))
5815 {
5816 *from_p = from;
5817 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5818 pre_p);
5819 }
5820 }
5821
5822 /* Transform partial stores to non-addressable complex variables into
5823 total stores. This allows us to use real instead of virtual operands
5824 for these variables, which improves optimization. */
5825 if ((TREE_CODE (*to_p) == REALPART_EXPR
5826 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5827 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5828 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5829
5830 /* Try to alleviate the effects of the gimplification creating artificial
5831 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5832 make sure not to create DECL_DEBUG_EXPR links across functions. */
5833 if (!gimplify_ctxp->into_ssa
5834 && VAR_P (*from_p)
5835 && DECL_IGNORED_P (*from_p)
5836 && DECL_P (*to_p)
5837 && !DECL_IGNORED_P (*to_p)
5838 && decl_function_context (*to_p) == current_function_decl
5839 && decl_function_context (*from_p) == current_function_decl)
5840 {
5841 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5842 DECL_NAME (*from_p)
5843 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5844 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5845 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5846 }
5847
5848 if (want_value && TREE_THIS_VOLATILE (*to_p))
5849 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5850
5851 if (TREE_CODE (*from_p) == CALL_EXPR)
5852 {
5853 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5854 instead of a GIMPLE_ASSIGN. */
5855 gcall *call_stmt;
5856 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5857 {
5858 /* Gimplify internal functions created in the FEs. */
5859 int nargs = call_expr_nargs (*from_p), i;
5860 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5861 auto_vec<tree> vargs (nargs);
5862
5863 for (i = 0; i < nargs; i++)
5864 {
5865 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5866 EXPR_LOCATION (*from_p));
5867 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5868 }
5869 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5870 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5871 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5872 }
5873 else
5874 {
5875 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5876 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5877 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5878 tree fndecl = get_callee_fndecl (*from_p);
5879 if (fndecl
5880 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
5881 && call_expr_nargs (*from_p) == 3)
5882 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5883 CALL_EXPR_ARG (*from_p, 0),
5884 CALL_EXPR_ARG (*from_p, 1),
5885 CALL_EXPR_ARG (*from_p, 2));
5886 else
5887 {
5888 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5889 }
5890 }
5891 notice_special_calls (call_stmt);
5892 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5893 gimple_call_set_lhs (call_stmt, *to_p);
5894 else if (TREE_CODE (*to_p) == SSA_NAME)
5895 /* The above is somewhat premature, avoid ICEing later for a
5896 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5897 ??? This doesn't make it a default-def. */
5898 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5899
5900 assign = call_stmt;
5901 }
5902 else
5903 {
5904 assign = gimple_build_assign (*to_p, *from_p);
5905 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5906 if (COMPARISON_CLASS_P (*from_p))
5907 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5908 }
5909
5910 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5911 {
5912 /* We should have got an SSA name from the start. */
5913 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5914 || ! gimple_in_ssa_p (cfun));
5915 }
5916
5917 gimplify_seq_add_stmt (pre_p, assign);
5918 gsi = gsi_last (*pre_p);
5919 maybe_fold_stmt (&gsi);
5920
5921 if (want_value)
5922 {
5923 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5924 return GS_OK;
5925 }
5926 else
5927 *expr_p = NULL;
5928
5929 return GS_ALL_DONE;
5930 }
5931
5932 /* Gimplify a comparison between two variable-sized objects. Do this
5933 with a call to BUILT_IN_MEMCMP. */
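/* For example (illustrative, not from the source): for two
   variable-length arrays a and b of equal size SZ,

     a == b

   becomes

     memcmp (&a, &b, SZ) == 0

   with SZ taken from the type of the first operand, substituting any
   PLACEHOLDER_EXPRs.  */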
5934
5935 static enum gimplify_status
5936 gimplify_variable_sized_compare (tree *expr_p)
5937 {
5938 location_t loc = EXPR_LOCATION (*expr_p);
5939 tree op0 = TREE_OPERAND (*expr_p, 0);
5940 tree op1 = TREE_OPERAND (*expr_p, 1);
5941 tree t, arg, dest, src, expr;
5942
5943 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5944 arg = unshare_expr (arg);
5945 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5946 src = build_fold_addr_expr_loc (loc, op1);
5947 dest = build_fold_addr_expr_loc (loc, op0);
5948 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5949 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5950
5951 expr
5952 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5953 SET_EXPR_LOCATION (expr, loc);
5954 *expr_p = expr;
5955
5956 return GS_OK;
5957 }
5958
5959 /* Gimplify a comparison between two aggregate objects of integral scalar
5960 mode as a comparison between the bitwise equivalent scalar values. */
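/* Illustrative sketch (assuming a target where SImode maps to
   unsigned int): for

     struct S { short lo, hi; } x, y;

   the comparison x == y is rewritten as

     VIEW_CONVERT_EXPR<unsigned int>(x) == VIEW_CONVERT_EXPR<unsigned int>(y)

   so a single scalar comparison is emitted.  */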
5961
5962 static enum gimplify_status
5963 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5964 {
5965 location_t loc = EXPR_LOCATION (*expr_p);
5966 tree op0 = TREE_OPERAND (*expr_p, 0);
5967 tree op1 = TREE_OPERAND (*expr_p, 1);
5968
5969 tree type = TREE_TYPE (op0);
5970 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5971
5972 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5973 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5974
5975 *expr_p
5976 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5977
5978 return GS_OK;
5979 }
5980
5981 /* Gimplify an expression sequence. This function gimplifies each
5982 expression and rewrites the original expression with the last
5983 expression of the sequence in GIMPLE form.
5984
5985 PRE_P points to the list where the side effects for all the
5986 expressions in the sequence will be emitted.
5987
5988 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
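/* For instance (illustrative): gimplifying

     (a = 1, b = 2, c)

   with WANT_VALUE set emits a = 1 and b = 2 into PRE_P and rewrites
   *EXPR_P to the trailing expression c.  */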
5989
5990 static enum gimplify_status
5991 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5992 {
5993 tree t = *expr_p;
5994
5995 do
5996 {
5997 tree *sub_p = &TREE_OPERAND (t, 0);
5998
5999 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6000 gimplify_compound_expr (sub_p, pre_p, false);
6001 else
6002 gimplify_stmt (sub_p, pre_p);
6003
6004 t = TREE_OPERAND (t, 1);
6005 }
6006 while (TREE_CODE (t) == COMPOUND_EXPR);
6007
6008 *expr_p = t;
6009 if (want_value)
6010 return GS_OK;
6011 else
6012 {
6013 gimplify_stmt (expr_p, pre_p);
6014 return GS_ALL_DONE;
6015 }
6016 }
6017
6018 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6019 gimplify. After gimplification, EXPR_P will point to a new temporary
6020 that holds the original value of the SAVE_EXPR node.
6021
6022 PRE_P points to the list where side effects that must happen before
6023 *EXPR_P should be stored. */
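/* Sketch (not in the original source): the first occurrence of
   SAVE_EXPR <i++> emits

     tmp = i++;

   into PRE_P and records tmp in the node; later occurrences of the
   same (shared) SAVE_EXPR then reuse tmp instead of re-evaluating.  */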
6024
6025 static enum gimplify_status
6026 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6027 {
6028 enum gimplify_status ret = GS_ALL_DONE;
6029 tree val;
6030
6031 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6032 val = TREE_OPERAND (*expr_p, 0);
6033
6034 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6035 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6036 {
6037 /* The operand may be a void-valued expression. It is
6038 being executed only for its side-effects. */
6039 if (TREE_TYPE (val) == void_type_node)
6040 {
6041 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6042 is_gimple_stmt, fb_none);
6043 val = NULL;
6044 }
6045 else
6046 /* The temporary may not be an SSA name as later abnormal and EH
6047 control flow may invalidate use/def domination. When in SSA
6048 form then assume there are no such issues and SAVE_EXPRs only
6049 appear via GENERIC foldings. */
6050 val = get_initialized_tmp_var (val, pre_p, post_p,
6051 gimple_in_ssa_p (cfun));
6052
6053 TREE_OPERAND (*expr_p, 0) = val;
6054 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6055 }
6056
6057 *expr_p = val;
6058
6059 return ret;
6060 }
6061
6062 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6063
6064 unary_expr
6065 : ...
6066 | '&' varname
6067 ...
6068
6069 PRE_P points to the list where side effects that must happen before
6070 *EXPR_P should be stored.
6071
6072 POST_P points to the list where side effects that must happen after
6073 *EXPR_P should be stored. */
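/* Illustrative example (not in the original source): gimplifying

     &*p    -- e.g. left over from __builtin_va_end expansion

   yields just p, with a conversion added if the two pointer types
   differ only in qualification.  */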
6074
6075 static enum gimplify_status
6076 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6077 {
6078 tree expr = *expr_p;
6079 tree op0 = TREE_OPERAND (expr, 0);
6080 enum gimplify_status ret;
6081 location_t loc = EXPR_LOCATION (*expr_p);
6082
6083 switch (TREE_CODE (op0))
6084 {
6085 case INDIRECT_REF:
6086 do_indirect_ref:
6087 /* Check if we are dealing with an expression of the form '&*ptr'.
6088 While the front end folds away '&*ptr' into 'ptr', these
6089 expressions may be generated internally by the compiler (e.g.,
6090 builtins like __builtin_va_end). */
6091 /* Caution: the silent array decomposition semantics we allow for
6092 ADDR_EXPR mean we can't always discard the pair. */
6093 /* Gimplification of the ADDR_EXPR operand may drop
6094 cv-qualification conversions, so make sure we add them if
6095 needed. */
6096 {
6097 tree op00 = TREE_OPERAND (op0, 0);
6098 tree t_expr = TREE_TYPE (expr);
6099 tree t_op00 = TREE_TYPE (op00);
6100
6101 if (!useless_type_conversion_p (t_expr, t_op00))
6102 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6103 *expr_p = op00;
6104 ret = GS_OK;
6105 }
6106 break;
6107
6108 case VIEW_CONVERT_EXPR:
6109 /* Take the address of our operand and then convert it to the type of
6110 this ADDR_EXPR.
6111
6112 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6113 all clear. The impact of this transformation is even less clear. */
6114
6115 /* If the operand is a useless conversion, look through it. Doing so
6116 guarantees that the ADDR_EXPR and its operand will remain of the
6117 same type. */
6118 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6119 op0 = TREE_OPERAND (op0, 0);
6120
6121 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6122 build_fold_addr_expr_loc (loc,
6123 TREE_OPERAND (op0, 0)));
6124 ret = GS_OK;
6125 break;
6126
6127 case MEM_REF:
6128 if (integer_zerop (TREE_OPERAND (op0, 1)))
6129 goto do_indirect_ref;
6130
6131 /* fall through */
6132
6133 default:
6134 /* If we see a call to a declared builtin or see its address
6135 being taken (we can unify those cases here) then we can mark
6136 the builtin for implicit generation by GCC. */
6137 if (TREE_CODE (op0) == FUNCTION_DECL
6138 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6139 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6140 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6141
6142 /* We use fb_either here because the C frontend sometimes takes
6143 the address of a call that returns a struct; see
6144 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6145 the implied temporary explicit. */
6146
6147 /* Make the operand addressable. */
6148 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6149 is_gimple_addressable, fb_either);
6150 if (ret == GS_ERROR)
6151 break;
6152
6153 /* Then mark it. Beware that it may not be possible to do so directly
6154 if a temporary has been created by the gimplification. */
6155 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6156
6157 op0 = TREE_OPERAND (expr, 0);
6158
6159 /* For various reasons, the gimplification of the expression
6160 may have made a new INDIRECT_REF. */
6161 if (TREE_CODE (op0) == INDIRECT_REF
6162 || (TREE_CODE (op0) == MEM_REF
6163 && integer_zerop (TREE_OPERAND (op0, 1))))
6164 goto do_indirect_ref;
6165
6166 mark_addressable (TREE_OPERAND (expr, 0));
6167
6168 /* The FEs may end up building ADDR_EXPRs early on a decl with
6169 an incomplete type. Re-build ADDR_EXPRs in canonical form
6170 here. */
6171 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6172 *expr_p = build_fold_addr_expr (op0);
6173
6174 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6175 recompute_tree_invariant_for_addr_expr (*expr_p);
6176
6177 /* If we re-built the ADDR_EXPR add a conversion to the original type
6178 if required. */
6179 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6180 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6181
6182 break;
6183 }
6184
6185 return ret;
6186 }
6187
6188 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6189 value; output operands should be a gimple lvalue. */
6190
6191 static enum gimplify_status
6192 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6193 {
6194 tree expr;
6195 int noutputs;
6196 const char **oconstraints;
6197 int i;
6198 tree link;
6199 const char *constraint;
6200 bool allows_mem, allows_reg, is_inout;
6201 enum gimplify_status ret, tret;
6202 gasm *stmt;
6203 vec<tree, va_gc> *inputs;
6204 vec<tree, va_gc> *outputs;
6205 vec<tree, va_gc> *clobbers;
6206 vec<tree, va_gc> *labels;
6207 tree link_next;
6208
6209 expr = *expr_p;
6210 noutputs = list_length (ASM_OUTPUTS (expr));
6211 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6212
6213 inputs = NULL;
6214 outputs = NULL;
6215 clobbers = NULL;
6216 labels = NULL;
6217
6218 ret = GS_ALL_DONE;
6219 link_next = NULL_TREE;
6220 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6221 {
6222 bool ok;
6223 size_t constraint_len;
6224
6225 link_next = TREE_CHAIN (link);
6226
6227 oconstraints[i]
6228 = constraint
6229 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6230 constraint_len = strlen (constraint);
6231 if (constraint_len == 0)
6232 continue;
6233
6234 ok = parse_output_constraint (&constraint, i, 0, 0,
6235 &allows_mem, &allows_reg, &is_inout);
6236 if (!ok)
6237 {
6238 ret = GS_ERROR;
6239 is_inout = false;
6240 }
6241
6242 /* If we can't make copies, we can only accept memory.
6243 Similarly for VLAs. */
6244 tree outtype = TREE_TYPE (TREE_VALUE (link));
6245 if (outtype != error_mark_node
6246 && (TREE_ADDRESSABLE (outtype)
6247 || !COMPLETE_TYPE_P (outtype)
6248 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6249 {
6250 if (allows_mem)
6251 allows_reg = 0;
6252 else
6253 {
6254 error ("impossible constraint in %<asm%>");
6255 error ("non-memory output %d must stay in memory", i);
6256 return GS_ERROR;
6257 }
6258 }
6259
6260 if (!allows_reg && allows_mem)
6261 mark_addressable (TREE_VALUE (link));
6262
6263 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6264 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6265 fb_lvalue | fb_mayfail);
6266 if (tret == GS_ERROR)
6267 {
6268 error ("invalid lvalue in %<asm%> output %d", i);
6269 ret = tret;
6270 }
6271
6272 /* If the constraint does not allow memory, make sure we gimplify
6273 it to a register if it is not one already but its base is. This
6274 happens for complex and vector components. */
6275 if (!allows_mem)
6276 {
6277 tree op = TREE_VALUE (link);
6278 if (! is_gimple_val (op)
6279 && is_gimple_reg_type (TREE_TYPE (op))
6280 && is_gimple_reg (get_base_address (op)))
6281 {
6282 tree tem = create_tmp_reg (TREE_TYPE (op));
6283 tree ass;
6284 if (is_inout)
6285 {
6286 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6287 tem, unshare_expr (op));
6288 gimplify_and_add (ass, pre_p);
6289 }
6290 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6291 gimplify_and_add (ass, post_p);
6292
6293 TREE_VALUE (link) = tem;
6294 tret = GS_OK;
6295 }
6296 }
6297
6298 vec_safe_push (outputs, link);
6299 TREE_CHAIN (link) = NULL_TREE;
6300
6301 if (is_inout)
6302 {
6303 /* An input/output operand. To give the optimizers more
6304 flexibility, split it into separate input and output
6305 operands. */
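/* For example (illustrative): the in/out operand "+r" (x) in
   output position 0 is split below into the output "=r" (x)
   plus a new matching input "0" (x).  */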
6306 tree input;
6307 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6308 char buf[11];
6309
6310 /* Turn the in/out constraint into an output constraint. */
6311 char *p = xstrdup (constraint);
6312 p[0] = '=';
6313 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6314
6315 /* And add a matching input constraint. */
6316 if (allows_reg)
6317 {
6318 sprintf (buf, "%u", i);
6319
6320 /* If there are multiple alternatives in the constraint,
6321 handle each of them individually. Those that allow a register
6322 will be replaced with the operand number; the others stay
6323 unchanged. */
6324 if (strchr (p, ',') != NULL)
6325 {
6326 size_t len = 0, buflen = strlen (buf);
6327 char *beg, *end, *str, *dst;
6328
6329 for (beg = p + 1;;)
6330 {
6331 end = strchr (beg, ',');
6332 if (end == NULL)
6333 end = strchr (beg, '\0');
6334 if ((size_t) (end - beg) < buflen)
6335 len += buflen + 1;
6336 else
6337 len += end - beg + 1;
6338 if (*end)
6339 beg = end + 1;
6340 else
6341 break;
6342 }
6343
6344 str = (char *) alloca (len);
6345 for (beg = p + 1, dst = str;;)
6346 {
6347 const char *tem;
6348 bool mem_p, reg_p, inout_p;
6349
6350 end = strchr (beg, ',');
6351 if (end)
6352 *end = '\0';
6353 beg[-1] = '=';
6354 tem = beg - 1;
6355 parse_output_constraint (&tem, i, 0, 0,
6356 &mem_p, &reg_p, &inout_p);
6357 if (dst != str)
6358 *dst++ = ',';
6359 if (reg_p)
6360 {
6361 memcpy (dst, buf, buflen);
6362 dst += buflen;
6363 }
6364 else
6365 {
6366 if (end)
6367 len = end - beg;
6368 else
6369 len = strlen (beg);
6370 memcpy (dst, beg, len);
6371 dst += len;
6372 }
6373 if (end)
6374 beg = end + 1;
6375 else
6376 break;
6377 }
6378 *dst = '\0';
6379 input = build_string (dst - str, str);
6380 }
6381 else
6382 input = build_string (strlen (buf), buf);
6383 }
6384 else
6385 input = build_string (constraint_len - 1, constraint + 1);
6386
6387 free (p);
6388
6389 input = build_tree_list (build_tree_list (NULL_TREE, input),
6390 unshare_expr (TREE_VALUE (link)));
6391 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6392 }
6393 }
6394
6395 link_next = NULL_TREE;
6396 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6397 {
6398 link_next = TREE_CHAIN (link);
6399 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6400 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6401 oconstraints, &allows_mem, &allows_reg);
6402
6403 /* If we can't make copies, we can only accept memory. */
6404 tree intype = TREE_TYPE (TREE_VALUE (link));
6405 if (intype != error_mark_node
6406 && (TREE_ADDRESSABLE (intype)
6407 || !COMPLETE_TYPE_P (intype)
6408 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6409 {
6410 if (allows_mem)
6411 allows_reg = 0;
6412 else
6413 {
6414 error ("impossible constraint in %<asm%>");
6415 error ("non-memory input %d must stay in memory", i);
6416 return GS_ERROR;
6417 }
6418 }
6419
6420 /* If the operand is a memory input, it should be an lvalue. */
6421 if (!allows_reg && allows_mem)
6422 {
6423 tree inputv = TREE_VALUE (link);
6424 STRIP_NOPS (inputv);
6425 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6426 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6427 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6428 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6429 || TREE_CODE (inputv) == MODIFY_EXPR)
6430 TREE_VALUE (link) = error_mark_node;
6431 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6432 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6433 if (tret != GS_ERROR)
6434 {
6435 /* Unlike output operands, memory inputs are not guaranteed
6436 to be lvalues by the FE, and while the expressions are
6437 marked addressable there, temporaries in e.g. a statement
6438 expression might not end up being addressable. They might
6439 already be used in the IL, so it is too late to make them
6440 addressable now. */
6441 tree x = TREE_VALUE (link);
6442 while (handled_component_p (x))
6443 x = TREE_OPERAND (x, 0);
6444 if (TREE_CODE (x) == MEM_REF
6445 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6446 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6447 if ((VAR_P (x)
6448 || TREE_CODE (x) == PARM_DECL
6449 || TREE_CODE (x) == RESULT_DECL)
6450 && !TREE_ADDRESSABLE (x)
6451 && is_gimple_reg (x))
6452 {
6453 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6454 input_location), 0,
6455 "memory input %d is not directly addressable",
6456 i);
6457 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6458 }
6459 }
6460 mark_addressable (TREE_VALUE (link));
6461 if (tret == GS_ERROR)
6462 {
6463 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6464 "memory input %d is not directly addressable", i);
6465 ret = tret;
6466 }
6467 }
6468 else
6469 {
6470 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6471 is_gimple_asm_val, fb_rvalue);
6472 if (tret == GS_ERROR)
6473 ret = tret;
6474 }
6475
6476 TREE_CHAIN (link) = NULL_TREE;
6477 vec_safe_push (inputs, link);
6478 }
6479
6480 link_next = NULL_TREE;
6481 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6482 {
6483 link_next = TREE_CHAIN (link);
6484 TREE_CHAIN (link) = NULL_TREE;
6485 vec_safe_push (clobbers, link);
6486 }
6487
6488 link_next = NULL_TREE;
6489 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6490 {
6491 link_next = TREE_CHAIN (link);
6492 TREE_CHAIN (link) = NULL_TREE;
6493 vec_safe_push (labels, link);
6494 }
6495
6496 /* Do not add ASMs with errors to the gimple IL stream. */
6497 if (ret != GS_ERROR)
6498 {
6499 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6500 inputs, outputs, clobbers, labels);
6501
6502 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6503 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6504 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6505
6506 gimplify_seq_add_stmt (pre_p, stmt);
6507 }
6508
6509 return ret;
6510 }
6511
6512 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6513 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6514 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6515 return to this function.
6516
6517 FIXME should we complexify the prequeue handling instead? Or use flags
6518 for all the cleanups and let the optimizer tighten them up? The current
6519 code seems pretty fragile; it will break on a cleanup within any
6520 non-conditional nesting. But any such nesting would be broken, anyway;
6521 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6522 and continues out of it. We can do that at the RTL level, though, so
6523 having an optimizer to tighten up try/finally regions would be a Good
6524 Thing. */
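/* Rough sketch (not in the original source): a gimplified body of

     stmt1; WCE <cleanup>; stmt2; stmt3;

   is rewritten below as

     stmt1; try { stmt2; stmt3; } finally { cleanup }

   while a WCE that ends the sequence simply inlines its cleanup.  */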
6525
6526 static enum gimplify_status
6527 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6528 {
6529 gimple_stmt_iterator iter;
6530 gimple_seq body_sequence = NULL;
6531
6532 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6533
6534 /* We only care about the number of conditions between the innermost
6535 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6536 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6537 int old_conds = gimplify_ctxp->conditions;
6538 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6539 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6540 gimplify_ctxp->conditions = 0;
6541 gimplify_ctxp->conditional_cleanups = NULL;
6542 gimplify_ctxp->in_cleanup_point_expr = true;
6543
6544 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6545
6546 gimplify_ctxp->conditions = old_conds;
6547 gimplify_ctxp->conditional_cleanups = old_cleanups;
6548 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6549
6550 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6551 {
6552 gimple *wce = gsi_stmt (iter);
6553
6554 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6555 {
6556 if (gsi_one_before_end_p (iter))
6557 {
6558 /* Note that gsi_insert_seq_before and gsi_remove do not
6559 scan operands, unlike some other sequence mutators. */
6560 if (!gimple_wce_cleanup_eh_only (wce))
6561 gsi_insert_seq_before_without_update (&iter,
6562 gimple_wce_cleanup (wce),
6563 GSI_SAME_STMT);
6564 gsi_remove (&iter, true);
6565 break;
6566 }
6567 else
6568 {
6569 gtry *gtry;
6570 gimple_seq seq;
6571 enum gimple_try_flags kind;
6572
6573 if (gimple_wce_cleanup_eh_only (wce))
6574 kind = GIMPLE_TRY_CATCH;
6575 else
6576 kind = GIMPLE_TRY_FINALLY;
6577 seq = gsi_split_seq_after (iter);
6578
6579 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6580 /* Do not use gsi_replace here, as it may scan operands.
6581 We want to do a simple structural modification only. */
6582 gsi_set_stmt (&iter, gtry);
6583 iter = gsi_start (gtry->eval);
6584 }
6585 }
6586 else
6587 gsi_next (&iter);
6588 }
6589
6590 gimplify_seq_add_seq (pre_p, body_sequence);
6591 if (temp)
6592 {
6593 *expr_p = temp;
6594 return GS_OK;
6595 }
6596 else
6597 {
6598 *expr_p = NULL;
6599 return GS_ALL_DONE;
6600 }
6601 }
6602
6603 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6604 is the cleanup action required. EH_ONLY is true if the cleanup should
6605 only be executed if an exception is thrown, not on normal exit.
6606 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6607 only valid for clobbers. */
6608
6609 static void
6610 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6611 bool force_uncond = false)
6612 {
6613 gimple *wce;
6614 gimple_seq cleanup_stmts = NULL;
6615
6616 /* Errors can result in improperly nested cleanups, which results in
6617 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6618 if (seen_error ())
6619 return;
6620
6621 if (gimple_conditional_context ())
6622 {
6623 /* If we're in a conditional context, this is more complex. We only
6624 want to run the cleanup if we actually ran the initialization that
6625 necessitates it, but we want to run it after the end of the
6626 conditional context. So we wrap the try/finally around the
6627 condition and use a flag to determine whether or not to actually
6628 run the destructor. Thus
6629
6630 test ? f(A()) : 0
6631
6632 becomes (approximately)
6633
6634 flag = 0;
6635 try {
6636 if (test) { A::A(temp); flag = 1; val = f(temp); }
6637 else { val = 0; }
6638 } finally {
6639 if (flag) A::~A(temp);
6640 }
6641 val
6642 */
6643 if (force_uncond)
6644 {
6645 gimplify_stmt (&cleanup, &cleanup_stmts);
6646 wce = gimple_build_wce (cleanup_stmts);
6647 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6648 }
6649 else
6650 {
6651 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6652 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6653 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6654
6655 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6656 gimplify_stmt (&cleanup, &cleanup_stmts);
6657 wce = gimple_build_wce (cleanup_stmts);
6658
6659 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6660 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6661 gimplify_seq_add_stmt (pre_p, ftrue);
6662
6663 /* Because of this manipulation, and the EH edges that jump
6664 threading cannot redirect, the temporary (VAR) will appear
6665 to be used uninitialized. Don't warn. */
6666 TREE_NO_WARNING (var) = 1;
6667 }
6668 }
6669 else
6670 {
6671 gimplify_stmt (&cleanup, &cleanup_stmts);
6672 wce = gimple_build_wce (cleanup_stmts);
6673 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6674 gimplify_seq_add_stmt (pre_p, wce);
6675 }
6676 }
6677
6678 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
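/* Illustrative sketch (not from the source): at statement context,

     TARGET_EXPR <D.1234, f ()>

   is lowered to D.1234 = f () (or to the bare evaluation of a void
   initializer), any TARGET_EXPR_CLEANUP is pushed for the slot, and
   *EXPR_P is rewritten to D.1234.  */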
6679
6680 static enum gimplify_status
6681 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6682 {
6683 tree targ = *expr_p;
6684 tree temp = TARGET_EXPR_SLOT (targ);
6685 tree init = TARGET_EXPR_INITIAL (targ);
6686 enum gimplify_status ret;
6687
6688 bool unpoison_empty_seq = false;
6689 gimple_stmt_iterator unpoison_it;
6690
6691 if (init)
6692 {
6693 tree cleanup = NULL_TREE;
6694
6695 /* TARGET_EXPR temps aren't part of the enclosing block, so add this
6696 one to the temps list. Also handle variable-length TARGET_EXPRs. */
6697 if (!poly_int_tree_p (DECL_SIZE (temp)))
6698 {
6699 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6700 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6701 gimplify_vla_decl (temp, pre_p);
6702 }
6703 else
6704 {
6705 /* Save the location where we need to place the unpoisoning; the
6706 variable may later turn out to need to live in memory. */
6707 unpoison_it = gsi_last (*pre_p);
6708 unpoison_empty_seq = gsi_end_p (unpoison_it);
6709
6710 gimple_add_tmp_var (temp);
6711 }
6712
6713 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6714 expression is supposed to initialize the slot. */
6715 if (VOID_TYPE_P (TREE_TYPE (init)))
6716 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6717 else
6718 {
6719 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6720 init = init_expr;
6721 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6722 init = NULL;
6723 ggc_free (init_expr);
6724 }
6725 if (ret == GS_ERROR)
6726 {
6727 /* PR c++/28266 Make sure this is expanded only once. */
6728 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6729 return GS_ERROR;
6730 }
6731 if (init)
6732 gimplify_and_add (init, pre_p);
6733
6734 /* If needed, push the cleanup for the temp. */
6735 if (TARGET_EXPR_CLEANUP (targ))
6736 {
6737 if (CLEANUP_EH_ONLY (targ))
6738 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6739 CLEANUP_EH_ONLY (targ), pre_p);
6740 else
6741 cleanup = TARGET_EXPR_CLEANUP (targ);
6742 }
6743
6744 /* Add a clobber for the temporary going out of scope, like
6745 gimplify_bind_expr. */
6746 if (gimplify_ctxp->in_cleanup_point_expr
6747 && needs_to_live_in_memory (temp))
6748 {
6749 if (flag_stack_reuse == SR_ALL)
6750 {
6751 tree clobber = build_clobber (TREE_TYPE (temp));
6752 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6753 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6754 }
6755 if (asan_poisoned_variables
6756 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6757 && !TREE_STATIC (temp)
6758 && dbg_cnt (asan_use_after_scope)
6759 && !gimplify_omp_ctxp)
6760 {
6761 tree asan_cleanup = build_asan_poison_call_expr (temp);
6762 if (asan_cleanup)
6763 {
6764 if (unpoison_empty_seq)
6765 unpoison_it = gsi_start (*pre_p);
6766
6767 asan_poison_variable (temp, false, &unpoison_it,
6768 unpoison_empty_seq);
6769 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6770 }
6771 }
6772 }
6773 if (cleanup)
6774 gimple_push_cleanup (temp, cleanup, false, pre_p);
6775
6776 /* Only expand this once. */
6777 TREE_OPERAND (targ, 3) = init;
6778 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6779 }
6780 else
6781 /* We should have expanded this before. */
6782 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6783
6784 *expr_p = temp;
6785 return GS_OK;
6786 }
6787
6788 /* Gimplification of expression trees. */
6789
6790 /* Gimplify an expression which appears at statement context. The
6791 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6792 NULL, a new sequence is allocated.
6793
6794 Return true if we actually added a statement to the queue. */
6795
6796 bool
6797 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6798 {
6799 gimple_seq_node last;
6800
6801 last = gimple_seq_last (*seq_p);
6802 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6803 return last != gimple_seq_last (*seq_p);
6804 }
6805
6806 /* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
6807 parallels. If entries already exist, force them to be some flavor of private.
6808 If there is no enclosing parallel, do nothing. */
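/* For instance (illustrative): the gimplified size temporary of a VLA
   referenced inside

     #pragma omp parallel

   is entered as firstprivate in each enclosing parallel context that
   does not already privatize it.  */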
6809
6810 void
6811 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6812 {
6813 splay_tree_node n;
6814
6815 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6816 return;
6817
6818 do
6819 {
6820 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6821 if (n != NULL)
6822 {
6823 if (n->value & GOVD_SHARED)
6824 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6825 else if (n->value & GOVD_MAP)
6826 n->value |= GOVD_MAP_TO_ONLY;
6827 else
6828 return;
6829 }
6830 else if ((ctx->region_type & ORT_TARGET) != 0)
6831 {
6832 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6833 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6834 else
6835 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6836 }
6837 else if (ctx->region_type != ORT_WORKSHARE
6838 && ctx->region_type != ORT_TASKGROUP
6839 && ctx->region_type != ORT_SIMD
6840 && ctx->region_type != ORT_ACC
6841 && !(ctx->region_type & ORT_TARGET_DATA))
6842 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6843
6844 ctx = ctx->outer_context;
6845 }
6846 while (ctx);
6847 }
6848
6849 /* Similarly for each of the type sizes of TYPE. */
6850
6851 static void
6852 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6853 {
6854 if (type == NULL || type == error_mark_node)
6855 return;
6856 type = TYPE_MAIN_VARIANT (type);
6857
6858 if (ctx->privatized_types->add (type))
6859 return;
6860
6861 switch (TREE_CODE (type))
6862 {
6863 case INTEGER_TYPE:
6864 case ENUMERAL_TYPE:
6865 case BOOLEAN_TYPE:
6866 case REAL_TYPE:
6867 case FIXED_POINT_TYPE:
6868 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6869 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6870 break;
6871
6872 case ARRAY_TYPE:
6873 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6874 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6875 break;
6876
6877 case RECORD_TYPE:
6878 case UNION_TYPE:
6879 case QUAL_UNION_TYPE:
6880 {
6881 tree field;
6882 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6883 if (TREE_CODE (field) == FIELD_DECL)
6884 {
6885 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6886 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6887 }
6888 }
6889 break;
6890
6891 case POINTER_TYPE:
6892 case REFERENCE_TYPE:
6893 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6894 break;
6895
6896 default:
6897 break;
6898 }
6899
6900 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6901 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6902 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6903 }
6904
6905 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6906
6907 static void
6908 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6909 {
6910 splay_tree_node n;
6911 unsigned int nflags;
6912 tree t;
6913
6914 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6915 return;
6916
6917 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6918 there are constructors involved somewhere. The exception is a shared
6919 clause, where nothing is privatized. */
6920 if ((flags & GOVD_SHARED) == 0
6921 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6922 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6923 flags |= GOVD_SEEN;
6924
6925 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6926 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6927 {
6928 /* We shouldn't be re-adding the decl with the same data
6929 sharing class. */
6930 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6931 nflags = n->value | flags;
6932 /* The only combination of data sharing classes we should see is
6933 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6934 reduction variables to be used in data sharing clauses. */
6935 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6936 || ((nflags & GOVD_DATA_SHARE_CLASS)
6937 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6938 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6939 n->value = nflags;
6940 return;
6941 }
6942
6943 /* When adding a variable-sized variable, we have to handle all sorts
6944 of additional bits of data: the pointer replacement variable, and
6945 the parameters of the type. */
6946 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6947 {
6948 /* Add the pointer replacement variable as PRIVATE if the variable
6949 replacement is private, else FIRSTPRIVATE since we'll need the
6950 address of the original variable either for SHARED, or for the
6951 copy into or out of the context. */
6952 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6953 {
6954 if (flags & GOVD_MAP)
6955 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6956 else if (flags & GOVD_PRIVATE)
6957 nflags = GOVD_PRIVATE;
6958 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6959 && (flags & GOVD_FIRSTPRIVATE))
6960 || (ctx->region_type == ORT_TARGET_DATA
6961 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
6962 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6963 else
6964 nflags = GOVD_FIRSTPRIVATE;
6965 nflags |= flags & GOVD_SEEN;
6966 t = DECL_VALUE_EXPR (decl);
6967 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6968 t = TREE_OPERAND (t, 0);
6969 gcc_assert (DECL_P (t));
6970 omp_add_variable (ctx, t, nflags);
6971 }
6972
6973 /* Add all of the variable and type parameters (which should have
6974 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6975 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6976 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6977 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6978
6979 /* The variable-sized variable itself is never SHARED, only some form
6980 of PRIVATE. The sharing would take place via the pointer variable
6981 which we remapped above. */
6982 if (flags & GOVD_SHARED)
6983 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6984 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6985
6986 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6987 alloca statement we generate for the variable, so make sure it
6988 is available. This isn't automatically needed for the SHARED
6989 case, since we won't be allocating local storage then.
6990 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6991 in this case omp_notice_variable will be called later
6992 on when it is gimplified. */
6993 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6994 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6995 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6996 }
6997 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6998 && lang_hooks.decls.omp_privatize_by_reference (decl))
6999 {
7000 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7001
7002 /* Similar to the direct variable sized case above, we'll need the
7003 size of references being privatized. */
7004 if ((flags & GOVD_SHARED) == 0)
7005 {
7006 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7007 if (DECL_P (t))
7008 omp_notice_variable (ctx, t, true);
7009 }
7010 }
7011
7012 if (n != NULL)
7013 n->value |= flags;
7014 else
7015 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7016
7017 /* For reduction clauses in OpenACC loop directives, by default create a
7018 copy clause on the enclosing parallel construct for carrying back the
7019 results. */
7020 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7021 {
7022 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7023 while (outer_ctx)
7024 {
7025 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7026 if (n != NULL)
7027 {
7028 /* Ignore local variables and explicitly declared clauses. */
7029 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7030 break;
7031 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7032 {
7033 /* According to the OpenACC spec, such a reduction variable
7034 should already have a copy map on a kernels construct;
7035 verify that here. */
7036 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7037 && (n->value & GOVD_MAP));
7038 }
7039 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7040 {
7041 /* Remove firstprivate and make it a copy map. */
7042 n->value &= ~GOVD_FIRSTPRIVATE;
7043 n->value |= GOVD_MAP;
7044 }
7045 }
7046 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7047 {
7048 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7049 GOVD_MAP | GOVD_SEEN);
7050 break;
7051 }
7052 outer_ctx = outer_ctx->outer_context;
7053 }
7054 }
7055 }
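/* Illustrative sketch (hypothetical user source, not part of GCC): given
   OpenACC code such as

     float sum = 0.0f;
     #pragma acc parallel loop reduction (+:sum)
     for (int i = 0; i < n; i++)
       sum += a[i];

   the reduction on the loop by default gets a copy (GOVD_MAP) clause
   registered on the enclosing parallel context by the code above, so the
   reduction result is carried back to the host.  */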
7056
7057 /* Notice a threadprivate variable DECL used in OMP context CTX.
7058 This just prints out diagnostics about threadprivate variable uses
7059 in untied tasks, target regions and 'order(concurrent)' regions.
7060 If DECL2 is non-NULL, prevent this diagnostic on that variable. */
7061
7062 static bool
7063 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7064 tree decl2)
7065 {
7066 splay_tree_node n;
7067 struct gimplify_omp_ctx *octx;
7068
7069 for (octx = ctx; octx; octx = octx->outer_context)
7070 if ((octx->region_type & ORT_TARGET) != 0
7071 || octx->order_concurrent)
7072 {
7073 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7074 if (n == NULL)
7075 {
7076 if (octx->order_concurrent)
7077 {
7078 error ("threadprivate variable %qE used in a region with"
7079 " %<order(concurrent)%> clause", DECL_NAME (decl));
7080 error_at (octx->location, "enclosing region");
7081 }
7082 else
7083 {
7084 error ("threadprivate variable %qE used in target region",
7085 DECL_NAME (decl));
7086 error_at (octx->location, "enclosing target region");
7087 }
7088 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7089 }
7090 if (decl2)
7091 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7092 }
7093
7094 if (ctx->region_type != ORT_UNTIED_TASK)
7095 return false;
7096 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7097 if (n == NULL)
7098 {
7099 error ("threadprivate variable %qE used in untied task",
7100 DECL_NAME (decl));
7101 error_at (ctx->location, "enclosing task");
7102 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7103 }
7104 if (decl2)
7105 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7106 return false;
7107 }
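/* For illustration only (hypothetical user code, not part of GCC), a
   testcase expected to trigger the untied-task diagnostic above:

     int tp;
     #pragma omp threadprivate (tp)

     void f (void)
     {
       #pragma omp task untied
       tp++;    // error: threadprivate variable 'tp' used in untied task
     }
*/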
7108
7109 /* Return true if global var DECL is device resident. */
7110
7111 static bool
7112 device_resident_p (tree decl)
7113 {
7114 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7115
7116 if (!attr)
7117 return false;
7118
7119 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7120 {
7121 tree c = TREE_VALUE (t);
7122 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7123 return true;
7124 }
7125
7126 return false;
7127 }
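/* Illustrative example (hypothetical source, not part of GCC): a
   declaration such as

     int v;
     #pragma acc declare device_resident (v)

   makes the front end attach an "oacc declare target" attribute whose
   clause chain contains a GOMP_MAP_DEVICE_RESIDENT mapping, which is what
   the loop above looks for.  */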
7128
7129 /* Return true if DECL has an ACC DECLARE attribute. */
7130
7131 static bool
7132 is_oacc_declared (tree decl)
7133 {
7134 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7135 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7136 return declared != NULL_TREE;
7137 }
7138
7139 /* Determine outer default flags for DECL mentioned in an OMP region
7140 but not declared in an enclosing clause.
7141
7142 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7143 remapped firstprivate instead of shared. To some extent this is
7144 addressed in omp_firstprivatize_type_sizes, but not
7145 effectively. */
7146
7147 static unsigned
7148 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7149 bool in_code, unsigned flags)
7150 {
7151 enum omp_clause_default_kind default_kind = ctx->default_kind;
7152 enum omp_clause_default_kind kind;
7153
7154 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7155 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7156 default_kind = kind;
7157 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7158 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7159
7160 switch (default_kind)
7161 {
7162 case OMP_CLAUSE_DEFAULT_NONE:
7163 {
7164 const char *rtype;
7165
7166 if (ctx->region_type & ORT_PARALLEL)
7167 rtype = "parallel";
7168 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7169 rtype = "taskloop";
7170 else if (ctx->region_type & ORT_TASK)
7171 rtype = "task";
7172 else if (ctx->region_type & ORT_TEAMS)
7173 rtype = "teams";
7174 else
7175 gcc_unreachable ();
7176
7177 error ("%qE not specified in enclosing %qs",
7178 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7179 error_at (ctx->location, "enclosing %qs", rtype);
7180 }
7181 /* FALLTHRU */
7182 case OMP_CLAUSE_DEFAULT_SHARED:
7183 flags |= GOVD_SHARED;
7184 break;
7185 case OMP_CLAUSE_DEFAULT_PRIVATE:
7186 flags |= GOVD_PRIVATE;
7187 break;
7188 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7189 flags |= GOVD_FIRSTPRIVATE;
7190 break;
7191 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7192 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7193 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7194 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7195 {
7196 omp_notice_variable (octx, decl, in_code);
7197 for (; octx; octx = octx->outer_context)
7198 {
7199 splay_tree_node n2;
7200
7201 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7202 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7203 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7204 continue;
7205 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7206 {
7207 flags |= GOVD_FIRSTPRIVATE;
7208 goto found_outer;
7209 }
7210 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7211 {
7212 flags |= GOVD_SHARED;
7213 goto found_outer;
7214 }
7215 }
7216 }
7217
7218 if (TREE_CODE (decl) == PARM_DECL
7219 || (!is_global_var (decl)
7220 && DECL_CONTEXT (decl) == current_function_decl))
7221 flags |= GOVD_FIRSTPRIVATE;
7222 else
7223 flags |= GOVD_SHARED;
7224 found_outer:
7225 break;
7226
7227 default:
7228 gcc_unreachable ();
7229 }
7230
7231 return flags;
7232 }
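/* A sketch of user code exercising the OMP_CLAUSE_DEFAULT_NONE case above
   (hypothetical, for illustration only):

     int x = 0;
     #pragma omp parallel default(none)
     x++;    // error: 'x' not specified in enclosing 'parallel'

   Adding e.g. shared(x) or firstprivate(x) to the directive supplies the
   data-sharing attribute explicitly and avoids the diagnostic.  */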
7233
7234
7235 /* Determine outer default flags for DECL mentioned in an OACC region
7236 but not declared in an enclosing clause. */
7237
7238 static unsigned
7239 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7240 {
7241 const char *rkind;
7242 bool on_device = false;
7243 bool is_private = false;
7244 bool declared = is_oacc_declared (decl);
7245 tree type = TREE_TYPE (decl);
7246
7247 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7248 type = TREE_TYPE (type);
7249
7250 /* For Fortran COMMON blocks, only the variables used in those blocks are
7251 transferred and remapped. The block itself will have a private clause to
7252 avoid transferring the data twice.
7253 The hook evaluates to false by default. For a variable in Fortran's COMMON
7254 or EQUIVALENCE block, it returns 'true' (as we have shared=false), since
7255 only the variables in such a COMMON/EQUIVALENCE block shall be privatized,
7256 not the whole block. For C++ and Fortran, it can also be true under
7257 certain other conditions, if DECL_HAS_VALUE_EXPR. */
7258 if (RECORD_OR_UNION_TYPE_P (type))
7259 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7260
7261 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7262 && is_global_var (decl)
7263 && device_resident_p (decl)
7264 && !is_private)
7265 {
7266 on_device = true;
7267 flags |= GOVD_MAP_TO_ONLY;
7268 }
7269
7270 switch (ctx->region_type)
7271 {
7272 case ORT_ACC_KERNELS:
7273 rkind = "kernels";
7274
7275 if (is_private)
7276 flags |= GOVD_FIRSTPRIVATE;
7277 else if (AGGREGATE_TYPE_P (type))
7278 {
7279 /* Aggregates default to 'present_or_copy', or 'present'. */
7280 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7281 flags |= GOVD_MAP;
7282 else
7283 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7284 }
7285 else
7286 /* Scalars default to 'copy'. */
7287 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7288
7289 break;
7290
7291 case ORT_ACC_PARALLEL:
7292 case ORT_ACC_SERIAL:
7293 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7294
7295 if (is_private)
7296 flags |= GOVD_FIRSTPRIVATE;
7297 else if (on_device || declared)
7298 flags |= GOVD_MAP;
7299 else if (AGGREGATE_TYPE_P (type))
7300 {
7301 /* Aggregates default to 'present_or_copy', or 'present'. */
7302 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7303 flags |= GOVD_MAP;
7304 else
7305 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7306 }
7307 else
7308 /* Scalars default to 'firstprivate'. */
7309 flags |= GOVD_FIRSTPRIVATE;
7310
7311 break;
7312
7313 default:
7314 gcc_unreachable ();
7315 }
7316
7317 if (DECL_ARTIFICIAL (decl))
7318 ; /* We can get compiler-generated decls, and should not complain
7319 about them. */
7320 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7321 {
7322 error ("%qE not specified in enclosing OpenACC %qs construct",
7323 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7324 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7325 }
7326 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7327 ; /* Handled above. */
7328 else
7329 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7330
7331 return flags;
7332 }
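/* Illustrative summary of the defaults computed above, assuming plain
   directives without a default clause (hypothetical source):

     int s;        // scalar
     int a[100];   // aggregate

     #pragma acc kernels    // s defaults to 'copy', a to 'present_or_copy'
     ...
     #pragma acc parallel   // s defaults to 'firstprivate',
     ...                    // a to 'present_or_copy'

   With default(present), aggregates get 'present' instead.  */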
7333
7334 /* Record the fact that DECL was used within the OMP context CTX.
7335 IN_CODE is true when real code uses DECL, and false when we should
7336 merely emit default(none) errors. Return true if DECL is going to
7337 be remapped and thus DECL shouldn't be gimplified into its
7338 DECL_VALUE_EXPR (if any). */
7339
7340 static bool
7341 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7342 {
7343 splay_tree_node n;
7344 unsigned flags = in_code ? GOVD_SEEN : 0;
7345 bool ret = false, shared;
7346
7347 if (error_operand_p (decl))
7348 return false;
7349
7350 if (ctx->region_type == ORT_NONE)
7351 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7352
7353 if (is_global_var (decl))
7354 {
7355 /* Threadprivate variables are predetermined. */
7356 if (DECL_THREAD_LOCAL_P (decl))
7357 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7358
7359 if (DECL_HAS_VALUE_EXPR_P (decl))
7360 {
7361 if (ctx->region_type & ORT_ACC)
7362 /* For OpenACC, defer expansion of the value to avoid transferring
7363 privatized common block data instead of the implicitly or explicitly
7364 transferred variables that live in common blocks. */
7365 ;
7366 else
7367 {
7368 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7369
7370 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7371 return omp_notice_threadprivate_variable (ctx, decl, value);
7372 }
7373 }
7374
7375 if (gimplify_omp_ctxp->outer_context == NULL
7376 && VAR_P (decl)
7377 && oacc_get_fn_attrib (current_function_decl))
7378 {
7379 location_t loc = DECL_SOURCE_LOCATION (decl);
7380
7381 if (lookup_attribute ("omp declare target link",
7382 DECL_ATTRIBUTES (decl)))
7383 {
7384 error_at (loc,
7385 "%qE with %<link%> clause used in %<routine%> function",
7386 DECL_NAME (decl));
7387 return false;
7388 }
7389 else if (!lookup_attribute ("omp declare target",
7390 DECL_ATTRIBUTES (decl)))
7391 {
7392 error_at (loc,
7393 "%qE requires a %<declare%> directive for use "
7394 "in a %<routine%> function", DECL_NAME (decl));
7395 return false;
7396 }
7397 }
7398 }
7399
7400 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7401 if ((ctx->region_type & ORT_TARGET) != 0)
7402 {
7403 if (ctx->region_type & ORT_ACC)
7404 /* For OpenACC, as remarked above, defer expansion. */
7405 shared = false;
7406 else
7407 shared = true;
7408
7409 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7410 if (n == NULL)
7411 {
7412 unsigned nflags = flags;
7413 if ((ctx->region_type & ORT_ACC) == 0)
7414 {
7415 bool is_declare_target = false;
7416 if (is_global_var (decl)
7417 && varpool_node::get_create (decl)->offloadable)
7418 {
7419 struct gimplify_omp_ctx *octx;
7420 for (octx = ctx->outer_context;
7421 octx; octx = octx->outer_context)
7422 {
7423 n = splay_tree_lookup (octx->variables,
7424 (splay_tree_key)decl);
7425 if (n
7426 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7427 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7428 break;
7429 }
7430 is_declare_target = octx == NULL;
7431 }
7432 if (!is_declare_target)
7433 {
7434 int gdmk;
7435 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7436 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7437 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7438 == POINTER_TYPE)))
7439 gdmk = GDMK_POINTER;
7440 else if (lang_hooks.decls.omp_scalar_p (decl))
7441 gdmk = GDMK_SCALAR;
7442 else
7443 gdmk = GDMK_AGGREGATE;
7444 if (ctx->defaultmap[gdmk] == 0)
7445 {
7446 tree d = lang_hooks.decls.omp_report_decl (decl);
7447 error ("%qE not specified in enclosing %<target%>",
7448 DECL_NAME (d));
7449 error_at (ctx->location, "enclosing %<target%>");
7450 }
7451 else if (ctx->defaultmap[gdmk]
7452 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7453 nflags |= ctx->defaultmap[gdmk];
7454 else
7455 {
7456 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7457 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7458 }
7459 }
7460 }
7461
7462 struct gimplify_omp_ctx *octx = ctx->outer_context;
7463 if ((ctx->region_type & ORT_ACC) && octx)
7464 {
7465 /* Look in outer OpenACC contexts, to see if there's a
7466 data attribute for this variable. */
7467 omp_notice_variable (octx, decl, in_code);
7468
7469 for (; octx; octx = octx->outer_context)
7470 {
7471 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7472 break;
7473 splay_tree_node n2
7474 = splay_tree_lookup (octx->variables,
7475 (splay_tree_key) decl);
7476 if (n2)
7477 {
7478 if (octx->region_type == ORT_ACC_HOST_DATA)
7479 error ("variable %qE declared in enclosing "
7480 "%<host_data%> region", DECL_NAME (decl));
7481 nflags |= GOVD_MAP;
7482 if (octx->region_type == ORT_ACC_DATA
7483 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7484 nflags |= GOVD_MAP_0LEN_ARRAY;
7485 goto found_outer;
7486 }
7487 }
7488 }
7489
7490 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7491 | GOVD_MAP_ALLOC_ONLY)) == flags)
7492 {
7493 tree type = TREE_TYPE (decl);
7494
7495 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7496 && lang_hooks.decls.omp_privatize_by_reference (decl))
7497 type = TREE_TYPE (type);
7498 if (!lang_hooks.types.omp_mappable_type (type))
7499 {
7500 error ("%qD referenced in target region does not have "
7501 "a mappable type", decl);
7502 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7503 }
7504 else
7505 {
7506 if ((ctx->region_type & ORT_ACC) != 0)
7507 nflags = oacc_default_clause (ctx, decl, flags);
7508 else
7509 nflags |= GOVD_MAP;
7510 }
7511 }
7512 found_outer:
7513 omp_add_variable (ctx, decl, nflags);
7514 }
7515 else
7516 {
7517 /* If nothing changed, there's nothing left to do. */
7518 if ((n->value & flags) == flags)
7519 return ret;
7520 flags |= n->value;
7521 n->value = flags;
7522 }
7523 goto do_outer;
7524 }
7525
7526 if (n == NULL)
7527 {
7528 if (ctx->region_type == ORT_WORKSHARE
7529 || ctx->region_type == ORT_TASKGROUP
7530 || ctx->region_type == ORT_SIMD
7531 || ctx->region_type == ORT_ACC
7532 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7533 goto do_outer;
7534
7535 flags = omp_default_clause (ctx, decl, in_code, flags);
7536
7537 if ((flags & GOVD_PRIVATE)
7538 && lang_hooks.decls.omp_private_outer_ref (decl))
7539 flags |= GOVD_PRIVATE_OUTER_REF;
7540
7541 omp_add_variable (ctx, decl, flags);
7542
7543 shared = (flags & GOVD_SHARED) != 0;
7544 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7545 goto do_outer;
7546 }
7547
7548 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7549 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7550 && DECL_SIZE (decl))
7551 {
7552 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7553 {
7554 splay_tree_node n2;
7555 tree t = DECL_VALUE_EXPR (decl);
7556 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7557 t = TREE_OPERAND (t, 0);
7558 gcc_assert (DECL_P (t));
7559 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7560 n2->value |= GOVD_SEEN;
7561 }
7562 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7563 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7564 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7565 != INTEGER_CST))
7566 {
7567 splay_tree_node n2;
7568 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7569 gcc_assert (DECL_P (t));
7570 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7571 if (n2)
7572 omp_notice_variable (ctx, t, true);
7573 }
7574 }
7575
7576 if (ctx->region_type & ORT_ACC)
7577 /* For OpenACC, as remarked above, defer expansion. */
7578 shared = false;
7579 else
7580 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7581 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7582
7583 /* If nothing changed, there's nothing left to do. */
7584 if ((n->value & flags) == flags)
7585 return ret;
7586 flags |= n->value;
7587 n->value = flags;
7588
7589 do_outer:
7590 /* If the variable is private in the current context, then we don't
7591 need to propagate anything to an outer context. */
7592 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7593 return ret;
7594 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7595 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7596 return ret;
7597 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7598 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7599 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7600 return ret;
7601 if (ctx->outer_context
7602 && omp_notice_variable (ctx->outer_context, decl, in_code))
7603 return true;
7604 return ret;
7605 }
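/* For illustration (hypothetical source; a sketch of the OpenMP default
   data-mapping rules implemented above):

     int s = 1;
     int a[8];
     #pragma omp target
     { s++; a[0] = s; }

   Here the scalar 's' is implicitly made firstprivate (via
   ctx->defaultmap[GDMK_SCALAR]), while the aggregate 'a' is implicitly
   mapped tofrom (GOVD_MAP).  */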
7606
7607 /* Verify that DECL is private within CTX. If there's specific information
7608 to the contrary in the innermost scope, generate an error. */
7609
7610 static bool
7611 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7612 {
7613 splay_tree_node n;
7614
7615 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7616 if (n != NULL)
7617 {
7618 if (n->value & GOVD_SHARED)
7619 {
7620 if (ctx == gimplify_omp_ctxp)
7621 {
7622 if (simd)
7623 error ("iteration variable %qE is predetermined linear",
7624 DECL_NAME (decl));
7625 else
7626 error ("iteration variable %qE should be private",
7627 DECL_NAME (decl));
7628 n->value = GOVD_PRIVATE;
7629 return true;
7630 }
7631 else
7632 return false;
7633 }
7634 else if ((n->value & GOVD_EXPLICIT) != 0
7635 && (ctx == gimplify_omp_ctxp
7636 || (ctx->region_type == ORT_COMBINED_PARALLEL
7637 && gimplify_omp_ctxp->outer_context == ctx)))
7638 {
7639 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7640 error ("iteration variable %qE should not be firstprivate",
7641 DECL_NAME (decl));
7642 else if ((n->value & GOVD_REDUCTION) != 0)
7643 error ("iteration variable %qE should not be reduction",
7644 DECL_NAME (decl));
7645 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7646 error ("iteration variable %qE should not be linear",
7647 DECL_NAME (decl));
7648 }
7649 return (ctx == gimplify_omp_ctxp
7650 || (ctx->region_type == ORT_COMBINED_PARALLEL
7651 && gimplify_omp_ctxp->outer_context == ctx));
7652 }
7653
7654 if (ctx->region_type != ORT_WORKSHARE
7655 && ctx->region_type != ORT_TASKGROUP
7656 && ctx->region_type != ORT_SIMD
7657 && ctx->region_type != ORT_ACC)
7658 return false;
7659 else if (ctx->outer_context)
7660 return omp_is_private (ctx->outer_context, decl, simd);
7661 return false;
7662 }
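/* Illustrative testcase for the checks above (hypothetical source):

     int i;
     #pragma omp parallel for shared(i)
     for (i = 0; i < 64; i++)   // error: iteration variable 'i' should
       ;                        //        be private
*/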
7663
7664 /* Return true if DECL is private within a parallel region
7665 that binds to the current construct's context or in parallel
7666 region's REDUCTION clause. */
7667
7668 static bool
7669 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7670 {
7671 splay_tree_node n;
7672
7673 do
7674 {
7675 ctx = ctx->outer_context;
7676 if (ctx == NULL)
7677 {
7678 if (is_global_var (decl))
7679 return false;
7680
7681 /* References might be private, but they might be shared too;
7682 when checking for copyprivate, assume they might be
7683 private, otherwise assume they might be shared. */
7684 if (copyprivate)
7685 return true;
7686
7687 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7688 return false;
7689
7690 /* Treat C++ privatized non-static data members outside
7691 of the privatization the same. */
7692 if (omp_member_access_dummy_var (decl))
7693 return false;
7694
7695 return true;
7696 }
7697
7698 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7699
7700 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7701 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7702 continue;
7703
7704 if (n != NULL)
7705 {
7706 if ((n->value & GOVD_LOCAL) != 0
7707 && omp_member_access_dummy_var (decl))
7708 return false;
7709 return (n->value & GOVD_SHARED) == 0;
7710 }
7711 }
7712 while (ctx->region_type == ORT_WORKSHARE
7713 || ctx->region_type == ORT_TASKGROUP
7714 || ctx->region_type == ORT_SIMD
7715 || ctx->region_type == ORT_ACC);
7716 return false;
7717 }
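/* Sketch (hypothetical source; compute () is a placeholder): one place
   omp_check_private matters is validating 'copyprivate', which requires
   the variable to be private in the binding parallel region:

     int x;
     #pragma omp parallel private(x)
     {
       #pragma omp single copyprivate(x)
       x = compute ();   // OK: x is private in the enclosing parallel
     }
*/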
7718
7719 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7720
7721 static tree
7722 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7723 {
7724 tree t = *tp;
7725
7726 /* If this is the DECL_EXPR for the decl we are looking for, return it. */
7727 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7728 return t;
7729
7730 if (IS_TYPE_OR_DECL_P (t))
7731 *walk_subtrees = 0;
7732 return NULL_TREE;
7733 }
7734
7735 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7736 lower all the depend clauses by populating the corresponding depend
7737 array. Returns 0 if there are no such depend clauses, or
7738 2 if all depend clauses should be removed, 1 otherwise. */
7739
7740 static int
7741 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7742 {
7743 tree c;
7744 gimple *g;
7745 size_t n[4] = { 0, 0, 0, 0 };
7746 bool unused[4];
7747 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7748 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7749 size_t i, j;
7750 location_t first_loc = UNKNOWN_LOCATION;
7751
7752 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7753 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7754 {
7755 switch (OMP_CLAUSE_DEPEND_KIND (c))
7756 {
7757 case OMP_CLAUSE_DEPEND_IN:
7758 i = 2;
7759 break;
7760 case OMP_CLAUSE_DEPEND_OUT:
7761 case OMP_CLAUSE_DEPEND_INOUT:
7762 i = 0;
7763 break;
7764 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7765 i = 1;
7766 break;
7767 case OMP_CLAUSE_DEPEND_DEPOBJ:
7768 i = 3;
7769 break;
7770 case OMP_CLAUSE_DEPEND_SOURCE:
7771 case OMP_CLAUSE_DEPEND_SINK:
7772 continue;
7773 default:
7774 gcc_unreachable ();
7775 }
7776 tree t = OMP_CLAUSE_DECL (c);
7777 if (first_loc == UNKNOWN_LOCATION)
7778 first_loc = OMP_CLAUSE_LOCATION (c);
7779 if (TREE_CODE (t) == TREE_LIST
7780 && TREE_PURPOSE (t)
7781 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7782 {
7783 if (TREE_PURPOSE (t) != last_iter)
7784 {
7785 tree tcnt = size_one_node;
7786 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7787 {
7788 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7789 is_gimple_val, fb_rvalue) == GS_ERROR
7790 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7791 is_gimple_val, fb_rvalue) == GS_ERROR
7792 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7793 is_gimple_val, fb_rvalue) == GS_ERROR
7794 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7795 is_gimple_val, fb_rvalue)
7796 == GS_ERROR))
7797 return 2;
7798 tree var = TREE_VEC_ELT (it, 0);
7799 tree begin = TREE_VEC_ELT (it, 1);
7800 tree end = TREE_VEC_ELT (it, 2);
7801 tree step = TREE_VEC_ELT (it, 3);
7802 tree orig_step = TREE_VEC_ELT (it, 4);
7803 tree type = TREE_TYPE (var);
7804 tree stype = TREE_TYPE (step);
7805 location_t loc = DECL_SOURCE_LOCATION (var);
7806 tree endmbegin;
7807 /* Compute count for this iterator as
7808 orig_step > 0
7809 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7810 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7811 and compute product of those for the entire depend
7812 clause. */
7813 if (POINTER_TYPE_P (type))
7814 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7815 stype, end, begin);
7816 else
7817 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7818 end, begin);
7819 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7820 step,
7821 build_int_cst (stype, 1));
7822 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7823 build_int_cst (stype, 1));
7824 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7825 unshare_expr (endmbegin),
7826 stepm1);
7827 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7828 pos, step);
7829 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7830 endmbegin, stepp1);
7831 if (TYPE_UNSIGNED (stype))
7832 {
7833 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7834 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7835 }
7836 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7837 neg, step);
7838 step = NULL_TREE;
7839 tree cond = fold_build2_loc (loc, LT_EXPR,
7840 boolean_type_node,
7841 begin, end);
7842 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7843 build_int_cst (stype, 0));
7844 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7845 end, begin);
7846 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7847 build_int_cst (stype, 0));
7848 tree osteptype = TREE_TYPE (orig_step);
7849 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7850 orig_step,
7851 build_int_cst (osteptype, 0));
7852 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7853 cond, pos, neg);
7854 cnt = fold_convert_loc (loc, sizetype, cnt);
7855 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7856 fb_rvalue) == GS_ERROR)
7857 return 2;
7858 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7859 }
7860 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7861 fb_rvalue) == GS_ERROR)
7862 return 2;
7863 last_iter = TREE_PURPOSE (t);
7864 last_count = tcnt;
7865 }
7866 if (counts[i] == NULL_TREE)
7867 counts[i] = last_count;
7868 else
7869 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7870 PLUS_EXPR, counts[i], last_count);
7871 }
7872 else
7873 n[i]++;
7874 }
7875 for (i = 0; i < 4; i++)
7876 if (counts[i])
7877 break;
7878 if (i == 4)
7879 return 0;
7880
7881 tree total = size_zero_node;
7882 for (i = 0; i < 4; i++)
7883 {
7884 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7885 if (counts[i] == NULL_TREE)
7886 counts[i] = size_zero_node;
7887 if (n[i])
7888 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7889 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7890 fb_rvalue) == GS_ERROR)
7891 return 2;
7892 total = size_binop (PLUS_EXPR, total, counts[i]);
7893 }
7894
7895 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7896 == GS_ERROR)
7897 return 2;
7898 bool is_old = unused[1] && unused[3];
7899 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7900 size_int (is_old ? 1 : 4));
7901 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7902 tree array = create_tmp_var_raw (type);
7903 TREE_ADDRESSABLE (array) = 1;
7904 if (!poly_int_tree_p (totalpx))
7905 {
7906 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7907 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7908 if (gimplify_omp_ctxp)
7909 {
7910 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7911 while (ctx
7912 && (ctx->region_type == ORT_WORKSHARE
7913 || ctx->region_type == ORT_TASKGROUP
7914 || ctx->region_type == ORT_SIMD
7915 || ctx->region_type == ORT_ACC))
7916 ctx = ctx->outer_context;
7917 if (ctx)
7918 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7919 }
7920 gimplify_vla_decl (array, pre_p);
7921 }
7922 else
7923 gimple_add_tmp_var (array);
7924 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7925 NULL_TREE);
7926 tree tem;
7927 if (!is_old)
7928 {
7929 tem = build2 (MODIFY_EXPR, void_type_node, r,
7930 build_int_cst (ptr_type_node, 0));
7931 gimplify_and_add (tem, pre_p);
7932 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7933 NULL_TREE);
7934 }
7935 tem = build2 (MODIFY_EXPR, void_type_node, r,
7936 fold_convert (ptr_type_node, total));
7937 gimplify_and_add (tem, pre_p);
7938 for (i = 1; i < (is_old ? 2 : 4); i++)
7939 {
7940 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7941 NULL_TREE, NULL_TREE);
7942 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7943 gimplify_and_add (tem, pre_p);
7944 }
7945
7946 tree cnts[4];
7947 for (j = 4; j; j--)
7948 if (!unused[j - 1])
7949 break;
7950 for (i = 0; i < 4; i++)
7951 {
7952 if (i && (i >= j || unused[i - 1]))
7953 {
7954 cnts[i] = cnts[i - 1];
7955 continue;
7956 }
7957 cnts[i] = create_tmp_var (sizetype);
7958 if (i == 0)
7959 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7960 else
7961 {
7962 tree t;
7963 if (is_old)
7964 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7965 else
7966 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7967 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7968 == GS_ERROR)
7969 return 2;
7970 g = gimple_build_assign (cnts[i], t);
7971 }
7972 gimple_seq_add_stmt (pre_p, g);
7973 }
7974
7975 last_iter = NULL_TREE;
7976 tree last_bind = NULL_TREE;
7977 tree *last_body = NULL;
7978 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7979 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7980 {
7981 switch (OMP_CLAUSE_DEPEND_KIND (c))
7982 {
7983 case OMP_CLAUSE_DEPEND_IN:
7984 i = 2;
7985 break;
7986 case OMP_CLAUSE_DEPEND_OUT:
7987 case OMP_CLAUSE_DEPEND_INOUT:
7988 i = 0;
7989 break;
7990 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7991 i = 1;
7992 break;
7993 case OMP_CLAUSE_DEPEND_DEPOBJ:
7994 i = 3;
7995 break;
7996 case OMP_CLAUSE_DEPEND_SOURCE:
7997 case OMP_CLAUSE_DEPEND_SINK:
7998 continue;
7999 default:
8000 gcc_unreachable ();
8001 }
8002 tree t = OMP_CLAUSE_DECL (c);
8003 if (TREE_CODE (t) == TREE_LIST
8004 && TREE_PURPOSE (t)
8005 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8006 {
8007 if (TREE_PURPOSE (t) != last_iter)
8008 {
8009 if (last_bind)
8010 gimplify_and_add (last_bind, pre_p);
8011 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8012 last_bind = build3 (BIND_EXPR, void_type_node,
8013 BLOCK_VARS (block), NULL, block);
8014 TREE_SIDE_EFFECTS (last_bind) = 1;
8015 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8016 tree *p = &BIND_EXPR_BODY (last_bind);
8017 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8018 {
8019 tree var = TREE_VEC_ELT (it, 0);
8020 tree begin = TREE_VEC_ELT (it, 1);
8021 tree end = TREE_VEC_ELT (it, 2);
8022 tree step = TREE_VEC_ELT (it, 3);
8023 tree orig_step = TREE_VEC_ELT (it, 4);
8024 tree type = TREE_TYPE (var);
8025 location_t loc = DECL_SOURCE_LOCATION (var);
8026 /* Emit:
8027 var = begin;
8028 goto cond_label;
8029 beg_label:
8030 ...
8031 var = var + step;
8032 cond_label:
8033 if (orig_step > 0) {
8034 if (var < end) goto beg_label;
8035 } else {
8036 if (var > end) goto beg_label;
8037 }
8038 for each iterator, with inner iterators added to
8039 the ... above. */
8040 tree beg_label = create_artificial_label (loc);
8041 tree cond_label = NULL_TREE;
8042 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8043 var, begin);
8044 append_to_statement_list_force (tem, p);
8045 tem = build_and_jump (&cond_label);
8046 append_to_statement_list_force (tem, p);
8047 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8048 append_to_statement_list (tem, p);
8049 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8050 NULL_TREE, NULL_TREE);
8051 TREE_SIDE_EFFECTS (bind) = 1;
8052 SET_EXPR_LOCATION (bind, loc);
8053 append_to_statement_list_force (bind, p);
8054 if (POINTER_TYPE_P (type))
8055 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8056 var, fold_convert_loc (loc, sizetype,
8057 step));
8058 else
8059 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8060 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8061 var, tem);
8062 append_to_statement_list_force (tem, p);
8063 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8064 append_to_statement_list (tem, p);
8065 tree cond = fold_build2_loc (loc, LT_EXPR,
8066 boolean_type_node,
8067 var, end);
8068 tree pos
8069 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8070 cond, build_and_jump (&beg_label),
8071 void_node);
8072 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8073 var, end);
8074 tree neg
8075 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8076 cond, build_and_jump (&beg_label),
8077 void_node);
8078 tree osteptype = TREE_TYPE (orig_step);
8079 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8080 orig_step,
8081 build_int_cst (osteptype, 0));
8082 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8083 cond, pos, neg);
8084 append_to_statement_list_force (tem, p);
8085 p = &BIND_EXPR_BODY (bind);
8086 }
8087 last_body = p;
8088 }
8089 last_iter = TREE_PURPOSE (t);
8090 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8091 {
8092 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8093 0), last_body);
8094 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8095 }
8096 if (error_operand_p (TREE_VALUE (t)))
8097 return 2;
8098 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8099 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8100 NULL_TREE, NULL_TREE);
8101 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8102 void_type_node, r, TREE_VALUE (t));
8103 append_to_statement_list_force (tem, last_body);
8104 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8105 void_type_node, cnts[i],
8106 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8107 append_to_statement_list_force (tem, last_body);
8108 TREE_VALUE (t) = null_pointer_node;
8109 }
8110 else
8111 {
8112 if (last_bind)
8113 {
8114 gimplify_and_add (last_bind, pre_p);
8115 last_bind = NULL_TREE;
8116 }
8117 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8118 {
8119 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8120 NULL, is_gimple_val, fb_rvalue);
8121 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8122 }
8123 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8124 return 2;
8125 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8126 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8127 is_gimple_val, fb_rvalue) == GS_ERROR)
8128 return 2;
8129 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8130 NULL_TREE, NULL_TREE);
8131 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8132 gimplify_and_add (tem, pre_p);
8133 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8134 size_int (1)));
8135 gimple_seq_add_stmt (pre_p, g);
8136 }
8137 }
8138 if (last_bind)
8139 gimplify_and_add (last_bind, pre_p);
8140 tree cond = boolean_false_node;
8141 if (is_old)
8142 {
8143 if (!unused[0])
8144 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8145 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8146 size_int (2)));
8147 if (!unused[2])
8148 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8149 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8150 cnts[2],
8151 size_binop_loc (first_loc, PLUS_EXPR,
8152 totalpx,
8153 size_int (1))));
8154 }
8155 else
8156 {
8157 tree prev = size_int (5);
8158 for (i = 0; i < 4; i++)
8159 {
8160 if (unused[i])
8161 continue;
8162 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8163 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8164 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8165 cnts[i], unshare_expr (prev)));
8166 }
8167 }
8168 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8169 build_call_expr_loc (first_loc,
8170 builtin_decl_explicit (BUILT_IN_TRAP),
8171 0), void_node);
8172 gimplify_and_add (tem, pre_p);
8173 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8174 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8175 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8176 OMP_CLAUSE_CHAIN (c) = *list_p;
8177 *list_p = c;
8178 return 1;
8179 }
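/* For illustration (hypothetical source, not part of GCC), a depend
   clause with an iterator such as

     #pragma omp task depend (iterator (i = 0 : n), in : a[i])

   is lowered by the function above into a flat array of addresses
   (&a[0] ... &a[n-1]) preceded by header entries holding the total and
   per-kind counts; the array is then handed to the runtime through an
   artificial OMP_CLAUSE_DEPEND_LAST clause.  */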
8180
8181 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8182 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8183 the struct node to insert the new mapping after (when the struct node is
8184 initially created). PREV_NODE is the first of two or three mappings for a
8185 pointer, and is either:
8186 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8187 array section.
8188 - not the node before C. This is true when we have a reference-to-pointer
8189 type (with a mapping for the reference and for the pointer), or for
8190 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8191 If SCP is non-null, the new node is inserted before *SCP.
8192 If SCP is null, the new node is inserted before PREV_NODE.
8193 The return value is:
8194 - PREV_NODE, if SCP is non-null.
8195 - The newly-created ALLOC or RELEASE node, if SCP is null.
8196 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8197 reference to a pointer. */
8198
8199 static tree
8200 insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
8201 tree prev_node, tree *scp)
8202 {
8203 enum gomp_map_kind mkind
8204 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8205 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8206
8207 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8208 tree cl = scp ? prev_node : c2;
8209 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8210 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
8211 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
8212 if (OMP_CLAUSE_CHAIN (prev_node) != c
8213 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8214 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8215 == GOMP_MAP_TO_PSET))
8216 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
8217 else
8218 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8219 if (struct_node)
8220 OMP_CLAUSE_CHAIN (struct_node) = c2;
8221
8222 /* We might need to create an additional mapping if we have a reference to a
8223 pointer (in C++). Don't do this if we have something other than a
8224 GOMP_MAP_ALWAYS_POINTER or GOMP_MAP_ATTACH_DETACH though, i.e. a GOMP_MAP_TO_PSET. */
8225 if (OMP_CLAUSE_CHAIN (prev_node) != c
8226 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8227 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8228 == GOMP_MAP_ALWAYS_POINTER)
8229 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8230 == GOMP_MAP_ATTACH_DETACH)))
8231 {
8232 tree c4 = OMP_CLAUSE_CHAIN (prev_node);
8233 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8234 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8235 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
8236 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8237 OMP_CLAUSE_CHAIN (c3) = prev_node;
8238 if (!scp)
8239 OMP_CLAUSE_CHAIN (c2) = c3;
8240 else
8241 cl = c3;
8242 }
8243
8244 if (scp)
8245 *scp = c2;
8246
8247 return cl;
8248 }
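/* Illustrative example (a sketch of typical input, not normative): for
   hypothetical source like

     struct S { int *ptr; } s;
     #pragma omp target enter data map (to: s.ptr[0:n])

   the clause list carries a GOMP_MAP_STRUCT node for 's' followed by the
   array-section mapping and its pointer companion; insert_struct_comp_map
   adds the GOMP_MAP_ALLOC (or GOMP_MAP_RELEASE, on exit data) node for
   the 's.ptr' member itself.  */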
8249
8250 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8251 and set *BITPOSP to the bit offset and *POFFSETP to the byte offset of the access.
8252 If BASE_REF is non-NULL and the containing object is a reference, set
8253 *BASE_REF to that reference before dereferencing the object.
8254 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8255 has array type, else return NULL. */
8256
8257 static tree
8258 extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
8259 poly_offset_int *poffsetp)
8260 {
8261 tree offset;
8262 poly_int64 bitsize, bitpos;
8263 machine_mode mode;
8264 int unsignedp, reversep, volatilep = 0;
8265 poly_offset_int poffset;
8266
8267 if (base_ref)
8268 {
8269 *base_ref = NULL_TREE;
8270
8271 while (TREE_CODE (base) == ARRAY_REF)
8272 base = TREE_OPERAND (base, 0);
8273
8274 if (TREE_CODE (base) == INDIRECT_REF)
8275 base = TREE_OPERAND (base, 0);
8276 }
8277 else
8278 {
8279 if (TREE_CODE (base) == ARRAY_REF)
8280 {
8281 while (TREE_CODE (base) == ARRAY_REF)
8282 base = TREE_OPERAND (base, 0);
8283 if (TREE_CODE (base) != COMPONENT_REF
8284 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
8285 return NULL_TREE;
8286 }
8287 else if (TREE_CODE (base) == INDIRECT_REF
8288 && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
8289 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8290 == REFERENCE_TYPE))
8291 base = TREE_OPERAND (base, 0);
8292 }
8293
8294 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8295 &unsignedp, &reversep, &volatilep);
8296
8297 tree orig_base = base;
8298
8299 if ((TREE_CODE (base) == INDIRECT_REF
8300 || (TREE_CODE (base) == MEM_REF
8301 && integer_zerop (TREE_OPERAND (base, 1))))
8302 && DECL_P (TREE_OPERAND (base, 0))
8303 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
8304 base = TREE_OPERAND (base, 0);
8305
8306 gcc_assert (offset == NULL_TREE || poly_int_tree_p (offset));
8307
8308 if (offset)
8309 poffset = wi::to_poly_offset (offset);
8310 else
8311 poffset = 0;
8312
8313 if (maybe_ne (bitpos, 0))
8314 poffset += bits_to_bytes_round_down (bitpos);
8315
8316 *bitposp = bitpos;
8317 *poffsetp = poffset;
8318
8319 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8320 if (base_ref && orig_base != base)
8321 *base_ref = orig_base;
8322
8323 return base;
8324 }
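/* Sketch (hypothetical source, for illustration): for an access like

     struct T { char pad[6]; int x[4]; } t;
     ... map (t.x[2]) ...

   extract_base_bit_offset strips the ARRAY_REF, returns the containing
   object 't', and reports the position of the access within it (here the
   offset of field 'x'), which callers use to group and sort mappings of
   components of the same structure.  */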
8325
8326 /* Scan the OMP clauses in *LIST_P, installing mappings into a newly
8327 created omp context as well as the enclosing omp contexts. */
8328
8329 static void
8330 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8331 enum omp_region_type region_type,
8332 enum tree_code code)
8333 {
8334 struct gimplify_omp_ctx *ctx, *outer_ctx;
8335 tree c;
8336 hash_map<tree, tree> *struct_map_to_clause = NULL;
8337 hash_set<tree> *struct_deref_set = NULL;
8338 tree *prev_list_p = NULL, *orig_list_p = list_p;
8339 int handled_depend_iterators = -1;
8340 int nowait = -1;
8341
8342 ctx = new_omp_context (region_type);
8343 ctx->code = code;
8344 outer_ctx = ctx->outer_context;
8345 if (code == OMP_TARGET)
8346 {
8347 if (!lang_GNU_Fortran ())
8348 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8349 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8350 }
8351 if (!lang_GNU_Fortran ())
8352 switch (code)
8353 {
8354 case OMP_TARGET:
8355 case OMP_TARGET_DATA:
8356 case OMP_TARGET_ENTER_DATA:
8357 case OMP_TARGET_EXIT_DATA:
8358 case OACC_DECLARE:
8359 case OACC_HOST_DATA:
8360 case OACC_PARALLEL:
8361 case OACC_KERNELS:
8362 ctx->target_firstprivatize_array_bases = true;
8363 default:
8364 break;
8365 }
8366
8367 while ((c = *list_p) != NULL)
8368 {
8369 bool remove = false;
8370 bool notice_outer = true;
8371 const char *check_non_private = NULL;
8372 unsigned int flags;
8373 tree decl;
8374
8375 switch (OMP_CLAUSE_CODE (c))
8376 {
8377 case OMP_CLAUSE_PRIVATE:
8378 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8379 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8380 {
8381 flags |= GOVD_PRIVATE_OUTER_REF;
8382 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8383 }
8384 else
8385 notice_outer = false;
8386 goto do_add;
8387 case OMP_CLAUSE_SHARED:
8388 flags = GOVD_SHARED | GOVD_EXPLICIT;
8389 goto do_add;
8390 case OMP_CLAUSE_FIRSTPRIVATE:
8391 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8392 check_non_private = "firstprivate";
8393 goto do_add;
8394 case OMP_CLAUSE_LASTPRIVATE:
8395 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8396 switch (code)
8397 {
8398 case OMP_DISTRIBUTE:
8399 error_at (OMP_CLAUSE_LOCATION (c),
8400 "conditional %<lastprivate%> clause on "
8401 "%qs construct", "distribute");
8402 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8403 break;
8404 case OMP_TASKLOOP:
8405 error_at (OMP_CLAUSE_LOCATION (c),
8406 "conditional %<lastprivate%> clause on "
8407 "%qs construct", "taskloop");
8408 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8409 break;
8410 default:
8411 break;
8412 }
8413 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8414 if (code != OMP_LOOP)
8415 check_non_private = "lastprivate";
8416 decl = OMP_CLAUSE_DECL (c);
8417 if (error_operand_p (decl))
8418 goto do_add;
8419 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8420 && !lang_hooks.decls.omp_scalar_p (decl))
8421 {
8422 error_at (OMP_CLAUSE_LOCATION (c),
8423 "non-scalar variable %qD in conditional "
8424 "%<lastprivate%> clause", decl);
8425 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8426 }
8427 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8428 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
8429 if (outer_ctx
8430 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8431 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8432 == ORT_COMBINED_TEAMS))
8433 && splay_tree_lookup (outer_ctx->variables,
8434 (splay_tree_key) decl) == NULL)
8435 {
8436 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8437 if (outer_ctx->outer_context)
8438 omp_notice_variable (outer_ctx->outer_context, decl, true);
8439 }
8440 else if (outer_ctx
8441 && (outer_ctx->region_type & ORT_TASK) != 0
8442 && outer_ctx->combined_loop
8443 && splay_tree_lookup (outer_ctx->variables,
8444 (splay_tree_key) decl) == NULL)
8445 {
8446 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8447 if (outer_ctx->outer_context)
8448 omp_notice_variable (outer_ctx->outer_context, decl, true);
8449 }
8450 else if (outer_ctx
8451 && (outer_ctx->region_type == ORT_WORKSHARE
8452 || outer_ctx->region_type == ORT_ACC)
8453 && outer_ctx->combined_loop
8454 && splay_tree_lookup (outer_ctx->variables,
8455 (splay_tree_key) decl) == NULL
8456 && !omp_check_private (outer_ctx, decl, false))
8457 {
8458 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8459 if (outer_ctx->outer_context
8460 && (outer_ctx->outer_context->region_type
8461 == ORT_COMBINED_PARALLEL)
8462 && splay_tree_lookup (outer_ctx->outer_context->variables,
8463 (splay_tree_key) decl) == NULL)
8464 {
8465 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8466 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8467 if (octx->outer_context)
8468 {
8469 octx = octx->outer_context;
8470 if (octx->region_type == ORT_WORKSHARE
8471 && octx->combined_loop
8472 && splay_tree_lookup (octx->variables,
8473 (splay_tree_key) decl) == NULL
8474 && !omp_check_private (octx, decl, false))
8475 {
8476 omp_add_variable (octx, decl,
8477 GOVD_LASTPRIVATE | GOVD_SEEN);
8478 octx = octx->outer_context;
8479 if (octx
8480 && ((octx->region_type & ORT_COMBINED_TEAMS)
8481 == ORT_COMBINED_TEAMS)
8482 && (splay_tree_lookup (octx->variables,
8483 (splay_tree_key) decl)
8484 == NULL))
8485 {
8486 omp_add_variable (octx, decl,
8487 GOVD_SHARED | GOVD_SEEN);
8488 octx = octx->outer_context;
8489 }
8490 }
8491 if (octx)
8492 omp_notice_variable (octx, decl, true);
8493 }
8494 }
8495 else if (outer_ctx->outer_context)
8496 omp_notice_variable (outer_ctx->outer_context, decl, true);
8497 }
8498 goto do_add;
8499 case OMP_CLAUSE_REDUCTION:
8500 if (OMP_CLAUSE_REDUCTION_TASK (c))
8501 {
8502 if (region_type == ORT_WORKSHARE)
8503 {
8504 if (nowait == -1)
8505 nowait = omp_find_clause (*list_p,
8506 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8507 if (nowait
8508 && (outer_ctx == NULL
8509 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8510 {
8511 error_at (OMP_CLAUSE_LOCATION (c),
8512 "%<task%> reduction modifier on a construct "
8513 "with a %<nowait%> clause");
8514 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8515 }
8516 }
8517 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8518 {
8519 error_at (OMP_CLAUSE_LOCATION (c),
8520 "invalid %<task%> reduction modifier on construct "
8521 "other than %<parallel%>, %<for%> or %<sections%>");
8522 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8523 }
8524 }
8525 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8526 switch (code)
8527 {
8528 case OMP_SECTIONS:
8529 error_at (OMP_CLAUSE_LOCATION (c),
8530 "%<inscan%> %<reduction%> clause on "
8531 "%qs construct", "sections");
8532 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8533 break;
8534 case OMP_PARALLEL:
8535 error_at (OMP_CLAUSE_LOCATION (c),
8536 "%<inscan%> %<reduction%> clause on "
8537 "%qs construct", "parallel");
8538 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8539 break;
8540 case OMP_TEAMS:
8541 error_at (OMP_CLAUSE_LOCATION (c),
8542 "%<inscan%> %<reduction%> clause on "
8543 "%qs construct", "teams");
8544 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8545 break;
8546 case OMP_TASKLOOP:
8547 error_at (OMP_CLAUSE_LOCATION (c),
8548 "%<inscan%> %<reduction%> clause on "
8549 "%qs construct", "taskloop");
8550 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8551 break;
8552 default:
8553 break;
8554 }
8555 /* FALLTHRU */
8556 case OMP_CLAUSE_IN_REDUCTION:
8557 case OMP_CLAUSE_TASK_REDUCTION:
8558 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8559 /* OpenACC permits reductions on private variables. */
8560 if (!(region_type & ORT_ACC)
8561 /* taskgroup is actually not a worksharing region. */
8562 && code != OMP_TASKGROUP)
8563 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8564 decl = OMP_CLAUSE_DECL (c);
8565 if (TREE_CODE (decl) == MEM_REF)
8566 {
8567 tree type = TREE_TYPE (decl);
8568 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8569 NULL, is_gimple_val, fb_rvalue, false)
8570 == GS_ERROR)
8571 {
8572 remove = true;
8573 break;
8574 }
8575 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8576 if (DECL_P (v))
8577 {
8578 omp_firstprivatize_variable (ctx, v);
8579 omp_notice_variable (ctx, v, true);
8580 }
8581 decl = TREE_OPERAND (decl, 0);
8582 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8583 {
8584 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8585 NULL, is_gimple_val, fb_rvalue, false)
8586 == GS_ERROR)
8587 {
8588 remove = true;
8589 break;
8590 }
8591 v = TREE_OPERAND (decl, 1);
8592 if (DECL_P (v))
8593 {
8594 omp_firstprivatize_variable (ctx, v);
8595 omp_notice_variable (ctx, v, true);
8596 }
8597 decl = TREE_OPERAND (decl, 0);
8598 }
8599 if (TREE_CODE (decl) == ADDR_EXPR
8600 || TREE_CODE (decl) == INDIRECT_REF)
8601 decl = TREE_OPERAND (decl, 0);
8602 }
8603 goto do_add_decl;
8604 case OMP_CLAUSE_LINEAR:
8605 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8606 is_gimple_val, fb_rvalue) == GS_ERROR)
8607 {
8608 remove = true;
8609 break;
8610 }
8611 else
8612 {
8613 if (code == OMP_SIMD
8614 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8615 {
8616 struct gimplify_omp_ctx *octx = outer_ctx;
8617 if (octx
8618 && octx->region_type == ORT_WORKSHARE
8619 && octx->combined_loop
8620 && !octx->distribute)
8621 {
8622 if (octx->outer_context
8623 && (octx->outer_context->region_type
8624 == ORT_COMBINED_PARALLEL))
8625 octx = octx->outer_context->outer_context;
8626 else
8627 octx = octx->outer_context;
8628 }
8629 if (octx
8630 && octx->region_type == ORT_WORKSHARE
8631 && octx->combined_loop
8632 && octx->distribute)
8633 {
8634 error_at (OMP_CLAUSE_LOCATION (c),
8635 "%<linear%> clause for variable other than "
8636 "loop iterator specified on construct "
8637 "combined with %<distribute%>");
8638 remove = true;
8639 break;
8640 }
8641 }
8642 /* For combined #pragma omp parallel for simd, need to put
8643 lastprivate and perhaps firstprivate too on the
8644 parallel. Similarly for #pragma omp for simd. */
8645 struct gimplify_omp_ctx *octx = outer_ctx;
8646 decl = NULL_TREE;
8647 do
8648 {
8649 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8650 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8651 break;
8652 decl = OMP_CLAUSE_DECL (c);
8653 if (error_operand_p (decl))
8654 {
8655 decl = NULL_TREE;
8656 break;
8657 }
8658 flags = GOVD_SEEN;
8659 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8660 flags |= GOVD_FIRSTPRIVATE;
8661 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8662 flags |= GOVD_LASTPRIVATE;
8663 if (octx
8664 && octx->region_type == ORT_WORKSHARE
8665 && octx->combined_loop)
8666 {
8667 if (octx->outer_context
8668 && (octx->outer_context->region_type
8669 == ORT_COMBINED_PARALLEL))
8670 octx = octx->outer_context;
8671 else if (omp_check_private (octx, decl, false))
8672 break;
8673 }
8674 else if (octx
8675 && (octx->region_type & ORT_TASK) != 0
8676 && octx->combined_loop)
8677 ;
8678 else if (octx
8679 && octx->region_type == ORT_COMBINED_PARALLEL
8680 && ctx->region_type == ORT_WORKSHARE
8681 && octx == outer_ctx)
8682 flags = GOVD_SEEN | GOVD_SHARED;
8683 else if (octx
8684 && ((octx->region_type & ORT_COMBINED_TEAMS)
8685 == ORT_COMBINED_TEAMS))
8686 flags = GOVD_SEEN | GOVD_SHARED;
8687 else if (octx
8688 && octx->region_type == ORT_COMBINED_TARGET)
8689 {
8690 flags &= ~GOVD_LASTPRIVATE;
8691 if (flags == GOVD_SEEN)
8692 break;
8693 }
8694 else
8695 break;
8696 splay_tree_node on
8697 = splay_tree_lookup (octx->variables,
8698 (splay_tree_key) decl);
8699 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8700 {
8701 octx = NULL;
8702 break;
8703 }
8704 omp_add_variable (octx, decl, flags);
8705 if (octx->outer_context == NULL)
8706 break;
8707 octx = octx->outer_context;
8708 }
8709 while (1);
8710 if (octx
8711 && decl
8712 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8713 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8714 omp_notice_variable (octx, decl, true);
8715 }
8716 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8717 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8718 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8719 {
8720 notice_outer = false;
8721 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8722 }
8723 goto do_add;
8724
8725 case OMP_CLAUSE_MAP:
8726 decl = OMP_CLAUSE_DECL (c);
8727 if (error_operand_p (decl))
8728 remove = true;
8729 switch (code)
8730 {
8731 case OMP_TARGET:
8732 break;
8733 case OACC_DATA:
8734 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8735 break;
8736 /* FALLTHRU */
8737 case OMP_TARGET_DATA:
8738 case OMP_TARGET_ENTER_DATA:
8739 case OMP_TARGET_EXIT_DATA:
8740 case OACC_HOST_DATA:
8741 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8742 || (OMP_CLAUSE_MAP_KIND (c)
8743 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8744 /* For target {,enter ,exit }data only the array slice is
8745 mapped, but not the pointer to it. */
8746 remove = true;
8747 break;
8748 case OACC_ENTER_DATA:
8749 case OACC_EXIT_DATA:
8750 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8751 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET
8752 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8753 || (OMP_CLAUSE_MAP_KIND (c)
8754 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8755 remove = true;
8756 break;
8757 default:
8758 break;
8759 }
8760 /* For Fortran, not only the pointer to the data is mapped but also
8761 the address of the pointer, the array descriptor etc.; for
8762 'exit data' - and in particular for 'delete:' - having an 'alloc:'
8763 does not make sense. Likewise, for 'update' only transferring the
8764 data itself is needed as the rest has been handled in previous
8765 directives. However, for 'exit data', the array descriptor needs to be
8766 deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE or MAP_RELEASE. */
8767 if (code == OMP_TARGET_EXIT_DATA
8768 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
8769 OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
8770 == GOMP_MAP_DELETE
8771 ? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
8772 else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
8773 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8774 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
8775 remove = true;
8776
8777 if (remove)
8778 break;
8779 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8780 {
8781 struct gimplify_omp_ctx *octx;
8782 for (octx = outer_ctx; octx; octx = octx->outer_context)
8783 {
8784 if (octx->region_type != ORT_ACC_HOST_DATA)
8785 break;
8786 splay_tree_node n2
8787 = splay_tree_lookup (octx->variables,
8788 (splay_tree_key) decl);
8789 if (n2)
8790 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8791 "declared in enclosing %<host_data%> region",
8792 DECL_NAME (decl));
8793 }
8794 }
8795 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8796 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8797 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8798 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8799 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8800 {
8801 remove = true;
8802 break;
8803 }
8804 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8805 || (OMP_CLAUSE_MAP_KIND (c)
8806 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8807 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8808 {
8809 OMP_CLAUSE_SIZE (c)
8810 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8811 false);
8812 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8813 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8814 }
8815 if (!DECL_P (decl))
8816 {
8817 tree d = decl, *pd;
8818 if (TREE_CODE (d) == ARRAY_REF)
8819 {
8820 while (TREE_CODE (d) == ARRAY_REF)
8821 d = TREE_OPERAND (d, 0);
8822 if (TREE_CODE (d) == COMPONENT_REF
8823 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8824 decl = d;
8825 }
8826 pd = &OMP_CLAUSE_DECL (c);
8827 if (d == decl
8828 && TREE_CODE (decl) == INDIRECT_REF
8829 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8830 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8831 == REFERENCE_TYPE))
8832 {
8833 pd = &TREE_OPERAND (decl, 0);
8834 decl = TREE_OPERAND (decl, 0);
8835 }
8836 bool indir_p = false;
8837 tree orig_decl = decl;
8838 tree decl_ref = NULL_TREE;
8839 if ((region_type & ORT_ACC) != 0
8840 && TREE_CODE (*pd) == COMPONENT_REF
8841 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
8842 && code != OACC_UPDATE)
8843 {
8844 while (TREE_CODE (decl) == COMPONENT_REF)
8845 {
8846 decl = TREE_OPERAND (decl, 0);
8847 if ((TREE_CODE (decl) == MEM_REF
8848 && integer_zerop (TREE_OPERAND (decl, 1)))
8849 || INDIRECT_REF_P (decl))
8850 {
8851 indir_p = true;
8852 decl = TREE_OPERAND (decl, 0);
8853 }
8854 if (TREE_CODE (decl) == INDIRECT_REF
8855 && DECL_P (TREE_OPERAND (decl, 0))
8856 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8857 == REFERENCE_TYPE))
8858 {
8859 decl_ref = decl;
8860 decl = TREE_OPERAND (decl, 0);
8861 }
8862 }
8863 }
8864 else if (TREE_CODE (decl) == COMPONENT_REF)
8865 {
8866 while (TREE_CODE (decl) == COMPONENT_REF)
8867 decl = TREE_OPERAND (decl, 0);
8868 if (TREE_CODE (decl) == INDIRECT_REF
8869 && DECL_P (TREE_OPERAND (decl, 0))
8870 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8871 == REFERENCE_TYPE))
8872 decl = TREE_OPERAND (decl, 0);
8873 }
8874 if (decl != orig_decl && DECL_P (decl) && indir_p)
8875 {
8876 gomp_map_kind k = (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
8877 : GOMP_MAP_ATTACH;
8878 /* We have a dereference of a struct member. Make this an
8879 attach/detach operation, and ensure the base pointer is
8880 mapped as a FIRSTPRIVATE_POINTER. */
8881 OMP_CLAUSE_SET_MAP_KIND (c, k);
8882 flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
8883 tree next_clause = OMP_CLAUSE_CHAIN (c);
8884 if (k == GOMP_MAP_ATTACH
8885 && code != OACC_ENTER_DATA
8886 && (!next_clause
8887 || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
8888 || (OMP_CLAUSE_MAP_KIND (next_clause)
8889 != GOMP_MAP_POINTER)
8890 || OMP_CLAUSE_DECL (next_clause) != decl)
8891 && (!struct_deref_set
8892 || !struct_deref_set->contains (decl)))
8893 {
8894 if (!struct_deref_set)
8895 struct_deref_set = new hash_set<tree> ();
8896 /* As well as the attach, we also need a
8897 FIRSTPRIVATE_POINTER clause to properly map the
8898 pointer to the struct base. */
8899 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8900 OMP_CLAUSE_MAP);
8901 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
8902 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
8903 = 1;
8904 tree charptr_zero
8905 = build_int_cst (build_pointer_type (char_type_node),
8906 0);
8907 OMP_CLAUSE_DECL (c2)
8908 = build2 (MEM_REF, char_type_node,
8909 decl_ref ? decl_ref : decl, charptr_zero);
8910 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8911 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8912 OMP_CLAUSE_MAP);
8913 OMP_CLAUSE_SET_MAP_KIND (c3,
8914 GOMP_MAP_FIRSTPRIVATE_POINTER);
8915 OMP_CLAUSE_DECL (c3) = decl;
8916 OMP_CLAUSE_SIZE (c3) = size_zero_node;
8917 tree mapgrp = *prev_list_p;
8918 *prev_list_p = c2;
8919 OMP_CLAUSE_CHAIN (c3) = mapgrp;
8920 OMP_CLAUSE_CHAIN (c2) = c3;
8921
8922 struct_deref_set->add (decl);
8923 }
8924 goto do_add_decl;
8925 }
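/* A hedged example of the case handled above (names illustrative):
given
struct t { int *a; } *p;
#pragma acc enter data copyin(p->a[0:n])
the member access through the dereferenced base pointer yields a
GOMP_MAP_ATTACH_DETACH clause; it is rewritten to an attach (or a
detach for 'exit data'), and a zero-length alloc plus a
GOMP_MAP_FIRSTPRIVATE_POINTER clause are added so that the base
pointer P itself is usable on the device. */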
8926 /* An "attach/detach" operation on an update directive should
8927 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
8928 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
8929 depends on the previous mapping. */
8930 if (code == OACC_UPDATE
8931 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8932 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
8933 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
8934 == GS_ERROR)
8935 {
8936 remove = true;
8937 break;
8938 }
8939 if (DECL_P (decl)
8940 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
8941 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
8942 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
8943 && code != OACC_UPDATE)
8944 {
8945 if (error_operand_p (decl))
8946 {
8947 remove = true;
8948 break;
8949 }
8950
8951 tree stype = TREE_TYPE (decl);
8952 if (TREE_CODE (stype) == REFERENCE_TYPE)
8953 stype = TREE_TYPE (stype);
8954 if (TYPE_SIZE_UNIT (stype) == NULL
8955 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
8956 {
8957 error_at (OMP_CLAUSE_LOCATION (c),
8958 "mapping field %qE of variable length "
8959 "structure", OMP_CLAUSE_DECL (c));
8960 remove = true;
8961 break;
8962 }
8963
8964 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
8965 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8966 {
8967 /* Error recovery. */
8968 if (prev_list_p == NULL)
8969 {
8970 remove = true;
8971 break;
8972 }
8973 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8974 {
8975 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8976 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8977 {
8978 remove = true;
8979 break;
8980 }
8981 }
8982 }
8983
8984 poly_offset_int offset1;
8985 poly_int64 bitpos1;
8986 tree base_ref;
8987
8988 tree base
8989 = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
8990 &bitpos1, &offset1);
8991
8992 gcc_assert (base == decl);
8993
8994 splay_tree_node n
8995 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8996 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
8997 == GOMP_MAP_ALWAYS_POINTER);
8998 bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
8999 == GOMP_MAP_ATTACH_DETACH);
9000 bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
9001 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
9002 bool has_attachments = false;
9003 /* For OpenACC, pointers in structs should trigger an
9004 attach action. */
9005 if (attach_detach && (region_type & ORT_ACC) != 0)
9006 {
9007 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9008 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9009 have detected a case that needs a GOMP_MAP_STRUCT
9010 mapping added. */
9011 gomp_map_kind k
9012 = (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
9013 : GOMP_MAP_ATTACH;
9014 OMP_CLAUSE_SET_MAP_KIND (c, k);
9015 has_attachments = true;
9016 }
9017 if (n == NULL || (n->value & GOVD_MAP) == 0)
9018 {
9019 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9020 OMP_CLAUSE_MAP);
9021 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
9022 : GOMP_MAP_STRUCT;
9023
9024 OMP_CLAUSE_SET_MAP_KIND (l, k);
9025 if (base_ref)
9026 OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
9027 else
9028 OMP_CLAUSE_DECL (l) = decl;
9029 OMP_CLAUSE_SIZE (l)
9030 = (!attach
9031 ? size_int (1)
9032 : DECL_P (OMP_CLAUSE_DECL (l))
9033 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
9034 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
9035 if (struct_map_to_clause == NULL)
9036 struct_map_to_clause = new hash_map<tree, tree>;
9037 struct_map_to_clause->put (decl, l);
9038 if (ptr || attach_detach)
9039 {
9040 insert_struct_comp_map (code, c, l, *prev_list_p,
9041 NULL);
9042 *prev_list_p = l;
9043 prev_list_p = NULL;
9044 }
9045 else
9046 {
9047 OMP_CLAUSE_CHAIN (l) = c;
9048 *list_p = l;
9049 list_p = &OMP_CLAUSE_CHAIN (l);
9050 }
9051 if (base_ref && code == OMP_TARGET)
9052 {
9053 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9054 OMP_CLAUSE_MAP);
9055 enum gomp_map_kind mkind
9056 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9057 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9058 OMP_CLAUSE_DECL (c2) = decl;
9059 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9060 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
9061 OMP_CLAUSE_CHAIN (l) = c2;
9062 }
9063 flags = GOVD_MAP | GOVD_EXPLICIT;
9064 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9065 || ptr
9066 || attach_detach)
9067 flags |= GOVD_SEEN;
9068 if (has_attachments)
9069 flags |= GOVD_MAP_HAS_ATTACHMENTS;
9070 goto do_add_decl;
9071 }
9072 else if (struct_map_to_clause)
9073 {
9074 tree *osc = struct_map_to_clause->get (decl);
9075 tree *sc = NULL, *scp = NULL;
9076 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9077 || ptr
9078 || attach_detach)
9079 n->value |= GOVD_SEEN;
9080 sc = &OMP_CLAUSE_CHAIN (*osc);
9081 if (*sc != c
9082 && (OMP_CLAUSE_MAP_KIND (*sc)
9083 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9084 sc = &OMP_CLAUSE_CHAIN (*sc);
9085 /* Here "prev_list_p" is the end of the inserted
9086 alloc/release nodes after the struct node, OSC. */
9087 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
9088 if ((ptr || attach_detach) && sc == prev_list_p)
9089 break;
9090 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9091 != COMPONENT_REF
9092 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9093 != INDIRECT_REF)
9094 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9095 != ARRAY_REF))
9096 break;
9097 else
9098 {
9099 tree sc_decl = OMP_CLAUSE_DECL (*sc);
9100 poly_offset_int offsetn;
9101 poly_int64 bitposn;
9102 tree base
9103 = extract_base_bit_offset (sc_decl, NULL,
9104 &bitposn, &offsetn);
9105 if (base != decl)
9106 break;
9107 if (scp)
9108 continue;
9109 tree d1 = OMP_CLAUSE_DECL (*sc);
9110 tree d2 = OMP_CLAUSE_DECL (c);
9111 while (TREE_CODE (d1) == ARRAY_REF)
9112 d1 = TREE_OPERAND (d1, 0);
9113 while (TREE_CODE (d2) == ARRAY_REF)
9114 d2 = TREE_OPERAND (d2, 0);
9115 if (TREE_CODE (d1) == INDIRECT_REF)
9116 d1 = TREE_OPERAND (d1, 0);
9117 if (TREE_CODE (d2) == INDIRECT_REF)
9118 d2 = TREE_OPERAND (d2, 0);
9119 while (TREE_CODE (d1) == COMPONENT_REF)
9120 if (TREE_CODE (d2) == COMPONENT_REF
9121 && TREE_OPERAND (d1, 1)
9122 == TREE_OPERAND (d2, 1))
9123 {
9124 d1 = TREE_OPERAND (d1, 0);
9125 d2 = TREE_OPERAND (d2, 0);
9126 }
9127 else
9128 break;
9129 if (d1 == d2)
9130 {
9131 error_at (OMP_CLAUSE_LOCATION (c),
9132 "%qE appears more than once in map "
9133 "clauses", OMP_CLAUSE_DECL (c));
9134 remove = true;
9135 break;
9136 }
9137 if (maybe_lt (offset1, offsetn)
9138 || (known_eq (offset1, offsetn)
9139 && maybe_lt (bitpos1, bitposn)))
9140 {
9141 if (ptr || attach_detach)
9142 scp = sc;
9143 else
9144 break;
9145 }
9146 }
9147 if (remove)
9148 break;
9149 if (!attach)
9150 OMP_CLAUSE_SIZE (*osc)
9151 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
9152 size_one_node);
9153 if (ptr || attach_detach)
9154 {
9155 tree cl = insert_struct_comp_map (code, c, NULL,
9156 *prev_list_p, scp);
9157 if (sc == prev_list_p)
9158 {
9159 *sc = cl;
9160 prev_list_p = NULL;
9161 }
9162 else
9163 {
9164 *prev_list_p = OMP_CLAUSE_CHAIN (c);
9165 list_p = prev_list_p;
9166 prev_list_p = NULL;
9167 OMP_CLAUSE_CHAIN (c) = *sc;
9168 *sc = cl;
9169 continue;
9170 }
9171 }
9172 else if (*sc != c)
9173 {
9174 *list_p = OMP_CLAUSE_CHAIN (c);
9175 OMP_CLAUSE_CHAIN (c) = *sc;
9176 *sc = c;
9177 continue;
9178 }
9179 }
9180 }
9181 if (!remove
9182 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
9183 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
9184 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9185 && OMP_CLAUSE_CHAIN (c)
9186 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
9187 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9188 == GOMP_MAP_ALWAYS_POINTER)
9189 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9190 == GOMP_MAP_ATTACH_DETACH)
9191 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9192 == GOMP_MAP_TO_PSET)))
9193 prev_list_p = list_p;
9194
9195 break;
9196 }
9197 flags = GOVD_MAP | GOVD_EXPLICIT;
9198 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
9199 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
9200 flags |= GOVD_MAP_ALWAYS_TO;
9201 goto do_add;
9202
9203 case OMP_CLAUSE_DEPEND:
9204 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9205 {
9206 tree deps = OMP_CLAUSE_DECL (c);
9207 while (deps && TREE_CODE (deps) == TREE_LIST)
9208 {
9209 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
9210 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
9211 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
9212 pre_p, NULL, is_gimple_val, fb_rvalue);
9213 deps = TREE_CHAIN (deps);
9214 }
9215 break;
9216 }
9217 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9218 break;
9219 if (handled_depend_iterators == -1)
9220 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
9221 if (handled_depend_iterators)
9222 {
9223 if (handled_depend_iterators == 2)
9224 remove = true;
9225 break;
9226 }
9227 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9228 {
9229 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9230 NULL, is_gimple_val, fb_rvalue);
9231 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9232 }
9233 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9234 {
9235 remove = true;
9236 break;
9237 }
9238 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9239 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9240 is_gimple_val, fb_rvalue) == GS_ERROR)
9241 {
9242 remove = true;
9243 break;
9244 }
9245 break;
9246
9247 case OMP_CLAUSE_TO:
9248 case OMP_CLAUSE_FROM:
9249 case OMP_CLAUSE__CACHE_:
9250 decl = OMP_CLAUSE_DECL (c);
9251 if (error_operand_p (decl))
9252 {
9253 remove = true;
9254 break;
9255 }
9256 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9257 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9258 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9259 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9260 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9261 {
9262 remove = true;
9263 break;
9264 }
9265 if (!DECL_P (decl))
9266 {
9267 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
9268 NULL, is_gimple_lvalue, fb_lvalue)
9269 == GS_ERROR)
9270 {
9271 remove = true;
9272 break;
9273 }
9274 break;
9275 }
9276 goto do_notice;
9277
9278 case OMP_CLAUSE_USE_DEVICE_PTR:
9279 case OMP_CLAUSE_USE_DEVICE_ADDR:
9280 flags = GOVD_EXPLICIT;
9281 goto do_add;
9282
9283 case OMP_CLAUSE_IS_DEVICE_PTR:
9284 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9285 goto do_add;
9286
9287 do_add:
9288 decl = OMP_CLAUSE_DECL (c);
9289 do_add_decl:
9290 if (error_operand_p (decl))
9291 {
9292 remove = true;
9293 break;
9294 }
9295 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
9296 {
9297 tree t = omp_member_access_dummy_var (decl);
9298 if (t)
9299 {
9300 tree v = DECL_VALUE_EXPR (decl);
9301 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
9302 if (outer_ctx)
9303 omp_notice_variable (outer_ctx, t, true);
9304 }
9305 }
9306 if (code == OACC_DATA
9307 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9308 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9309 flags |= GOVD_MAP_0LEN_ARRAY;
9310 omp_add_variable (ctx, decl, flags);
9311 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9312 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9313 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9314 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9315 {
9316 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
9317 GOVD_LOCAL | GOVD_SEEN);
9318 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
9319 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
9320 find_decl_expr,
9321 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9322 NULL) == NULL_TREE)
9323 omp_add_variable (ctx,
9324 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9325 GOVD_LOCAL | GOVD_SEEN);
9326 gimplify_omp_ctxp = ctx;
9327 push_gimplify_context ();
9328
9329 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9330 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9331
9332 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
9333 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
9334 pop_gimplify_context
9335 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
9336 push_gimplify_context ();
9337 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
9338 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9339 pop_gimplify_context
9340 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
9341 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
9342 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
9343
9344 gimplify_omp_ctxp = outer_ctx;
9345 }
9346 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9347 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
9348 {
9349 gimplify_omp_ctxp = ctx;
9350 push_gimplify_context ();
9351 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
9352 {
9353 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9354 NULL, NULL);
9355 TREE_SIDE_EFFECTS (bind) = 1;
9356 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
9357 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
9358 }
9359 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
9360 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
9361 pop_gimplify_context
9362 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
9363 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
9364
9365 gimplify_omp_ctxp = outer_ctx;
9366 }
9367 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9368 && OMP_CLAUSE_LINEAR_STMT (c))
9369 {
9370 gimplify_omp_ctxp = ctx;
9371 push_gimplify_context ();
9372 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9373 {
9374 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9375 NULL, NULL);
9376 TREE_SIDE_EFFECTS (bind) = 1;
9377 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9378 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9379 }
9380 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9381 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9382 pop_gimplify_context
9383 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9384 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9385
9386 gimplify_omp_ctxp = outer_ctx;
9387 }
9388 if (notice_outer)
9389 goto do_notice;
9390 break;
9391
9392 case OMP_CLAUSE_COPYIN:
9393 case OMP_CLAUSE_COPYPRIVATE:
9394 decl = OMP_CLAUSE_DECL (c);
9395 if (error_operand_p (decl))
9396 {
9397 remove = true;
9398 break;
9399 }
9400 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9401 && !remove
9402 && !omp_check_private (ctx, decl, true))
9403 {
9404 remove = true;
9405 if (is_global_var (decl))
9406 {
9407 if (DECL_THREAD_LOCAL_P (decl))
9408 remove = false;
9409 else if (DECL_HAS_VALUE_EXPR_P (decl))
9410 {
9411 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9412
9413 if (value
9414 && DECL_P (value)
9415 && DECL_THREAD_LOCAL_P (value))
9416 remove = false;
9417 }
9418 }
9419 if (remove)
9420 error_at (OMP_CLAUSE_LOCATION (c),
9421 "copyprivate variable %qE is not threadprivate"
9422 " or private in outer context", DECL_NAME (decl));
9423 }
9424 do_notice:
9425 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9426 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9427 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9428 && outer_ctx
9429 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9430 || (region_type == ORT_WORKSHARE
9431 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9432 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
9433 || code == OMP_LOOP)))
9434 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
9435 || (code == OMP_LOOP
9436 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9437 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
9438 == ORT_COMBINED_TEAMS))))
9439 {
9440 splay_tree_node on
9441 = splay_tree_lookup (outer_ctx->variables,
9442 (splay_tree_key)decl);
9443 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9444 {
9445 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9446 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9447 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9448 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9449 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9450 == POINTER_TYPE))))
9451 omp_firstprivatize_variable (outer_ctx, decl);
9452 else
9453 {
9454 omp_add_variable (outer_ctx, decl,
9455 GOVD_SEEN | GOVD_SHARED);
9456 if (outer_ctx->outer_context)
9457 omp_notice_variable (outer_ctx->outer_context, decl,
9458 true);
9459 }
9460 }
9461 }
9462 if (outer_ctx)
9463 omp_notice_variable (outer_ctx, decl, true);
9464 if (check_non_private
9465 && region_type == ORT_WORKSHARE
9466 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9467 || decl == OMP_CLAUSE_DECL (c)
9468 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9469 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9470 == ADDR_EXPR
9471 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9472 == POINTER_PLUS_EXPR
9473 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9474 (OMP_CLAUSE_DECL (c), 0), 0))
9475 == ADDR_EXPR)))))
9476 && omp_check_private (ctx, decl, false))
9477 {
9478 error ("%s variable %qE is private in outer context",
9479 check_non_private, DECL_NAME (decl));
9480 remove = true;
9481 }
9482 break;
9483
9484 case OMP_CLAUSE_IF:
9485 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9486 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9487 {
9488 const char *p[2];
9489 for (int i = 0; i < 2; i++)
9490 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9491 {
9492 case VOID_CST: p[i] = "cancel"; break;
9493 case OMP_PARALLEL: p[i] = "parallel"; break;
9494 case OMP_SIMD: p[i] = "simd"; break;
9495 case OMP_TASK: p[i] = "task"; break;
9496 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9497 case OMP_TARGET_DATA: p[i] = "target data"; break;
9498 case OMP_TARGET: p[i] = "target"; break;
9499 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9500 case OMP_TARGET_ENTER_DATA:
9501 p[i] = "target enter data"; break;
9502 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9503 default: gcc_unreachable ();
9504 }
9505 error_at (OMP_CLAUSE_LOCATION (c),
9506 "expected %qs %<if%> clause modifier rather than %qs",
9507 p[0], p[1]);
9508 remove = true;
9509 }
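/* For instance (illustrative), 'if (task: x)' on '#pragma omp
parallel' triggers the mismatch diagnostic above, while the
modifier-less form 'if (x)' (OMP_CLAUSE_IF_MODIFIER == ERROR_MARK)
is accepted on any construct that allows an 'if' clause. */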
9510 /* Fall through. */
9511
9512 case OMP_CLAUSE_FINAL:
9513 OMP_CLAUSE_OPERAND (c, 0)
9514 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9515 /* Fall through. */
9516
9517 case OMP_CLAUSE_SCHEDULE:
9518 case OMP_CLAUSE_NUM_THREADS:
9519 case OMP_CLAUSE_NUM_TEAMS:
9520 case OMP_CLAUSE_THREAD_LIMIT:
9521 case OMP_CLAUSE_DIST_SCHEDULE:
9522 case OMP_CLAUSE_DEVICE:
9523 case OMP_CLAUSE_PRIORITY:
9524 case OMP_CLAUSE_GRAINSIZE:
9525 case OMP_CLAUSE_NUM_TASKS:
9526 case OMP_CLAUSE_HINT:
9527 case OMP_CLAUSE_ASYNC:
9528 case OMP_CLAUSE_WAIT:
9529 case OMP_CLAUSE_NUM_GANGS:
9530 case OMP_CLAUSE_NUM_WORKERS:
9531 case OMP_CLAUSE_VECTOR_LENGTH:
9532 case OMP_CLAUSE_WORKER:
9533 case OMP_CLAUSE_VECTOR:
9534 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9535 is_gimple_val, fb_rvalue) == GS_ERROR)
9536 remove = true;
9537 break;
9538
9539 case OMP_CLAUSE_GANG:
9540 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9541 is_gimple_val, fb_rvalue) == GS_ERROR)
9542 remove = true;
9543 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9544 is_gimple_val, fb_rvalue) == GS_ERROR)
9545 remove = true;
9546 break;
9547
9548 case OMP_CLAUSE_NOWAIT:
9549 nowait = 1;
9550 break;
9551
9552 case OMP_CLAUSE_ORDERED:
9553 case OMP_CLAUSE_UNTIED:
9554 case OMP_CLAUSE_COLLAPSE:
9555 case OMP_CLAUSE_TILE:
9556 case OMP_CLAUSE_AUTO:
9557 case OMP_CLAUSE_SEQ:
9558 case OMP_CLAUSE_INDEPENDENT:
9559 case OMP_CLAUSE_MERGEABLE:
9560 case OMP_CLAUSE_PROC_BIND:
9561 case OMP_CLAUSE_SAFELEN:
9562 case OMP_CLAUSE_SIMDLEN:
9563 case OMP_CLAUSE_NOGROUP:
9564 case OMP_CLAUSE_THREADS:
9565 case OMP_CLAUSE_SIMD:
9566 case OMP_CLAUSE_BIND:
9567 case OMP_CLAUSE_IF_PRESENT:
9568 case OMP_CLAUSE_FINALIZE:
9569 break;
9570
9571 case OMP_CLAUSE_ORDER:
9572 ctx->order_concurrent = true;
9573 break;
9574
9575 case OMP_CLAUSE_DEFAULTMAP:
9576 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9577 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9578 {
9579 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9580 gdmkmin = GDMK_SCALAR;
9581 gdmkmax = GDMK_POINTER;
9582 break;
9583 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9584 gdmkmin = gdmkmax = GDMK_SCALAR;
9585 break;
9586 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9587 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9588 break;
9589 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9590 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9591 break;
9592 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9593 gdmkmin = gdmkmax = GDMK_POINTER;
9594 break;
9595 default:
9596 gcc_unreachable ();
9597 }
9598 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9599 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9600 {
9601 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9602 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9603 break;
9604 case OMP_CLAUSE_DEFAULTMAP_TO:
9605 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9606 break;
9607 case OMP_CLAUSE_DEFAULTMAP_FROM:
9608 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9609 break;
9610 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9611 ctx->defaultmap[gdmk] = GOVD_MAP;
9612 break;
9613 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9614 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9615 break;
9616 case OMP_CLAUSE_DEFAULTMAP_NONE:
9617 ctx->defaultmap[gdmk] = 0;
9618 break;
9619 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9620 switch (gdmk)
9621 {
9622 case GDMK_SCALAR:
9623 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9624 break;
9625 case GDMK_AGGREGATE:
9626 case GDMK_ALLOCATABLE:
9627 ctx->defaultmap[gdmk] = GOVD_MAP;
9628 break;
9629 case GDMK_POINTER:
9630 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9631 break;
9632 default:
9633 gcc_unreachable ();
9634 }
9635 break;
9636 default:
9637 gcc_unreachable ();
9638 }
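/* For example (illustrative), a directive such as
#pragma omp target defaultmap(firstprivate: scalar) defaultmap(to: aggregate)
leaves ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE and
ctx->defaultmap[GDMK_AGGREGATE] = GOVD_MAP | GOVD_MAP_TO_ONLY;
implicitly determined variables then pick up these defaults when
they are noticed in the region. */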
9639 break;
9640
9641 case OMP_CLAUSE_ALIGNED:
9642 decl = OMP_CLAUSE_DECL (c);
9643 if (error_operand_p (decl))
9644 {
9645 remove = true;
9646 break;
9647 }
9648 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9649 is_gimple_val, fb_rvalue) == GS_ERROR)
9650 {
9651 remove = true;
9652 break;
9653 }
9654 if (!is_global_var (decl)
9655 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9656 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9657 break;
9658
9659 case OMP_CLAUSE_NONTEMPORAL:
9660 decl = OMP_CLAUSE_DECL (c);
9661 if (error_operand_p (decl))
9662 {
9663 remove = true;
9664 break;
9665 }
9666 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9667 break;
9668
9669 case OMP_CLAUSE_DEFAULT:
9670 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9671 break;
9672
9673 case OMP_CLAUSE_INCLUSIVE:
9674 case OMP_CLAUSE_EXCLUSIVE:
9675 decl = OMP_CLAUSE_DECL (c);
9676 {
9677 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
9678 (splay_tree_key) decl);
9679 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
9680 {
9681 error_at (OMP_CLAUSE_LOCATION (c),
9682 "%qD specified in %qs clause but not in %<inscan%> "
9683 "%<reduction%> clause on the containing construct",
9684 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
9685 remove = true;
9686 }
9687 else
9688 {
9689 n->value |= GOVD_REDUCTION_INSCAN;
9690 if (outer_ctx->region_type == ORT_SIMD
9691 && outer_ctx->outer_context
9692 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
9693 {
9694 n = splay_tree_lookup (outer_ctx->outer_context->variables,
9695 (splay_tree_key) decl);
9696 if (n && (n->value & GOVD_REDUCTION) != 0)
9697 n->value |= GOVD_REDUCTION_INSCAN;
9698 }
9699 }
9700 }
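/* Context (illustrative): these clauses come from a 'scan' directive
nested in the construct carrying the inscan reduction, e.g.
#pragma omp for reduction (inscan, +: x)
for (i = 0; i < n; i++)
{ ...; #pragma omp scan inclusive (x); ... }
so DECL must already carry GOVD_REDUCTION in the enclosing context,
which is what the lookup above checks. */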
9701 break;
9702
9703 default:
9704 gcc_unreachable ();
9705 }
9706
9707 if (code == OACC_DATA
9708 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9709 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9710 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9711 remove = true;
9712 if (remove)
9713 *list_p = OMP_CLAUSE_CHAIN (c);
9714 else
9715 list_p = &OMP_CLAUSE_CHAIN (c);
9716 }
9717
9718 ctx->clauses = *orig_list_p;
9719 gimplify_omp_ctxp = ctx;
9720 if (struct_map_to_clause)
9721 delete struct_map_to_clause;
9722 if (struct_deref_set)
9723 delete struct_deref_set;
9724 }
9725
9726 /* Return true if DECL is a candidate for shared to firstprivate
9727 optimization. We only consider non-addressable scalars that are
9728 not too big and are not references. */
9729
9730 static bool
9731 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9732 {
9733 if (TREE_ADDRESSABLE (decl))
9734 return false;
9735 tree type = TREE_TYPE (decl);
9736 if (!is_gimple_reg_type (type)
9737 || TREE_CODE (type) == REFERENCE_TYPE
9738 || TREE_ADDRESSABLE (type))
9739 return false;
9740 /* Don't optimize too large decls, as each thread/task will have
9741 its own copy. */
9742 HOST_WIDE_INT len = int_size_in_bytes (type);
9743 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9744 return false;
9745 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9746 return false;
9747 return true;
9748 }
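/* Intuition (illustrative, not a testcase): in
int x = 42;
#pragma omp parallel shared(x)
{ use (x); }
X is a small, non-addressable scalar that is never written in the
region, so the shared access can later be treated as read-only
(see OMP_CLAUSE_SHARED_READONLY below); a TREE_ADDRESSABLE variable
or a large aggregate would not qualify. */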
9749
9750 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9751 For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
9752 mark it as GOVD_WRITTEN in outer contexts. */
9753
9754 static void
9755 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9756 {
9757 for (; ctx; ctx = ctx->outer_context)
9758 {
9759 splay_tree_node n = splay_tree_lookup (ctx->variables,
9760 (splay_tree_key) decl);
9761 if (n == NULL)
9762 continue;
9763 else if (n->value & GOVD_SHARED)
9764 {
9765 n->value |= GOVD_WRITTEN;
9766 return;
9767 }
9768 else if (n->value & GOVD_DATA_SHARE_CLASS)
9769 return;
9770 }
9771 }
9772
9773 /* Helper callback for walk_gimple_seq to discover possible stores
9774 to decls satisfying omp_shared_to_firstprivate_optimizable_decl_p,
9775 setting GOVD_WRITTEN on those that are GOVD_SHARED in some outer
9776 context. */
9777
9778 static tree
9779 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9780 {
9781 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9782
9783 *walk_subtrees = 0;
9784 if (!wi->is_lhs)
9785 return NULL_TREE;
9786
9787 tree op = *tp;
9788 do
9789 {
9790 if (handled_component_p (op))
9791 op = TREE_OPERAND (op, 0);
9792 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9793 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9794 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9795 else
9796 break;
9797 }
9798 while (1);
9799 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9800 return NULL_TREE;
9801
9802 omp_mark_stores (gimplify_omp_ctxp, op);
9803 return NULL_TREE;
9804 }
9805
9806 /* Helper callback for walk_gimple_seq to discover possible stores
9807 to decls satisfying omp_shared_to_firstprivate_optimizable_decl_p,
9808 setting GOVD_WRITTEN on those that are GOVD_SHARED in some outer
9809 context. */
9810
9811 static tree
9812 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9813 bool *handled_ops_p,
9814 struct walk_stmt_info *wi)
9815 {
9816 gimple *stmt = gsi_stmt (*gsi_p);
9817 switch (gimple_code (stmt))
9818 {
9819 /* Don't recurse on OpenMP constructs for which
9820 gimplify_adjust_omp_clauses already handled the bodies,
9821 except handle gimple_omp_for_pre_body. */
9822 case GIMPLE_OMP_FOR:
9823 *handled_ops_p = true;
9824 if (gimple_omp_for_pre_body (stmt))
9825 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9826 omp_find_stores_stmt, omp_find_stores_op, wi);
9827 break;
9828 case GIMPLE_OMP_PARALLEL:
9829 case GIMPLE_OMP_TASK:
9830 case GIMPLE_OMP_SECTIONS:
9831 case GIMPLE_OMP_SINGLE:
9832 case GIMPLE_OMP_TARGET:
9833 case GIMPLE_OMP_TEAMS:
9834 case GIMPLE_OMP_CRITICAL:
9835 *handled_ops_p = true;
9836 break;
9837 default:
9838 break;
9839 }
9840 return NULL_TREE;
9841 }
9842
9843 struct gimplify_adjust_omp_clauses_data
9844 {
9845 tree *list_p;
9846 gimple_seq *pre_p;
9847 };
9848
9849 /* Splay-tree callback: for each variable seen in the context that has
9850 no explicit clause, build and add the implicitly determined clause. */
9851
9852 static int
9853 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
9854 {
9855 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
9856 gimple_seq *pre_p
9857 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
9858 tree decl = (tree) n->key;
9859 unsigned flags = n->value;
9860 enum omp_clause_code code;
9861 tree clause;
9862 bool private_debug;
9863
9864 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9865 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
9866 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
9867 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
9868 return 0;
9869 if ((flags & GOVD_SEEN) == 0)
9870 return 0;
9871 if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
9872 return 0;
9873 if (flags & GOVD_DEBUG_PRIVATE)
9874 {
9875 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
9876 private_debug = true;
9877 }
9878 else if (flags & GOVD_MAP)
9879 private_debug = false;
9880 else
9881 private_debug
9882 = lang_hooks.decls.omp_private_debug_clause (decl,
9883 !!(flags & GOVD_SHARED));
9884 if (private_debug)
9885 code = OMP_CLAUSE_PRIVATE;
9886 else if (flags & GOVD_MAP)
9887 {
9888 code = OMP_CLAUSE_MAP;
9889 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9890 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9891 {
9892 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
9893 return 0;
9894 }
9895 if (VAR_P (decl)
9896 && DECL_IN_CONSTANT_POOL (decl)
9897 && !lookup_attribute ("omp declare target",
9898 DECL_ATTRIBUTES (decl)))
9899 {
9900 tree id = get_identifier ("omp declare target");
9901 DECL_ATTRIBUTES (decl)
9902 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
9903 varpool_node *node = varpool_node::get (decl);
9904 if (node)
9905 {
9906 node->offloadable = 1;
9907 if (ENABLE_OFFLOADING)
9908 g->have_offload = true;
9909 }
9910 }
9911 }
9912 else if (flags & GOVD_SHARED)
9913 {
9914 if (is_global_var (decl))
9915 {
9916 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9917 while (ctx != NULL)
9918 {
9919 splay_tree_node on
9920 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9921 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
9922 | GOVD_PRIVATE | GOVD_REDUCTION
9923 | GOVD_LINEAR | GOVD_MAP)) != 0)
9924 break;
9925 ctx = ctx->outer_context;
9926 }
9927 if (ctx == NULL)
9928 return 0;
9929 }
9930 code = OMP_CLAUSE_SHARED;
9931 }
9932 else if (flags & GOVD_PRIVATE)
9933 code = OMP_CLAUSE_PRIVATE;
9934 else if (flags & GOVD_FIRSTPRIVATE)
9935 {
9936 code = OMP_CLAUSE_FIRSTPRIVATE;
9937 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
9938 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9939 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9940 {
9941 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9942 "%<target%> construct", decl);
9943 return 0;
9944 }
9945 }
9946 else if (flags & GOVD_LASTPRIVATE)
9947 code = OMP_CLAUSE_LASTPRIVATE;
9948 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
9949 return 0;
9950 else if (flags & GOVD_CONDTEMP)
9951 {
9952 code = OMP_CLAUSE__CONDTEMP_;
9953 gimple_add_tmp_var (decl);
9954 }
9955 else
9956 gcc_unreachable ();
9957
9958 if (((flags & GOVD_LASTPRIVATE)
9959 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
9960 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9961 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9962
9963 tree chain = *list_p;
9964 clause = build_omp_clause (input_location, code);
9965 OMP_CLAUSE_DECL (clause) = decl;
9966 OMP_CLAUSE_CHAIN (clause) = chain;
9967 if (private_debug)
9968 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
9969 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
9970 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
9971 else if (code == OMP_CLAUSE_SHARED
9972 && (flags & GOVD_WRITTEN) == 0
9973 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9974 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
9975 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
9976 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
9977 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
9978 {
9979 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
9980 OMP_CLAUSE_DECL (nc) = decl;
9981 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9982 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9983 OMP_CLAUSE_DECL (clause)
9984 = build_simple_mem_ref_loc (input_location, decl);
9985 OMP_CLAUSE_DECL (clause)
9986 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
9987 build_int_cst (build_pointer_type (char_type_node), 0));
9988 OMP_CLAUSE_SIZE (clause) = size_zero_node;
9989 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9990 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
9991 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
9992 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9993 OMP_CLAUSE_CHAIN (nc) = chain;
9994 OMP_CLAUSE_CHAIN (clause) = nc;
9995 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9996 gimplify_omp_ctxp = ctx->outer_context;
9997 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
9998 pre_p, NULL, is_gimple_val, fb_rvalue);
9999 gimplify_omp_ctxp = ctx;
10000 }
10001 else if (code == OMP_CLAUSE_MAP)
10002 {
10003 int kind;
10004 /* Not all combinations of these GOVD_MAP flags are actually valid. */
10005 switch (flags & (GOVD_MAP_TO_ONLY
10006 | GOVD_MAP_FORCE
10007 | GOVD_MAP_FORCE_PRESENT
10008 | GOVD_MAP_ALLOC_ONLY
10009 | GOVD_MAP_FROM_ONLY))
10010 {
10011 case 0:
10012 kind = GOMP_MAP_TOFROM;
10013 break;
10014 case GOVD_MAP_FORCE:
10015 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
10016 break;
10017 case GOVD_MAP_TO_ONLY:
10018 kind = GOMP_MAP_TO;
10019 break;
10020 case GOVD_MAP_FROM_ONLY:
10021 kind = GOMP_MAP_FROM;
10022 break;
10023 case GOVD_MAP_ALLOC_ONLY:
10024 kind = GOMP_MAP_ALLOC;
10025 break;
10026 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
10027 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
10028 break;
10029 case GOVD_MAP_FORCE_PRESENT:
10030 kind = GOMP_MAP_FORCE_PRESENT;
10031 break;
10032 default:
10033 gcc_unreachable ();
10034 }
10035 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
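/* E.g. (illustrative): an aggregate merely referenced inside a bare
'#pragma omp target' region gets GOVD_MAP with no modifier flags,
so the switch above selects GOMP_MAP_TOFROM -- the same as an
explicit map(tofrom: ...) clause. */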
10036 if (DECL_SIZE (decl)
10037 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10038 {
10039 tree decl2 = DECL_VALUE_EXPR (decl);
10040 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10041 decl2 = TREE_OPERAND (decl2, 0);
10042 gcc_assert (DECL_P (decl2));
10043 tree mem = build_simple_mem_ref (decl2);
10044 OMP_CLAUSE_DECL (clause) = mem;
10045 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10046 if (gimplify_omp_ctxp->outer_context)
10047 {
10048 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10049 omp_notice_variable (ctx, decl2, true);
10050 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
10051 }
10052 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10053 OMP_CLAUSE_MAP);
10054 OMP_CLAUSE_DECL (nc) = decl;
10055 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10056 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
10057 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10058 else
10059 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10060 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10061 OMP_CLAUSE_CHAIN (clause) = nc;
10062 }
10063 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
10064 && lang_hooks.decls.omp_privatize_by_reference (decl))
10065 {
10066 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
10067 OMP_CLAUSE_SIZE (clause)
10068 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
10069 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10070 gimplify_omp_ctxp = ctx->outer_context;
10071 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
10072 pre_p, NULL, is_gimple_val, fb_rvalue);
10073 gimplify_omp_ctxp = ctx;
10074 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10075 OMP_CLAUSE_MAP);
10076 OMP_CLAUSE_DECL (nc) = decl;
10077 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10078 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
10079 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10080 OMP_CLAUSE_CHAIN (clause) = nc;
10081 }
10082 else
10083 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
10084 }
10085 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
10086 {
10087 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
10088 OMP_CLAUSE_DECL (nc) = decl;
10089 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
10090 OMP_CLAUSE_CHAIN (nc) = chain;
10091 OMP_CLAUSE_CHAIN (clause) = nc;
10092 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10093 gimplify_omp_ctxp = ctx->outer_context;
10094 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10095 gimplify_omp_ctxp = ctx;
10096 }
10097 *list_p = clause;
10098 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10099 gimplify_omp_ctxp = ctx->outer_context;
10100 lang_hooks.decls.omp_finish_clause (clause, pre_p);
10101 if (gimplify_omp_ctxp)
10102 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
10103 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
10104 && DECL_P (OMP_CLAUSE_SIZE (clause)))
10105 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
10106 true);
10107 gimplify_omp_ctxp = ctx;
10108 return 0;
10109 }
10110
10111 static void
10112 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
10113 enum tree_code code)
10114 {
10115 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10116 tree *orig_list_p = list_p;
10117 tree c, decl;
10118 bool has_inscan_reductions = false;
10119
10120 if (body)
10121 {
10122 struct gimplify_omp_ctx *octx;
10123 for (octx = ctx; octx; octx = octx->outer_context)
10124 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
10125 break;
10126 if (octx)
10127 {
10128 struct walk_stmt_info wi;
10129 memset (&wi, 0, sizeof (wi));
10130 walk_gimple_seq (body, omp_find_stores_stmt,
10131 omp_find_stores_op, &wi);
10132 }
10133 }
10134
10135 if (ctx->add_safelen1)
10136 {
10137 /* If there are VLAs in the body of the simd loop, prevent
10138 vectorization. */
10139 gcc_assert (ctx->region_type == ORT_SIMD);
10140 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
10141 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
10142 OMP_CLAUSE_CHAIN (c) = *list_p;
10143 *list_p = c;
10144 list_p = &OMP_CLAUSE_CHAIN (c);
10145 }
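/* The added clause is equivalent (illustratively) to the user having
written '#pragma omp simd safelen(1)', which limits the safe vector
length to 1 and thereby disables vectorization. */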
10146
10147 if (ctx->region_type == ORT_WORKSHARE
10148 && ctx->outer_context
10149 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
10150 {
10151 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
10152 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10153 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10154 {
10155 decl = OMP_CLAUSE_DECL (c);
10156 splay_tree_node n
10157 = splay_tree_lookup (ctx->outer_context->variables,
10158 (splay_tree_key) decl);
10159 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
10160 (splay_tree_key) decl));
10161 omp_add_variable (ctx, decl, n->value);
10162 tree c2 = copy_node (c);
10163 OMP_CLAUSE_CHAIN (c2) = *list_p;
10164 *list_p = c2;
10165 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
10166 continue;
10167 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10168 OMP_CLAUSE_FIRSTPRIVATE);
10169 OMP_CLAUSE_DECL (c2) = decl;
10170 OMP_CLAUSE_CHAIN (c2) = *list_p;
10171 *list_p = c2;
10172 }
10173 }
10174 while ((c = *list_p) != NULL)
10175 {
10176 splay_tree_node n;
10177 bool remove = false;
10178
10179 switch (OMP_CLAUSE_CODE (c))
10180 {
10181 case OMP_CLAUSE_FIRSTPRIVATE:
10182 if ((ctx->region_type & ORT_TARGET)
10183 && (ctx->region_type & ORT_ACC) == 0
10184 && TYPE_ATOMIC (strip_array_types
10185 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
10186 {
10187 error_at (OMP_CLAUSE_LOCATION (c),
10188 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10189 "%<target%> construct", OMP_CLAUSE_DECL (c));
10190 remove = true;
10191 break;
10192 }
10193 /* FALLTHRU */
10194 case OMP_CLAUSE_PRIVATE:
10195 case OMP_CLAUSE_SHARED:
10196 case OMP_CLAUSE_LINEAR:
10197 decl = OMP_CLAUSE_DECL (c);
10198 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10199 remove = !(n->value & GOVD_SEEN);
10200 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
10201 && code == OMP_PARALLEL
10202 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10203 remove = true;
10204 if (! remove)
10205 {
10206 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
10207 if ((n->value & GOVD_DEBUG_PRIVATE)
10208 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
10209 {
10210 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
10211 || ((n->value & GOVD_DATA_SHARE_CLASS)
10212 == GOVD_SHARED));
10213 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
10214 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
10215 }
10216 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10217 && (n->value & GOVD_WRITTEN) == 0
10218 && DECL_P (decl)
10219 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10220 OMP_CLAUSE_SHARED_READONLY (c) = 1;
10221 else if (DECL_P (decl)
10222 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10223 && (n->value & GOVD_WRITTEN) != 0)
10224 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10225 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
10226 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10227 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10228 }
10229 break;
10230
10231 case OMP_CLAUSE_LASTPRIVATE:
10232 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10233 accurately reflect the presence of a FIRSTPRIVATE clause. */
10234 decl = OMP_CLAUSE_DECL (c);
10235 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10236 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
10237 = (n->value & GOVD_FIRSTPRIVATE) != 0;
10238 if (code == OMP_DISTRIBUTE
10239 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10240 {
10241 remove = true;
10242 error_at (OMP_CLAUSE_LOCATION (c),
10243 "same variable used in %<firstprivate%> and "
10244 "%<lastprivate%> clauses on %<distribute%> "
10245 "construct");
10246 }
10247 if (!remove
10248 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10249 && DECL_P (decl)
10250 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10251 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10252 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
10253 remove = true;
10254 break;
10255
10256 case OMP_CLAUSE_ALIGNED:
10257 decl = OMP_CLAUSE_DECL (c);
10258 if (!is_global_var (decl))
10259 {
10260 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10261 remove = n == NULL || !(n->value & GOVD_SEEN);
10262 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10263 {
10264 struct gimplify_omp_ctx *octx;
10265 if (n != NULL
10266 && (n->value & (GOVD_DATA_SHARE_CLASS
10267 & ~GOVD_FIRSTPRIVATE)))
10268 remove = true;
10269 else
10270 for (octx = ctx->outer_context; octx;
10271 octx = octx->outer_context)
10272 {
10273 n = splay_tree_lookup (octx->variables,
10274 (splay_tree_key) decl);
10275 if (n == NULL)
10276 continue;
10277 if (n->value & GOVD_LOCAL)
10278 break;
10279 /* We have to avoid assigning a shared variable
10280 to itself when trying to add
10281 __builtin_assume_aligned. */
10282 if (n->value & GOVD_SHARED)
10283 {
10284 remove = true;
10285 break;
10286 }
10287 }
10288 }
10289 }
10290 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
10291 {
10292 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10293 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10294 remove = true;
10295 }
10296 break;
10297
10298 case OMP_CLAUSE_NONTEMPORAL:
10299 decl = OMP_CLAUSE_DECL (c);
10300 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10301 remove = n == NULL || !(n->value & GOVD_SEEN);
10302 break;
10303
10304 case OMP_CLAUSE_MAP:
10305 if (code == OMP_TARGET_EXIT_DATA
10306 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
10307 {
10308 remove = true;
10309 break;
10310 }
10311 decl = OMP_CLAUSE_DECL (c);
10312 /* Data clauses associated with reductions must be
10313 compatible with present_or_copy. Warn and adjust the clause
10314 if that is not the case. */
10315 if (ctx->region_type == ORT_ACC_PARALLEL
10316 || ctx->region_type == ORT_ACC_SERIAL)
10317 {
10318 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
10319 n = NULL;
10320
10321 if (DECL_P (t))
10322 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
10323
10324 if (n && (n->value & GOVD_REDUCTION))
10325 {
10326 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
10327
10328 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
10329 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
10330 && kind != GOMP_MAP_FORCE_PRESENT
10331 && kind != GOMP_MAP_POINTER)
10332 {
10333 warning_at (OMP_CLAUSE_LOCATION (c), 0,
10334 "incompatible data clause with reduction "
10335 "on %qE; promoting to %<present_or_copy%>",
10336 DECL_NAME (t));
10337 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
10338 }
10339 }
10340 }
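/* E.g. (illustrative): on
#pragma acc parallel copyout(sum) reduction(+: sum)
the 'copyout' map (GOMP_MAP_FROM) is not present_or_copy
compatible, so the warning above fires and the map kind is
promoted to GOMP_MAP_TOFROM, i.e. plain 'copy' semantics. */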
10341 if (!DECL_P (decl))
10342 {
10343 if ((ctx->region_type & ORT_TARGET) != 0
10344 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10345 {
10346 if (TREE_CODE (decl) == INDIRECT_REF
10347 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10348 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10349 == REFERENCE_TYPE))
10350 decl = TREE_OPERAND (decl, 0);
10351 if (TREE_CODE (decl) == COMPONENT_REF)
10352 {
10353 while (TREE_CODE (decl) == COMPONENT_REF)
10354 decl = TREE_OPERAND (decl, 0);
10355 if (DECL_P (decl))
10356 {
10357 n = splay_tree_lookup (ctx->variables,
10358 (splay_tree_key) decl);
10359 if (!(n->value & GOVD_SEEN))
10360 remove = true;
10361 }
10362 }
10363 }
10364 break;
10365 }
10366 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10367 if ((ctx->region_type & ORT_TARGET) != 0
10368 && !(n->value & GOVD_SEEN)
10369 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
10370 && (!is_global_var (decl)
10371 || !lookup_attribute ("omp declare target link",
10372 DECL_ATTRIBUTES (decl))))
10373 {
10374 remove = true;
10375 /* For struct element mapping, if the struct is never referenced
10376 in the target block and none of the mappings has an always
10377 modifier, remove all the struct element mappings, which
10378 immediately follow the GOMP_MAP_STRUCT map clause. */
10379 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
10380 {
10381 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
10382 while (cnt--)
10383 OMP_CLAUSE_CHAIN (c)
10384 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
10385 }
10386 }
10387 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
10388 && code == OMP_TARGET_EXIT_DATA)
10389 remove = true;
10390 else if (DECL_SIZE (decl)
10391 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
10392 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
10393 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
10394 && (OMP_CLAUSE_MAP_KIND (c)
10395 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10396 {
10397 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10398 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10399 INTEGER_CST. */
10400 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
10401
10402 tree decl2 = DECL_VALUE_EXPR (decl);
10403 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10404 decl2 = TREE_OPERAND (decl2, 0);
10405 gcc_assert (DECL_P (decl2));
10406 tree mem = build_simple_mem_ref (decl2);
10407 OMP_CLAUSE_DECL (c) = mem;
10408 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10409 if (ctx->outer_context)
10410 {
10411 omp_notice_variable (ctx->outer_context, decl2, true);
10412 omp_notice_variable (ctx->outer_context,
10413 OMP_CLAUSE_SIZE (c), true);
10414 }
10415 if (((ctx->region_type & ORT_TARGET) != 0
10416 || !ctx->target_firstprivatize_array_bases)
10417 && ((n->value & GOVD_SEEN) == 0
10418 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
10419 {
10420 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10421 OMP_CLAUSE_MAP);
10422 OMP_CLAUSE_DECL (nc) = decl;
10423 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10424 if (ctx->target_firstprivatize_array_bases)
10425 OMP_CLAUSE_SET_MAP_KIND (nc,
10426 GOMP_MAP_FIRSTPRIVATE_POINTER);
10427 else
10428 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10429 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
10430 OMP_CLAUSE_CHAIN (c) = nc;
10431 c = nc;
10432 }
10433 }
10434 else
10435 {
10436 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10437 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10438 gcc_assert ((n->value & GOVD_SEEN) == 0
10439 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10440 == 0));
10441 }
10442 break;
10443
10444 case OMP_CLAUSE_TO:
10445 case OMP_CLAUSE_FROM:
10446 case OMP_CLAUSE__CACHE_:
10447 decl = OMP_CLAUSE_DECL (c);
10448 if (!DECL_P (decl))
10449 break;
10450 if (DECL_SIZE (decl)
10451 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10452 {
10453 tree decl2 = DECL_VALUE_EXPR (decl);
10454 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10455 decl2 = TREE_OPERAND (decl2, 0);
10456 gcc_assert (DECL_P (decl2));
10457 tree mem = build_simple_mem_ref (decl2);
10458 OMP_CLAUSE_DECL (c) = mem;
10459 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10460 if (ctx->outer_context)
10461 {
10462 omp_notice_variable (ctx->outer_context, decl2, true);
10463 omp_notice_variable (ctx->outer_context,
10464 OMP_CLAUSE_SIZE (c), true);
10465 }
10466 }
10467 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10468 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10469 break;
10470
10471 case OMP_CLAUSE_REDUCTION:
10472 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10473 {
10474 decl = OMP_CLAUSE_DECL (c);
10475 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10476 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
10477 {
10478 remove = true;
10479 error_at (OMP_CLAUSE_LOCATION (c),
10480 "%qD specified in %<inscan%> %<reduction%> clause "
10481 "but not in %<scan%> directive clause", decl);
10482 break;
10483 }
10484 has_inscan_reductions = true;
10485 }
10486 /* FALLTHRU */
10487 case OMP_CLAUSE_IN_REDUCTION:
10488 case OMP_CLAUSE_TASK_REDUCTION:
10489 decl = OMP_CLAUSE_DECL (c);
10490 /* OpenACC reductions need a present_or_copy data clause.
10491 Add one if necessary. Emit an error when the reduction is private. */
10492 if (ctx->region_type == ORT_ACC_PARALLEL
10493 || ctx->region_type == ORT_ACC_SERIAL)
10494 {
10495 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10496 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10497 {
10498 remove = true;
10499 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
10500 "reduction on %qE", DECL_NAME (decl));
10501 }
10502 else if ((n->value & GOVD_MAP) == 0)
10503 {
10504 tree next = OMP_CLAUSE_CHAIN (c);
10505 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
10506 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
10507 OMP_CLAUSE_DECL (nc) = decl;
10508 OMP_CLAUSE_CHAIN (c) = nc;
10509 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10510 while (1)
10511 {
10512 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
10513 if (OMP_CLAUSE_CHAIN (nc) == NULL)
10514 break;
10515 nc = OMP_CLAUSE_CHAIN (nc);
10516 }
10517 OMP_CLAUSE_CHAIN (nc) = next;
10518 n->value |= GOVD_MAP;
10519 }
10520 }
10521 if (DECL_P (decl)
10522 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10523 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10524 break;
10525 case OMP_CLAUSE_COPYIN:
10526 case OMP_CLAUSE_COPYPRIVATE:
10527 case OMP_CLAUSE_IF:
10528 case OMP_CLAUSE_NUM_THREADS:
10529 case OMP_CLAUSE_NUM_TEAMS:
10530 case OMP_CLAUSE_THREAD_LIMIT:
10531 case OMP_CLAUSE_DIST_SCHEDULE:
10532 case OMP_CLAUSE_DEVICE:
10533 case OMP_CLAUSE_SCHEDULE:
10534 case OMP_CLAUSE_NOWAIT:
10535 case OMP_CLAUSE_ORDERED:
10536 case OMP_CLAUSE_DEFAULT:
10537 case OMP_CLAUSE_UNTIED:
10538 case OMP_CLAUSE_COLLAPSE:
10539 case OMP_CLAUSE_FINAL:
10540 case OMP_CLAUSE_MERGEABLE:
10541 case OMP_CLAUSE_PROC_BIND:
10542 case OMP_CLAUSE_SAFELEN:
10543 case OMP_CLAUSE_SIMDLEN:
10544 case OMP_CLAUSE_DEPEND:
10545 case OMP_CLAUSE_PRIORITY:
10546 case OMP_CLAUSE_GRAINSIZE:
10547 case OMP_CLAUSE_NUM_TASKS:
10548 case OMP_CLAUSE_NOGROUP:
10549 case OMP_CLAUSE_THREADS:
10550 case OMP_CLAUSE_SIMD:
10551 case OMP_CLAUSE_HINT:
10552 case OMP_CLAUSE_DEFAULTMAP:
10553 case OMP_CLAUSE_ORDER:
10554 case OMP_CLAUSE_BIND:
10555 case OMP_CLAUSE_USE_DEVICE_PTR:
10556 case OMP_CLAUSE_USE_DEVICE_ADDR:
10557 case OMP_CLAUSE_IS_DEVICE_PTR:
10558 case OMP_CLAUSE_ASYNC:
10559 case OMP_CLAUSE_WAIT:
10560 case OMP_CLAUSE_INDEPENDENT:
10561 case OMP_CLAUSE_NUM_GANGS:
10562 case OMP_CLAUSE_NUM_WORKERS:
10563 case OMP_CLAUSE_VECTOR_LENGTH:
10564 case OMP_CLAUSE_GANG:
10565 case OMP_CLAUSE_WORKER:
10566 case OMP_CLAUSE_VECTOR:
10567 case OMP_CLAUSE_AUTO:
10568 case OMP_CLAUSE_SEQ:
10569 case OMP_CLAUSE_TILE:
10570 case OMP_CLAUSE_IF_PRESENT:
10571 case OMP_CLAUSE_FINALIZE:
10572 case OMP_CLAUSE_INCLUSIVE:
10573 case OMP_CLAUSE_EXCLUSIVE:
10574 break;
10575
10576 default:
10577 gcc_unreachable ();
10578 }
10579
10580 if (remove)
10581 *list_p = OMP_CLAUSE_CHAIN (c);
10582 else
10583 list_p = &OMP_CLAUSE_CHAIN (c);
10584 }
10585
10586 /* Add in any implicit data sharing. */
10587 struct gimplify_adjust_omp_clauses_data data;
10588 data.list_p = list_p;
10589 data.pre_p = pre_p;
10590 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
10591
10592 if (has_inscan_reductions)
10593 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
10594 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10595 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10596 {
10597 error_at (OMP_CLAUSE_LOCATION (c),
10598 "%<inscan%> %<reduction%> clause used together with "
10599 "%<linear%> clause for a variable other than loop "
10600 "iterator");
10601 break;
10602 }
10603
10604 gimplify_omp_ctxp = ctx->outer_context;
10605 delete_omp_context (ctx);
10606 }
10607
10608 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
10609 -1 if it is not known yet (simd is involved and the answer won't be
10610 known until vectorization) and 1 if they do match. If SCORES is
10611 non-NULL, it should point to an array of at least 2*NCONSTRUCTS+2 ints,
10612 which will be filled with the positions of the CONSTRUCTS (position -1
10613 if a construct will never match) followed by the number of constructs
10614 in the OpenMP context construct trait. If the score depends on whether
10615 the code will be in a declare simd clone or not, the function returns 2
10616 and there will be two sets of scores: the first for the case that it is
10617 not in a declare simd clone, the second for the case that it is. */
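/* For example (an illustrative sketch, not a full specification): when
   called within the body of a "#pragma omp target", a CONSTRUCTS array
   containing just OMP_TARGET matches and 1 is returned; if no enclosing
   construct matches, 0 is returned; and if the match hinges on a simd
   construct, -1 is returned because the answer is only known once
   vectorization has run.  */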
10618
10619 int
10620 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
10621 int *scores)
10622 {
10623 int matched = 0, cnt = 0;
10624 bool simd_seen = false;
10625 bool target_seen = false;
10626 int declare_simd_cnt = -1;
10627 auto_vec<enum tree_code, 16> codes;
10628 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
10629 {
10630 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
10631 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
10632 == ORT_TARGET && ctx->code == OMP_TARGET)
10633 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
10634 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
10635 || (ctx->region_type == ORT_SIMD
10636 && ctx->code == OMP_SIMD
10637 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
10638 {
10639 ++cnt;
10640 if (scores)
10641 codes.safe_push (ctx->code);
10642 else if (matched < nconstructs && ctx->code == constructs[matched])
10643 {
10644 if (ctx->code == OMP_SIMD)
10645 {
10646 if (matched)
10647 return 0;
10648 simd_seen = true;
10649 }
10650 ++matched;
10651 }
10652 if (ctx->code == OMP_TARGET)
10653 {
10654 if (scores == NULL)
10655 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
10656 target_seen = true;
10657 break;
10658 }
10659 }
10660 else if (ctx->region_type == ORT_WORKSHARE
10661 && ctx->code == OMP_LOOP
10662 && ctx->outer_context
10663 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
10664 && ctx->outer_context->outer_context
10665 && ctx->outer_context->outer_context->code == OMP_LOOP
10666 && ctx->outer_context->outer_context->distribute)
10667 ctx = ctx->outer_context->outer_context;
10668 ctx = ctx->outer_context;
10669 }
10670 if (!target_seen
10671 && lookup_attribute ("omp declare simd",
10672 DECL_ATTRIBUTES (current_function_decl)))
10673 {
10674 /* Declare simd is a maybe case: it is supposed to be added only to the
10675 clones created by omp-simd-clone.c, not to the base function. */
10676 declare_simd_cnt = cnt++;
10677 if (scores)
10678 codes.safe_push (OMP_SIMD);
10679 else if (cnt == 0
10680 && constructs[0] == OMP_SIMD)
10681 {
10682 gcc_assert (matched == 0);
10683 simd_seen = true;
10684 if (++matched == nconstructs)
10685 return -1;
10686 }
10687 }
10688 if (tree attr = lookup_attribute ("omp declare variant variant",
10689 DECL_ATTRIBUTES (current_function_decl)))
10690 {
10691 enum tree_code variant_constructs[5];
10692 int variant_nconstructs = 0;
10693 if (!target_seen)
10694 variant_nconstructs
10695 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
10696 variant_constructs);
10697 for (int i = 0; i < variant_nconstructs; i++)
10698 {
10699 ++cnt;
10700 if (scores)
10701 codes.safe_push (variant_constructs[i]);
10702 else if (matched < nconstructs
10703 && variant_constructs[i] == constructs[matched])
10704 {
10705 if (variant_constructs[i] == OMP_SIMD)
10706 {
10707 if (matched)
10708 return 0;
10709 simd_seen = true;
10710 }
10711 ++matched;
10712 }
10713 }
10714 }
10715 if (!target_seen
10716 && lookup_attribute ("omp declare target block",
10717 DECL_ATTRIBUTES (current_function_decl)))
10718 {
10719 if (scores)
10720 codes.safe_push (OMP_TARGET);
10721 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
10722 ++matched;
10723 }
10724 if (scores)
10725 {
10726 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
10727 {
10728 int j = codes.length () - 1;
10729 for (int i = nconstructs - 1; i >= 0; i--)
10730 {
10731 while (j >= 0
10732 && (pass != 0 || declare_simd_cnt != j)
10733 && constructs[i] != codes[j])
10734 --j;
10735 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
10736 *scores++ = j - 1;
10737 else
10738 *scores++ = j;
10739 }
10740 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
10741 ? codes.length () - 1 : codes.length ());
10742 }
10743 return declare_simd_cnt == -1 ? 1 : 2;
10744 }
10745 if (matched == nconstructs)
10746 return simd_seen ? -1 : 1;
10747 return 0;
10748 }
10749
10750 /* Gimplify OACC_CACHE. */
10751
10752 static void
10753 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10754 {
10755 tree expr = *expr_p;
10756
10757 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
10758 OACC_CACHE);
10759 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10760 OACC_CACHE);
10761
10762 /* TODO: Do something sensible with this information. */
10763
10764 *expr_p = NULL_TREE;
10765 }
10766
10767 /* Helper function of gimplify_oacc_declare. Its purpose is to translate,
10768 if required, the 'kind' in CLAUSE into an 'entry' kind and an 'exit'
10769 kind. The entry kind will replace the one in CLAUSE, while the exit
10770 kind will be used in a new omp_clause and returned to the caller. */
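/* Concretely, per the switch below: GOMP_MAP_ALLOC gets a GOMP_MAP_RELEASE
   exit clause; GOMP_MAP_FROM is turned into GOMP_MAP_FORCE_ALLOC on entry
   with a GOMP_MAP_FROM exit clause; GOMP_MAP_TOFROM is turned into
   GOMP_MAP_TO on entry with a GOMP_MAP_FROM exit clause; the remaining
   accepted kinds need no exit counterpart.  */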
10771
10772 static tree
10773 gimplify_oacc_declare_1 (tree clause)
10774 {
10775 HOST_WIDE_INT kind, new_op;
10776 bool ret = false;
10777 tree c = NULL;
10778
10779 kind = OMP_CLAUSE_MAP_KIND (clause);
10780
10781 switch (kind)
10782 {
10783 case GOMP_MAP_ALLOC:
10784 new_op = GOMP_MAP_RELEASE;
10785 ret = true;
10786 break;
10787
10788 case GOMP_MAP_FROM:
10789 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10790 new_op = GOMP_MAP_FROM;
10791 ret = true;
10792 break;
10793
10794 case GOMP_MAP_TOFROM:
10795 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10796 new_op = GOMP_MAP_FROM;
10797 ret = true;
10798 break;
10799
10800 case GOMP_MAP_DEVICE_RESIDENT:
10801 case GOMP_MAP_FORCE_DEVICEPTR:
10802 case GOMP_MAP_FORCE_PRESENT:
10803 case GOMP_MAP_LINK:
10804 case GOMP_MAP_POINTER:
10805 case GOMP_MAP_TO:
10806 break;
10807
10808 default:
10809 gcc_unreachable ();
10810 break;
10811 }
10812
10813 if (ret)
10814 {
10815 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10816 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10817 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10818 }
10819
10820 return c;
10821 }
10822
10823 /* Gimplify OACC_DECLARE. */
10824
10825 static void
10826 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10827 {
10828 tree expr = *expr_p;
10829 gomp_target *stmt;
10830 tree clauses, t, decl;
10831
10832 clauses = OACC_DECLARE_CLAUSES (expr);
10833
10834 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
10835 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
10836
10837 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10838 {
10839 decl = OMP_CLAUSE_DECL (t);
10840
10841 if (TREE_CODE (decl) == MEM_REF)
10842 decl = TREE_OPERAND (decl, 0);
10843
10844 if (VAR_P (decl) && !is_oacc_declared (decl))
10845 {
10846 tree attr = get_identifier ("oacc declare target");
10847 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10848 DECL_ATTRIBUTES (decl));
10849 }
10850
10851 if (VAR_P (decl)
10852 && !is_global_var (decl)
10853 && DECL_CONTEXT (decl) == current_function_decl)
10854 {
10855 tree c = gimplify_oacc_declare_1 (t);
10856 if (c)
10857 {
10858 if (oacc_declare_returns == NULL)
10859 oacc_declare_returns = new hash_map<tree, tree>;
10860
10861 oacc_declare_returns->put (decl, c);
10862 }
10863 }
10864
10865 if (gimplify_omp_ctxp)
10866 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
10867 }
10868
10869 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10870 clauses);
10871
10872 gimplify_seq_add_stmt (pre_p, stmt);
10873
10874 *expr_p = NULL_TREE;
10875 }
10876
10877 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10878 gimplification of the body, as well as scanning the body for used
10879 variables. We need to do this scan now, because variable-sized
10880 decls will be decomposed during gimplification. */
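/* Schematically (a sketch of the shape only, not the precise output):

   #pragma omp parallel [clauses]
     body
   ==>
   GIMPLE_OMP_PARALLEL <adjusted clauses>
     gimplified body  */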
10881
10882 static void
10883 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
10884 {
10885 tree expr = *expr_p;
10886 gimple *g;
10887 gimple_seq body = NULL;
10888
10889 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10890 OMP_PARALLEL_COMBINED (expr)
10891 ? ORT_COMBINED_PARALLEL
10892 : ORT_PARALLEL, OMP_PARALLEL);
10893
10894 push_gimplify_context ();
10895
10896 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10897 if (gimple_code (g) == GIMPLE_BIND)
10898 pop_gimplify_context (g);
10899 else
10900 pop_gimplify_context (NULL);
10901
10902 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
10903 OMP_PARALLEL);
10904
10905 g = gimple_build_omp_parallel (body,
10906 OMP_PARALLEL_CLAUSES (expr),
10907 NULL_TREE, NULL_TREE);
10908 if (OMP_PARALLEL_COMBINED (expr))
10909 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10910 gimplify_seq_add_stmt (pre_p, g);
10911 *expr_p = NULL_TREE;
10912 }
10913
10914 /* Gimplify the contents of an OMP_TASK statement. This involves
10915 gimplification of the body, as well as scanning the body for used
10916 variables. We need to do this scan now, because variable-sized
10917 decls will be decomposed during gimplification. */
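/* Note that OMP_TASK_BODY may be NULL_TREE here; that form represents a
   #pragma omp taskwait with depend clause(s) and is flagged through
   gimple_omp_task_set_taskwait_p below instead of receiving a body.  */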
10918
10919 static void
10920 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
10921 {
10922 tree expr = *expr_p;
10923 gimple *g;
10924 gimple_seq body = NULL;
10925
10926 if (OMP_TASK_BODY (expr) == NULL_TREE)
10927 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10928 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10929 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10930 {
10931 error_at (OMP_CLAUSE_LOCATION (c),
10932 "%<mutexinoutset%> kind in %<depend%> clause on a "
10933 "%<taskwait%> construct");
10934 break;
10935 }
10936
10937 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
10938 omp_find_clause (OMP_TASK_CLAUSES (expr),
10939 OMP_CLAUSE_UNTIED)
10940 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
10941
10942 if (OMP_TASK_BODY (expr))
10943 {
10944 push_gimplify_context ();
10945
10946 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10947 if (gimple_code (g) == GIMPLE_BIND)
10948 pop_gimplify_context (g);
10949 else
10950 pop_gimplify_context (NULL);
10951 }
10952
10953 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10954 OMP_TASK);
10955
10956 g = gimple_build_omp_task (body,
10957 OMP_TASK_CLAUSES (expr),
10958 NULL_TREE, NULL_TREE,
10959 NULL_TREE, NULL_TREE, NULL_TREE);
10960 if (OMP_TASK_BODY (expr) == NULL_TREE)
10961 gimple_omp_task_set_taskwait_p (g, true);
10962 gimplify_seq_add_stmt (pre_p, g);
10963 *expr_p = NULL_TREE;
10964 }
10965
10966 /* Gimplify the gross structure of an OMP_FOR statement. */
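/* Besides OMP_FOR itself this handles OMP_SIMD, OMP_DISTRIBUTE,
   OMP_TASKLOOP and OACC_LOOP, as well as combined constructs where
   OMP_FOR_INIT of the outer statement is empty and the actual loop
   nest is located in the body via find_combined_omp_for.  */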
10967
10968 static enum gimplify_status
10969 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
10970 {
10971 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
10972 enum gimplify_status ret = GS_ALL_DONE;
10973 enum gimplify_status tret;
10974 gomp_for *gfor;
10975 gimple_seq for_body, for_pre_body;
10976 int i;
10977 bitmap has_decl_expr = NULL;
10978 enum omp_region_type ort = ORT_WORKSHARE;
10979
10980 orig_for_stmt = for_stmt = *expr_p;
10981
10982 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
10983 != NULL_TREE);
10984 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10985 {
10986 tree *data[4] = { NULL, NULL, NULL, NULL };
10987 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
10988 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
10989 find_combined_omp_for, data, NULL);
10990 if (inner_for_stmt == NULL_TREE)
10991 {
10992 gcc_assert (seen_error ());
10993 *expr_p = NULL_TREE;
10994 return GS_ERROR;
10995 }
10996 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
10997 {
10998 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
10999 &OMP_FOR_PRE_BODY (for_stmt));
11000 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
11001 }
11002 if (OMP_FOR_PRE_BODY (inner_for_stmt))
11003 {
11004 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
11005 &OMP_FOR_PRE_BODY (for_stmt));
11006 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
11007 }
11008
11009 if (data[0])
11010 {
11011 /* We have some statements or variable declarations in between
11012 the composite construct directives. Move them so that they
11013 end up surrounding the inner_for_stmt. */
11014 data[0] = expr_p;
11015 for (i = 0; i < 3; i++)
11016 if (data[i])
11017 {
11018 tree t = *data[i];
11019 if (i < 2 && data[i + 1] == &OMP_BODY (t))
11020 data[i + 1] = data[i];
11021 *data[i] = OMP_BODY (t);
11022 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
11023 NULL_TREE, make_node (BLOCK));
11024 OMP_BODY (t) = body;
11025 append_to_statement_list_force (inner_for_stmt,
11026 &BIND_EXPR_BODY (body));
11027 *data[3] = t;
11028 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
11029 gcc_assert (*data[3] == inner_for_stmt);
11030 }
11031 return GS_OK;
11032 }
11033
11034 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11035 if (!loop_p
11036 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
11037 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11038 i)) == TREE_LIST
11039 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11040 i)))
11041 {
11042 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11043 /* Class iterators aren't allowed on OMP_SIMD, so the only
11044 case we need to solve is distribute parallel for. They are
11045 allowed on the loop construct, but that is already handled
11046 in gimplify_omp_loop. */
11047 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
11048 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
11049 && data[1]);
11050 tree orig_decl = TREE_PURPOSE (orig);
11051 tree last = TREE_VALUE (orig);
11052 tree *pc;
11053 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
11054 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
11055 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
11056 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
11057 && OMP_CLAUSE_DECL (*pc) == orig_decl)
11058 break;
11059 if (*pc == NULL_TREE)
11060 {
11061 tree *spc;
11062 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
11063 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
11064 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
11065 && OMP_CLAUSE_DECL (*spc) == orig_decl)
11066 break;
11067 if (*spc)
11068 {
11069 tree c = *spc;
11070 *spc = OMP_CLAUSE_CHAIN (c);
11071 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
11072 *pc = c;
11073 }
11074 }
11075 if (*pc == NULL_TREE)
11076 ;
11077 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
11078 {
11079 /* A private clause will appear only on inner_for_stmt.
11080 Change it into firstprivate, and add a private clause
11081 on for_stmt. */
11082 tree c = copy_node (*pc);
11083 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11084 OMP_FOR_CLAUSES (for_stmt) = c;
11085 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
11086 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11087 }
11088 else
11089 {
11090 /* A lastprivate clause will appear on both inner_for_stmt
11091 and for_stmt. Add a firstprivate clause to
11092 inner_for_stmt. */
11093 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
11094 OMP_CLAUSE_FIRSTPRIVATE);
11095 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
11096 OMP_CLAUSE_CHAIN (c) = *pc;
11097 *pc = c;
11098 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11099 }
11100 tree c = build_omp_clause (UNKNOWN_LOCATION,
11101 OMP_CLAUSE_FIRSTPRIVATE);
11102 OMP_CLAUSE_DECL (c) = last;
11103 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11104 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11105 c = build_omp_clause (UNKNOWN_LOCATION,
11106 *pc ? OMP_CLAUSE_SHARED
11107 : OMP_CLAUSE_FIRSTPRIVATE);
11108 OMP_CLAUSE_DECL (c) = orig_decl;
11109 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11110 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11111 }
11112 /* Similarly, take care of C++ range for temporaries; those should
11113 be firstprivate on the OMP_PARALLEL if there is one. */
11114 if (data[1])
11115 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11116 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
11117 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11118 i)) == TREE_LIST
11119 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11120 i)))
11121 {
11122 tree orig
11123 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11124 tree v = TREE_CHAIN (orig);
11125 tree c = build_omp_clause (UNKNOWN_LOCATION,
11126 OMP_CLAUSE_FIRSTPRIVATE);
11127 /* First add firstprivate clause for the __for_end artificial
11128 decl. */
11129 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
11130 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11131 == REFERENCE_TYPE)
11132 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11133 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11134 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11135 if (TREE_VEC_ELT (v, 0))
11136 {
11137 /* And now the same for __for_range artificial decl if it
11138 exists. */
11139 c = build_omp_clause (UNKNOWN_LOCATION,
11140 OMP_CLAUSE_FIRSTPRIVATE);
11141 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
11142 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11143 == REFERENCE_TYPE)
11144 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11145 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11146 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11147 }
11148 }
11149 }
11150
11151 switch (TREE_CODE (for_stmt))
11152 {
11153 case OMP_FOR:
11154 case OMP_DISTRIBUTE:
11155 break;
11156 case OACC_LOOP:
11157 ort = ORT_ACC;
11158 break;
11159 case OMP_TASKLOOP:
11160 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
11161 ort = ORT_UNTIED_TASKLOOP;
11162 else
11163 ort = ORT_TASKLOOP;
11164 break;
11165 case OMP_SIMD:
11166 ort = ORT_SIMD;
11167 break;
11168 default:
11169 gcc_unreachable ();
11170 }
11171
11172 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11173 clause for the IV. */
11174 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11175 {
11176 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
11177 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11178 decl = TREE_OPERAND (t, 0);
11179 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11180 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11181 && OMP_CLAUSE_DECL (c) == decl)
11182 {
11183 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11184 break;
11185 }
11186 }
11187
11188 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
11189 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
11190 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
11191 ? OMP_LOOP : TREE_CODE (for_stmt));
11192
11193 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
11194 gimplify_omp_ctxp->distribute = true;
11195
11196 /* Handle OMP_FOR_INIT. */
11197 for_pre_body = NULL;
11198 if ((ort == ORT_SIMD
11199 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
11200 && OMP_FOR_PRE_BODY (for_stmt))
11201 {
11202 has_decl_expr = BITMAP_ALLOC (NULL);
11203 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
11204 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
11205 == VAR_DECL)
11206 {
11207 t = OMP_FOR_PRE_BODY (for_stmt);
11208 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11209 }
11210 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
11211 {
11212 tree_stmt_iterator si;
11213 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
11214 tsi_next (&si))
11215 {
11216 t = tsi_stmt (si);
11217 if (TREE_CODE (t) == DECL_EXPR
11218 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
11219 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11220 }
11221 }
11222 }
11223 if (OMP_FOR_PRE_BODY (for_stmt))
11224 {
11225 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
11226 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11227 else
11228 {
11229 struct gimplify_omp_ctx ctx;
11230 memset (&ctx, 0, sizeof (ctx));
11231 ctx.region_type = ORT_NONE;
11232 gimplify_omp_ctxp = &ctx;
11233 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11234 gimplify_omp_ctxp = NULL;
11235 }
11236 }
11237 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
11238
11239 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11240 for_stmt = inner_for_stmt;
11241
11242 /* For taskloop, we need to gimplify the start, end and step before the
11243 taskloop, outside of the taskloop omp context. */
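/* E.g. (an illustrative sketch) for
   #pragma omp taskloop
   for (i = f1 (); i < f2 (); i += f3 ())
   the f1 (), f2 () and f3 () results are evaluated into temporaries
   before the construct and those temporaries are made firstprivate on
   it; that is what the get_initialized_tmp_var and build_omp_clause
   calls below arrange.  */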
11244 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11245 {
11246 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11247 {
11248 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11249 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11250 {
11251 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11252 TREE_OPERAND (t, 1)
11253 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
11254 gimple_seq_empty_p (for_pre_body)
11255 ? pre_p : &for_pre_body, NULL,
11256 false);
11257 /* A reference-to-pointer conversion is considered useless,
11258 but it is significant for the firstprivate clause. Force
11259 it here. */
11260 if (TREE_CODE (type) == POINTER_TYPE
11261 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11262 == REFERENCE_TYPE))
11263 {
11264 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11265 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11266 TREE_OPERAND (t, 1));
11267 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11268 ? pre_p : &for_pre_body);
11269 TREE_OPERAND (t, 1) = v;
11270 }
11271 tree c = build_omp_clause (input_location,
11272 OMP_CLAUSE_FIRSTPRIVATE);
11273 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11274 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11275 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11276 }
11277
11278 /* Handle OMP_FOR_COND. */
11279 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11280 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11281 {
11282 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11283 TREE_OPERAND (t, 1)
11284 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
11285 gimple_seq_empty_p (for_pre_body)
11286 ? pre_p : &for_pre_body, NULL,
11287 false);
11288 /* A reference-to-pointer conversion is considered useless,
11289 but it is significant for the firstprivate clause. Force
11290 it here. */
11291 if (TREE_CODE (type) == POINTER_TYPE
11292 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11293 == REFERENCE_TYPE))
11294 {
11295 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11296 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11297 TREE_OPERAND (t, 1));
11298 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11299 ? pre_p : &for_pre_body);
11300 TREE_OPERAND (t, 1) = v;
11301 }
11302 tree c = build_omp_clause (input_location,
11303 OMP_CLAUSE_FIRSTPRIVATE);
11304 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11305 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11306 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11307 }
11308
11309 /* Handle OMP_FOR_INCR. */
11310 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11311 if (TREE_CODE (t) == MODIFY_EXPR)
11312 {
11313 decl = TREE_OPERAND (t, 0);
11314 t = TREE_OPERAND (t, 1);
11315 tree *tp = &TREE_OPERAND (t, 1);
11316 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
11317 tp = &TREE_OPERAND (t, 0);
11318
11319 if (!is_gimple_constant (*tp))
11320 {
11321 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
11322 ? pre_p : &for_pre_body;
11323 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
11324 tree c = build_omp_clause (input_location,
11325 OMP_CLAUSE_FIRSTPRIVATE);
11326 OMP_CLAUSE_DECL (c) = *tp;
11327 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11328 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11329 }
11330 }
11331 }
11332
11333 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
11334 OMP_TASKLOOP);
11335 }
11336
11337 if (orig_for_stmt != for_stmt)
11338 gimplify_omp_ctxp->combined_loop = true;
11339
11340 for_body = NULL;
11341 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11342 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
11343 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11344 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
11345
11346 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
11347 bool is_doacross = false;
11348 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
11349 {
11350 is_doacross = true;
11351 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
11352 (OMP_FOR_INIT (for_stmt))
11353 * 2);
11354 }
11355 int collapse = 1, tile = 0;
11356 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
11357 if (c)
11358 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
11359 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
11360 if (c)
11361 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
11362 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11363 {
11364 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11365 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11366 decl = TREE_OPERAND (t, 0);
11367 gcc_assert (DECL_P (decl));
11368 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
11369 || POINTER_TYPE_P (TREE_TYPE (decl)));
11370 if (is_doacross)
11371 {
11372 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
11373 {
11374 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11375 if (TREE_CODE (orig_decl) == TREE_LIST)
11376 {
11377 orig_decl = TREE_PURPOSE (orig_decl);
11378 if (!orig_decl)
11379 orig_decl = decl;
11380 }
11381 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
11382 }
11383 else
11384 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11385 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11386 }
11387
11388 /* Make sure the iteration variable is private. */
11389 tree c = NULL_TREE;
11390 tree c2 = NULL_TREE;
11391 if (orig_for_stmt != for_stmt)
11392 {
11393 /* Preserve this information until we gimplify the inner simd. */
11394 if (has_decl_expr
11395 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11396 TREE_PRIVATE (t) = 1;
11397 }
11398 else if (ort == ORT_SIMD)
11399 {
11400 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11401 (splay_tree_key) decl);
11402 omp_is_private (gimplify_omp_ctxp, decl,
11403 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11404 != 1));
11405 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
11406 {
11407 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11408 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
11409 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11410 OMP_CLAUSE_LASTPRIVATE);
11411 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11412 OMP_CLAUSE_LASTPRIVATE))
11413 if (OMP_CLAUSE_DECL (c3) == decl)
11414 {
11415 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11416 "conditional %<lastprivate%> on loop "
11417 "iterator %qD ignored", decl);
11418 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11419 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11420 }
11421 }
11422 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
11423 {
11424 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11425 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11426 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
11427 if ((has_decl_expr
11428 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11429 || TREE_PRIVATE (t))
11430 {
11431 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11432 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11433 }
11434 struct gimplify_omp_ctx *outer
11435 = gimplify_omp_ctxp->outer_context;
11436 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11437 {
11438 if (outer->region_type == ORT_WORKSHARE
11439 && outer->combined_loop)
11440 {
11441 n = splay_tree_lookup (outer->variables,
11442 (splay_tree_key)decl);
11443 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11444 {
11445 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11446 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11447 }
11448 else
11449 {
11450 struct gimplify_omp_ctx *octx = outer->outer_context;
11451 if (octx
11452 && octx->region_type == ORT_COMBINED_PARALLEL
11453 && octx->outer_context
11454 && (octx->outer_context->region_type
11455 == ORT_WORKSHARE)
11456 && octx->outer_context->combined_loop)
11457 {
11458 octx = octx->outer_context;
11459 n = splay_tree_lookup (octx->variables,
11460 (splay_tree_key)decl);
11461 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11462 {
11463 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11464 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11465 }
11466 }
11467 }
11468 }
11469 }
11470
11471 OMP_CLAUSE_DECL (c) = decl;
11472 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11473 OMP_FOR_CLAUSES (for_stmt) = c;
11474 omp_add_variable (gimplify_omp_ctxp, decl, flags);
11475 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11476 {
11477 if (outer->region_type == ORT_WORKSHARE
11478 && outer->combined_loop)
11479 {
11480 if (outer->outer_context
11481 && (outer->outer_context->region_type
11482 == ORT_COMBINED_PARALLEL))
11483 outer = outer->outer_context;
11484 else if (omp_check_private (outer, decl, false))
11485 outer = NULL;
11486 }
11487 else if (((outer->region_type & ORT_TASKLOOP)
11488 == ORT_TASKLOOP)
11489 && outer->combined_loop
11490 && !omp_check_private (gimplify_omp_ctxp,
11491 decl, false))
11492 ;
11493 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11494 {
11495 omp_notice_variable (outer, decl, true);
11496 outer = NULL;
11497 }
11498 if (outer)
11499 {
11500 n = splay_tree_lookup (outer->variables,
11501 (splay_tree_key)decl);
11502 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11503 {
11504 omp_add_variable (outer, decl,
11505 GOVD_LASTPRIVATE | GOVD_SEEN);
11506 if (outer->region_type == ORT_COMBINED_PARALLEL
11507 && outer->outer_context
11508 && (outer->outer_context->region_type
11509 == ORT_WORKSHARE)
11510 && outer->outer_context->combined_loop)
11511 {
11512 outer = outer->outer_context;
11513 n = splay_tree_lookup (outer->variables,
11514 (splay_tree_key)decl);
11515 if (omp_check_private (outer, decl, false))
11516 outer = NULL;
11517 else if (n == NULL
11518 || ((n->value & GOVD_DATA_SHARE_CLASS)
11519 == 0))
11520 omp_add_variable (outer, decl,
11521 GOVD_LASTPRIVATE
11522 | GOVD_SEEN);
11523 else
11524 outer = NULL;
11525 }
11526 if (outer && outer->outer_context
11527 && ((outer->outer_context->region_type
11528 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11529 || (((outer->region_type & ORT_TASKLOOP)
11530 == ORT_TASKLOOP)
11531 && (outer->outer_context->region_type
11532 == ORT_COMBINED_PARALLEL))))
11533 {
11534 outer = outer->outer_context;
11535 n = splay_tree_lookup (outer->variables,
11536 (splay_tree_key)decl);
11537 if (n == NULL
11538 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11539 omp_add_variable (outer, decl,
11540 GOVD_SHARED | GOVD_SEEN);
11541 else
11542 outer = NULL;
11543 }
11544 if (outer && outer->outer_context)
11545 omp_notice_variable (outer->outer_context, decl,
11546 true);
11547 }
11548 }
11549 }
11550 }
11551 else
11552 {
11553 bool lastprivate
11554 = (!has_decl_expr
11555 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
11556 if (TREE_PRIVATE (t))
11557 lastprivate = false;
11558 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
11559 {
11560 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11561 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
11562 lastprivate = false;
11563 }
11564
11565 struct gimplify_omp_ctx *outer
11566 = gimplify_omp_ctxp->outer_context;
11567 if (outer && lastprivate)
11568 {
11569 if (outer->region_type == ORT_WORKSHARE
11570 && outer->combined_loop)
11571 {
11572 n = splay_tree_lookup (outer->variables,
11573 (splay_tree_key)decl);
11574 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11575 {
11576 lastprivate = false;
11577 outer = NULL;
11578 }
11579 else if (outer->outer_context
11580 && (outer->outer_context->region_type
11581 == ORT_COMBINED_PARALLEL))
11582 outer = outer->outer_context;
11583 else if (omp_check_private (outer, decl, false))
11584 outer = NULL;
11585 }
11586 else if (((outer->region_type & ORT_TASKLOOP)
11587 == ORT_TASKLOOP)
11588 && outer->combined_loop
11589 && !omp_check_private (gimplify_omp_ctxp,
11590 decl, false))
11591 ;
11592 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11593 {
11594 omp_notice_variable (outer, decl, true);
11595 outer = NULL;
11596 }
11597 if (outer)
11598 {
11599 n = splay_tree_lookup (outer->variables,
11600 (splay_tree_key)decl);
11601 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11602 {
11603 omp_add_variable (outer, decl,
11604 GOVD_LASTPRIVATE | GOVD_SEEN);
11605 if (outer->region_type == ORT_COMBINED_PARALLEL
11606 && outer->outer_context
11607 && (outer->outer_context->region_type
11608 == ORT_WORKSHARE)
11609 && outer->outer_context->combined_loop)
11610 {
11611 outer = outer->outer_context;
11612 n = splay_tree_lookup (outer->variables,
11613 (splay_tree_key)decl);
11614 if (omp_check_private (outer, decl, false))
11615 outer = NULL;
11616 else if (n == NULL
11617 || ((n->value & GOVD_DATA_SHARE_CLASS)
11618 == 0))
11619 omp_add_variable (outer, decl,
11620 GOVD_LASTPRIVATE
11621 | GOVD_SEEN);
11622 else
11623 outer = NULL;
11624 }
11625 if (outer && outer->outer_context
11626 && ((outer->outer_context->region_type
11627 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11628 || (((outer->region_type & ORT_TASKLOOP)
11629 == ORT_TASKLOOP)
11630 && (outer->outer_context->region_type
11631 == ORT_COMBINED_PARALLEL))))
11632 {
11633 outer = outer->outer_context;
11634 n = splay_tree_lookup (outer->variables,
11635 (splay_tree_key)decl);
11636 if (n == NULL
11637 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11638 omp_add_variable (outer, decl,
11639 GOVD_SHARED | GOVD_SEEN);
11640 else
11641 outer = NULL;
11642 }
11643 if (outer && outer->outer_context)
11644 omp_notice_variable (outer->outer_context, decl,
11645 true);
11646 }
11647 }
11648 }
11649
11650 c = build_omp_clause (input_location,
11651 lastprivate ? OMP_CLAUSE_LASTPRIVATE
11652 : OMP_CLAUSE_PRIVATE);
11653 OMP_CLAUSE_DECL (c) = decl;
11654 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11655 OMP_FOR_CLAUSES (for_stmt) = c;
11656 omp_add_variable (gimplify_omp_ctxp, decl,
11657 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
11658 | GOVD_EXPLICIT | GOVD_SEEN);
11659 c = NULL_TREE;
11660 }
11661 }
11662 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
11663 {
11664 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11665 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11666 (splay_tree_key) decl);
11667 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
11668 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11669 OMP_CLAUSE_LASTPRIVATE);
11670 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11671 OMP_CLAUSE_LASTPRIVATE))
11672 if (OMP_CLAUSE_DECL (c3) == decl)
11673 {
11674 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11675 "conditional %<lastprivate%> on loop "
11676 "iterator %qD ignored", decl);
11677 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11678 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11679 }
11680 }
11681 else
11682 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
11683
11684 /* If DECL is not a gimple register, create a temporary variable to act
11685 as an iteration counter. This is valid, since DECL cannot be
11686 modified in the body of the loop. Similarly for any iteration vars
11687 in simd with collapse > 1 where the iterator vars must be
11688 lastprivate. */
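/* In other words (a sketch): for a non-register DECL we create VAR,
   make the loop iterate on VAR instead, and emit "DECL = VAR" at the
   start of the body so DECL still carries the current value there.  */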
11689 if (orig_for_stmt != for_stmt)
11690 var = decl;
11691 else if (!is_gimple_reg (decl)
11692 || (ort == ORT_SIMD
11693 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
11694 {
11695 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11696 /* Make sure omp_add_variable is not called on it prematurely.
11697 We call it ourselves a few lines later. */
11698 gimplify_omp_ctxp = NULL;
11699 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11700 gimplify_omp_ctxp = ctx;
11701 TREE_OPERAND (t, 0) = var;
11702
11703 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
11704
11705 if (ort == ORT_SIMD
11706 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11707 {
11708 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11709 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
11710 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
11711 OMP_CLAUSE_DECL (c2) = var;
11712 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
11713 OMP_FOR_CLAUSES (for_stmt) = c2;
11714 omp_add_variable (gimplify_omp_ctxp, var,
11715 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
11716 if (c == NULL_TREE)
11717 {
11718 c = c2;
11719 c2 = NULL_TREE;
11720 }
11721 }
11722 else
11723 omp_add_variable (gimplify_omp_ctxp, var,
11724 GOVD_PRIVATE | GOVD_SEEN);
11725 }
11726 else
11727 var = decl;
11728
11729 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11730 is_gimple_val, fb_rvalue, false);
11731 ret = MIN (ret, tret);
11732 if (ret == GS_ERROR)
11733 return ret;
11734
11735 /* Handle OMP_FOR_COND. */
11736 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11737 gcc_assert (COMPARISON_CLASS_P (t));
11738 gcc_assert (TREE_OPERAND (t, 0) == decl);
11739
11740 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11741 is_gimple_val, fb_rvalue, false);
11742 ret = MIN (ret, tret);
11743
11744 /* Handle OMP_FOR_INCR. */
11745 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11746 switch (TREE_CODE (t))
11747 {
11748 case PREINCREMENT_EXPR:
11749 case POSTINCREMENT_EXPR:
11750 {
11751 tree decl = TREE_OPERAND (t, 0);
11752 /* c_omp_for_incr_canonicalize_ptr() should have been
11753 called to massage things appropriately. */
11754 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11755
11756 if (orig_for_stmt != for_stmt)
11757 break;
11758 t = build_int_cst (TREE_TYPE (decl), 1);
11759 if (c)
11760 OMP_CLAUSE_LINEAR_STEP (c) = t;
11761 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11762 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11763 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11764 break;
11765 }
11766
11767 case PREDECREMENT_EXPR:
11768 case POSTDECREMENT_EXPR:
11769 /* c_omp_for_incr_canonicalize_ptr() should have been
11770 called to massage things appropriately. */
11771 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11772 if (orig_for_stmt != for_stmt)
11773 break;
11774 t = build_int_cst (TREE_TYPE (decl), -1);
11775 if (c)
11776 OMP_CLAUSE_LINEAR_STEP (c) = t;
11777 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11778 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11779 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11780 break;
11781
11782 case MODIFY_EXPR:
11783 gcc_assert (TREE_OPERAND (t, 0) == decl);
11784 TREE_OPERAND (t, 0) = var;
11785
11786 t = TREE_OPERAND (t, 1);
11787 switch (TREE_CODE (t))
11788 {
11789 case PLUS_EXPR:
11790 if (TREE_OPERAND (t, 1) == decl)
11791 {
11792 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11793 TREE_OPERAND (t, 0) = var;
11794 break;
11795 }
11796
11797 /* Fallthru. */
11798 case MINUS_EXPR:
11799 case POINTER_PLUS_EXPR:
11800 gcc_assert (TREE_OPERAND (t, 0) == decl);
11801 TREE_OPERAND (t, 0) = var;
11802 break;
11803 default:
11804 gcc_unreachable ();
11805 }
11806
11807 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11808 is_gimple_val, fb_rvalue, false);
11809 ret = MIN (ret, tret);
11810 if (c)
11811 {
11812 tree step = TREE_OPERAND (t, 1);
11813 tree stept = TREE_TYPE (decl);
11814 if (POINTER_TYPE_P (stept))
11815 stept = sizetype;
11816 step = fold_convert (stept, step);
11817 if (TREE_CODE (t) == MINUS_EXPR)
11818 step = fold_build1 (NEGATE_EXPR, stept, step);
11819 OMP_CLAUSE_LINEAR_STEP (c) = step;
11820 if (step != TREE_OPERAND (t, 1))
11821 {
11822 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11823 &for_pre_body, NULL,
11824 is_gimple_val, fb_rvalue, false);
11825 ret = MIN (ret, tret);
11826 }
11827 }
11828 break;
11829
11830 default:
11831 gcc_unreachable ();
11832 }
11833
11834 if (c2)
11835 {
11836 gcc_assert (c);
11837 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11838 }
11839
11840 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
11841 {
11842 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
11843 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11844 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11845 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11846 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11847 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11848 && OMP_CLAUSE_DECL (c) == decl)
11849 {
11850 if (is_doacross && (collapse == 1 || i >= collapse))
11851 t = var;
11852 else
11853 {
11854 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11855 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11856 gcc_assert (TREE_OPERAND (t, 0) == var);
11857 t = TREE_OPERAND (t, 1);
11858 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11859 || TREE_CODE (t) == MINUS_EXPR
11860 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11861 gcc_assert (TREE_OPERAND (t, 0) == var);
11862 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11863 is_doacross ? var : decl,
11864 TREE_OPERAND (t, 1));
11865 }
11866 gimple_seq *seq;
11867 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11868 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11869 else
11870 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
11871 push_gimplify_context ();
11872 gimplify_assign (decl, t, seq);
11873 gimple *bind = NULL;
11874 if (gimplify_ctxp->temps)
11875 {
11876 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
11877 *seq = NULL;
11878 gimplify_seq_add_stmt (seq, bind);
11879 }
11880 pop_gimplify_context (bind);
11881 }
11882 }
11883 }
11884
11885 BITMAP_FREE (has_decl_expr);
11886
11887 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11888 || (loop_p && orig_for_stmt == for_stmt))
11889 {
11890 push_gimplify_context ();
11891 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11892 {
11893 OMP_FOR_BODY (orig_for_stmt)
11894 = build3 (BIND_EXPR, void_type_node, NULL,
11895 OMP_FOR_BODY (orig_for_stmt), NULL);
11896 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11897 }
11898 }
11899
11900 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11901 &for_body);
11902
11903 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11904 || (loop_p && orig_for_stmt == for_stmt))
11905 {
11906 if (gimple_code (g) == GIMPLE_BIND)
11907 pop_gimplify_context (g);
11908 else
11909 pop_gimplify_context (NULL);
11910 }
11911
11912 if (orig_for_stmt != for_stmt)
11913 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11914 {
11915 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11916 decl = TREE_OPERAND (t, 0);
11917 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11918 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11919 gimplify_omp_ctxp = ctx->outer_context;
11920 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11921 gimplify_omp_ctxp = ctx;
11922 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11923 TREE_OPERAND (t, 0) = var;
11924 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11925 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
11926 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
11927 }
11928
11929 gimplify_adjust_omp_clauses (pre_p, for_body,
11930 &OMP_FOR_CLAUSES (orig_for_stmt),
11931 TREE_CODE (orig_for_stmt));
11932
11933 int kind;
11934 switch (TREE_CODE (orig_for_stmt))
11935 {
11936 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
11937 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
11938 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
11939 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
11940 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
11941 default:
11942 gcc_unreachable ();
11943 }
11944 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
11945 {
11946 gimplify_seq_add_seq (pre_p, for_pre_body);
11947 for_pre_body = NULL;
11948 }
11949 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
11950 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
11951 for_pre_body);
11952 if (orig_for_stmt != for_stmt)
11953 gimple_omp_for_set_combined_p (gfor, true);
11954 if (gimplify_omp_ctxp
11955 && (gimplify_omp_ctxp->combined_loop
11956 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
11957 && gimplify_omp_ctxp->outer_context
11958 && gimplify_omp_ctxp->outer_context->combined_loop)))
11959 {
11960 gimple_omp_for_set_combined_into_p (gfor, true);
11961 if (gimplify_omp_ctxp->combined_loop)
11962 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
11963 else
11964 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
11965 }
11966
11967 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11968 {
11969 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11970 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
11971 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
11972 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11973 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
11974 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
11975 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11976 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
11977 }
11978
11979 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
11980 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
11981 The outer taskloop stands for computing the number of iterations
11982 (the counts for collapsed loops) and holds the taskloop-specific
11983 clauses. The task construct stands for the effect of data sharing
11984 on the explicit task it creates, and the inner taskloop stands for
11985 the expansion of the static loop inside the explicit task construct. */
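/* Schematically (nesting only):

   GIMPLE_OMP_FOR (taskloop, outer: grainsize/num_tasks/... clauses)
     GIMPLE_OMP_TASK (shared/firstprivate/... clauses)
       GIMPLE_OMP_FOR (taskloop, inner: lastprivate/... clauses)
         loop body  */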
11986 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11987 {
11988 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
11989 tree task_clauses = NULL_TREE;
11990 tree c = *gfor_clauses_ptr;
11991 tree *gtask_clauses_ptr = &task_clauses;
11992 tree outer_for_clauses = NULL_TREE;
11993 tree *gforo_clauses_ptr = &outer_for_clauses;
11994 for (; c; c = OMP_CLAUSE_CHAIN (c))
11995 switch (OMP_CLAUSE_CODE (c))
11996 {
11997 /* These clauses are allowed on the task; move them there. */
11998 case OMP_CLAUSE_SHARED:
11999 case OMP_CLAUSE_FIRSTPRIVATE:
12000 case OMP_CLAUSE_DEFAULT:
12001 case OMP_CLAUSE_IF:
12002 case OMP_CLAUSE_UNTIED:
12003 case OMP_CLAUSE_FINAL:
12004 case OMP_CLAUSE_MERGEABLE:
12005 case OMP_CLAUSE_PRIORITY:
12006 case OMP_CLAUSE_REDUCTION:
12007 case OMP_CLAUSE_IN_REDUCTION:
12008 *gtask_clauses_ptr = c;
12009 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12010 break;
12011 case OMP_CLAUSE_PRIVATE:
12012 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
12013 {
12014 /* We want private on outer for and firstprivate
12015 on task. */
12016 *gtask_clauses_ptr
12017 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12018 OMP_CLAUSE_FIRSTPRIVATE);
12019 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12020 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12021 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12022 *gforo_clauses_ptr = c;
12023 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12024 }
12025 else
12026 {
12027 *gtask_clauses_ptr = c;
12028 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12029 }
12030 break;
12031 /* These clauses go onto the outer taskloop. */
12032 case OMP_CLAUSE_GRAINSIZE:
12033 case OMP_CLAUSE_NUM_TASKS:
12034 case OMP_CLAUSE_NOGROUP:
12035 *gforo_clauses_ptr = c;
12036 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12037 break;
12038 /* This taskloop clause we duplicate on both taskloops. */
12039 case OMP_CLAUSE_COLLAPSE:
12040 *gfor_clauses_ptr = c;
12041 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12042 *gforo_clauses_ptr = copy_node (c);
12043 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12044 break;
12045 /* For lastprivate, keep the clause on the inner taskloop, and add
12046 a shared clause on the task. If the same decl is also firstprivate,
12047 also add a firstprivate clause on the inner taskloop. */
12048 case OMP_CLAUSE_LASTPRIVATE:
12049 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12050 {
12051 /* For taskloop C++ lastprivate IVs, we want:
12052 1) private on outer taskloop
12053 2) firstprivate and shared on task
12054 3) lastprivate on inner taskloop */
12055 *gtask_clauses_ptr
12056 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12057 OMP_CLAUSE_FIRSTPRIVATE);
12058 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12059 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12060 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12061 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
12062 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12063 OMP_CLAUSE_PRIVATE);
12064 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
12065 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
12066 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
12067 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12068 }
12069 *gfor_clauses_ptr = c;
12070 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12071 *gtask_clauses_ptr
12072 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
12073 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12074 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12075 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
12076 gtask_clauses_ptr
12077 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12078 break;
12079 default:
12080 gcc_unreachable ();
12081 }
12082 *gfor_clauses_ptr = NULL_TREE;
12083 *gtask_clauses_ptr = NULL_TREE;
12084 *gforo_clauses_ptr = NULL_TREE;
12085 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
12086 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
12087 NULL_TREE, NULL_TREE, NULL_TREE);
12088 gimple_omp_task_set_taskloop_p (g, true);
12089 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
12090 gomp_for *gforo
12091 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
12092 gimple_omp_for_collapse (gfor),
12093 gimple_omp_for_pre_body (gfor));
12094 gimple_omp_for_set_pre_body (gfor, NULL);
12095 gimple_omp_for_set_combined_p (gforo, true);
12096 gimple_omp_for_set_combined_into_p (gfor, true);
12097 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
12098 {
12099 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
12100 tree v = create_tmp_var (type);
12101 gimple_omp_for_set_index (gforo, i, v);
12102 t = unshare_expr (gimple_omp_for_initial (gfor, i));
12103 gimple_omp_for_set_initial (gforo, i, t);
12104 gimple_omp_for_set_cond (gforo, i,
12105 gimple_omp_for_cond (gfor, i));
12106 t = unshare_expr (gimple_omp_for_final (gfor, i));
12107 gimple_omp_for_set_final (gforo, i, t);
12108 t = unshare_expr (gimple_omp_for_incr (gfor, i));
12109 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
12110 TREE_OPERAND (t, 0) = v;
12111 gimple_omp_for_set_incr (gforo, i, t);
12112 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
12113 OMP_CLAUSE_DECL (t) = v;
12114 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
12115 gimple_omp_for_set_clauses (gforo, t);
12116 }
12117 gimplify_seq_add_stmt (pre_p, gforo);
12118 }
12119 else
12120 gimplify_seq_add_stmt (pre_p, gfor);
12121
12122 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
12123 {
12124 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12125 unsigned lastprivate_conditional = 0;
12126 while (ctx
12127 && (ctx->region_type == ORT_TARGET_DATA
12128 || ctx->region_type == ORT_TASKGROUP))
12129 ctx = ctx->outer_context;
12130 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
12131 for (tree c = gimple_omp_for_clauses (gfor);
12132 c; c = OMP_CLAUSE_CHAIN (c))
12133 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12134 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12135 ++lastprivate_conditional;
12136 if (lastprivate_conditional)
12137 {
12138 struct omp_for_data fd;
12139 omp_extract_for_data (gfor, &fd, NULL);
12140 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
12141 lastprivate_conditional);
12142 tree var = create_tmp_var_raw (type);
12143 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
12144 OMP_CLAUSE_DECL (c) = var;
12145 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12146 gimple_omp_for_set_clauses (gfor, c);
12147 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
12148 }
12149 }
12150 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
12151 {
12152 unsigned lastprivate_conditional = 0;
12153 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
12154 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12155 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12156 ++lastprivate_conditional;
12157 if (lastprivate_conditional)
12158 {
12159 struct omp_for_data fd;
12160 omp_extract_for_data (gfor, &fd, NULL);
12161 tree type = unsigned_type_for (fd.iter_type);
12162 while (lastprivate_conditional--)
12163 {
12164 tree c = build_omp_clause (UNKNOWN_LOCATION,
12165 OMP_CLAUSE__CONDTEMP_);
12166 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
12167 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12168 gimple_omp_for_set_clauses (gfor, c);
12169 }
12170 }
12171 }
12172
12173 if (ret != GS_ALL_DONE)
12174 return GS_ERROR;
12175 *expr_p = NULL_TREE;
12176 return GS_ALL_DONE;
12177 }
12178
12179 /* Helper for gimplify_omp_loop, called through walk_tree. */
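/* DATA points to a two-element array of OMP_CLAUSE_REDUCTION clauses;
   occurrences of the placeholder decls of the first clause are replaced
   by the corresponding placeholders of the second clause.  */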
12180
12181 static tree
12182 replace_reduction_placeholders (tree *tp, int *walk_subtrees, void *data)
12183 {
12184 if (DECL_P (*tp))
12185 {
12186 tree *d = (tree *) data;
12187 if (*tp == OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[0]))
12188 {
12189 *tp = OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[1]);
12190 *walk_subtrees = 0;
12191 }
12192 else if (*tp == OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[0]))
12193 {
12194 *tp = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[1]);
12195 *walk_subtrees = 0;
12196 }
12197 }
12198 return NULL_TREE;
12199 }
12200
12201 /* Gimplify the gross structure of an OMP_LOOP statement. */
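/* The loop construct is lowered onto existing constructs: the OMP_LOOP
   itself becomes an OMP_SIMD; with bind(thread) it stays a plain simd,
   with bind(parallel) it is additionally wrapped in an OMP_FOR, and with
   bind(teams) in an OMP_FOR, a combined OMP_PARALLEL and an
   OMP_DISTRIBUTE (i.e. distribute parallel for simd); see the pass loop
   at the end of the function.  */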
12202
12203 static enum gimplify_status
12204 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
12205 {
12206 tree for_stmt = *expr_p;
12207 tree clauses = OMP_FOR_CLAUSES (for_stmt);
12208 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
12209 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
12210 int i;
12211
12212 /* If order is not present, the behavior is as if order(concurrent)
12213 appeared. */
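/* order(concurrent) asserts that the iterations may execute
   concurrently, which is what permits the lowering onto simd performed
   later in this function.  */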
12214 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
12215 if (order == NULL_TREE)
12216 {
12217 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
12218 OMP_CLAUSE_CHAIN (order) = clauses;
12219 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
12220 }
12221
12222 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
12223 if (bind == NULL_TREE)
12224 {
12225 if (!flag_openmp) /* flag_openmp_simd */
12226 ;
12227 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
12228 kind = OMP_CLAUSE_BIND_TEAMS;
12229 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
12230 kind = OMP_CLAUSE_BIND_PARALLEL;
12231 else
12232 {
12233 for (; octx; octx = octx->outer_context)
12234 {
12235 if ((octx->region_type & ORT_ACC) != 0
12236 || octx->region_type == ORT_NONE
12237 || octx->region_type == ORT_IMPLICIT_TARGET)
12238 continue;
12239 break;
12240 }
12241 if (octx == NULL && !in_omp_construct)
12242 error_at (EXPR_LOCATION (for_stmt),
12243 "%<bind%> clause not specified on a %<loop%> "
12244 "construct not nested inside another OpenMP construct");
12245 }
12246 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
12247 OMP_CLAUSE_CHAIN (bind) = clauses;
12248 OMP_CLAUSE_BIND_KIND (bind) = kind;
12249 OMP_FOR_CLAUSES (for_stmt) = bind;
12250 }
12251 else
12252 switch (OMP_CLAUSE_BIND_KIND (bind))
12253 {
12254 case OMP_CLAUSE_BIND_THREAD:
12255 break;
12256 case OMP_CLAUSE_BIND_PARALLEL:
12257 if (!flag_openmp) /* flag_openmp_simd */
12258 {
12259 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12260 break;
12261 }
12262 for (; octx; octx = octx->outer_context)
12263 if (octx->region_type == ORT_SIMD
12264 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
12265 {
12266 error_at (EXPR_LOCATION (for_stmt),
12267 "%<bind(parallel)%> on a %<loop%> construct nested "
12268 "inside %<simd%> construct");
12269 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12270 break;
12271 }
12272 kind = OMP_CLAUSE_BIND_PARALLEL;
12273 break;
12274 case OMP_CLAUSE_BIND_TEAMS:
12275 if (!flag_openmp) /* flag_openmp_simd */
12276 {
12277 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12278 break;
12279 }
12280 if ((octx
12281 && octx->region_type != ORT_IMPLICIT_TARGET
12282 && octx->region_type != ORT_NONE
12283 && (octx->region_type & ORT_TEAMS) == 0)
12284 || in_omp_construct)
12285 {
12286 error_at (EXPR_LOCATION (for_stmt),
12287 "%<bind(teams)%> on a %<loop%> region not strictly "
12288 "nested inside of a %<teams%> region");
12289 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12290 break;
12291 }
12292 kind = OMP_CLAUSE_BIND_TEAMS;
12293 break;
12294 default:
12295 gcc_unreachable ();
12296 }
12297
12298 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
12299 switch (OMP_CLAUSE_CODE (*pc))
12300 {
12301 case OMP_CLAUSE_REDUCTION:
12302 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
12303 {
12304 error_at (OMP_CLAUSE_LOCATION (*pc),
12305 "%<inscan%> %<reduction%> clause on "
12306 "%qs construct", "loop");
12307 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
12308 }
12309 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
12310 {
12311 error_at (OMP_CLAUSE_LOCATION (*pc),
12312 "invalid %<task%> reduction modifier on construct "
12313 "other than %<parallel%>, %<for%> or %<sections%>");
12314 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
12315 }
12316 pc = &OMP_CLAUSE_CHAIN (*pc);
12317 break;
12318 case OMP_CLAUSE_LASTPRIVATE:
12319 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12320 {
12321 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12322 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12323 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
12324 break;
12325 if (OMP_FOR_ORIG_DECLS (for_stmt)
12326 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12327 i)) == TREE_LIST
12328 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12329 i)))
12330 {
12331 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12332 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
12333 break;
12334 }
12335 }
12336 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
12337 {
12338 error_at (OMP_CLAUSE_LOCATION (*pc),
12339 "%<lastprivate%> clause on a %<loop%> construct refers "
12340 "to a variable %qD which is not the loop iterator",
12341 OMP_CLAUSE_DECL (*pc));
12342 *pc = OMP_CLAUSE_CHAIN (*pc);
12343 break;
12344 }
12345 pc = &OMP_CLAUSE_CHAIN (*pc);
12346 break;
12347 default:
12348 pc = &OMP_CLAUSE_CHAIN (*pc);
12349 break;
12350 }
12351
12352 TREE_SET_CODE (for_stmt, OMP_SIMD);
12353
12354 int last;
12355 switch (kind)
12356 {
12357 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
12358 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
12359 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
12360 }
12361 for (int pass = 1; pass <= last; pass++)
12362 {
12363 if (pass == 2)
12364 {
12365 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12366 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
12367 *expr_p = make_node (OMP_PARALLEL);
12368 TREE_TYPE (*expr_p) = void_type_node;
12369 OMP_PARALLEL_BODY (*expr_p) = bind;
12370 OMP_PARALLEL_COMBINED (*expr_p) = 1;
12371 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
12372 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
12373 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12374 if (OMP_FOR_ORIG_DECLS (for_stmt)
12375 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
12376 == TREE_LIST))
12377 {
12378 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12379 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
12380 {
12381 *pc = build_omp_clause (UNKNOWN_LOCATION,
12382 OMP_CLAUSE_FIRSTPRIVATE);
12383 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
12384 pc = &OMP_CLAUSE_CHAIN (*pc);
12385 }
12386 }
12387 }
12388 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
12389 tree *pc = &OMP_FOR_CLAUSES (t);
12390 TREE_TYPE (t) = void_type_node;
12391 OMP_FOR_BODY (t) = *expr_p;
12392 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
12393 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12394 switch (OMP_CLAUSE_CODE (c))
12395 {
12396 case OMP_CLAUSE_BIND:
12397 case OMP_CLAUSE_ORDER:
12398 case OMP_CLAUSE_COLLAPSE:
12399 *pc = copy_node (c);
12400 pc = &OMP_CLAUSE_CHAIN (*pc);
12401 break;
12402 case OMP_CLAUSE_PRIVATE:
12403 case OMP_CLAUSE_FIRSTPRIVATE:
12404 /* Only needed on innermost. */
12405 break;
12406 case OMP_CLAUSE_LASTPRIVATE:
12407 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
12408 {
12409 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12410 OMP_CLAUSE_FIRSTPRIVATE);
12411 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
12412 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12413 pc = &OMP_CLAUSE_CHAIN (*pc);
12414 }
12415 *pc = copy_node (c);
12416 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
12417 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12418 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12419 {
12420 if (pass != last)
12421 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
12422 else
12423 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12424 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
12425 }
12426 pc = &OMP_CLAUSE_CHAIN (*pc);
12427 break;
12428 case OMP_CLAUSE_REDUCTION:
12429 *pc = copy_node (c);
12430 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
12431 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12432 OMP_CLAUSE_REDUCTION_INIT (*pc)
12433 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
12434 OMP_CLAUSE_REDUCTION_MERGE (*pc)
12435 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
12436 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
12437 {
12438 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
12439 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
12440 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
12441 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
12442 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
12443 tree nc = *pc;
12444 tree data[2] = { c, nc };
12445 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc),
12446 replace_reduction_placeholders,
12447 data);
12448 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc),
12449 replace_reduction_placeholders,
12450 data);
12451 }
12452 pc = &OMP_CLAUSE_CHAIN (*pc);
12453 break;
12454 default:
12455 gcc_unreachable ();
12456 }
12457 *pc = NULL_TREE;
12458 *expr_p = t;
12459 }
12460 return gimplify_omp_for (expr_p, pre_p);
12461 }
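
/* An illustrative sketch (hypothetical user code, not taken from this
   file): given a loop construct such as

     #pragma omp loop bind(parallel)
     for (int i = 0; i < n; i++)
       a[i] += b[i];

   the function above turns the OMP_LOOP into an OMP_SIMD; for
   bind(thread) that is all, for bind(parallel) the simd is wrapped in
   an OMP_FOR, and for bind(teams) additionally in a synthesized
   combined OMP_PARALLEL and an outer OMP_DISTRIBUTE, before the
   outermost construct is handed to gimplify_omp_for. */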
12462
12463
12464 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
12465 of OMP_TARGET's body. */
12466
12467 static tree
12468 find_omp_teams (tree *tp, int *walk_subtrees, void *)
12469 {
12470 *walk_subtrees = 0;
12471 switch (TREE_CODE (*tp))
12472 {
12473 case OMP_TEAMS:
12474 return *tp;
12475 case BIND_EXPR:
12476 case STATEMENT_LIST:
12477 *walk_subtrees = 1;
12478 break;
12479 default:
12480 break;
12481 }
12482 return NULL_TREE;
12483 }
12484
12485 /* Helper function of optimize_target_teams, determine if the expression
12486 can be computed safely on the host before the target construct. */
12487
12488 static tree
12489 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
12490 {
12491 splay_tree_node n;
12492
12493 if (TYPE_P (*tp))
12494 {
12495 *walk_subtrees = 0;
12496 return NULL_TREE;
12497 }
12498 switch (TREE_CODE (*tp))
12499 {
12500 case VAR_DECL:
12501 case PARM_DECL:
12502 case RESULT_DECL:
12503 *walk_subtrees = 0;
12504 if (error_operand_p (*tp)
12505 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
12506 || DECL_HAS_VALUE_EXPR_P (*tp)
12507 || DECL_THREAD_LOCAL_P (*tp)
12508 || TREE_SIDE_EFFECTS (*tp)
12509 || TREE_THIS_VOLATILE (*tp))
12510 return *tp;
12511 if (is_global_var (*tp)
12512 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
12513 || lookup_attribute ("omp declare target link",
12514 DECL_ATTRIBUTES (*tp))))
12515 return *tp;
12516 if (VAR_P (*tp)
12517 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
12518 && !is_global_var (*tp)
12519 && decl_function_context (*tp) == current_function_decl)
12520 return *tp;
12521 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12522 (splay_tree_key) *tp);
12523 if (n == NULL)
12524 {
12525 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
12526 return NULL_TREE;
12527 return *tp;
12528 }
12529 else if (n->value & GOVD_LOCAL)
12530 return *tp;
12531 else if (n->value & GOVD_FIRSTPRIVATE)
12532 return NULL_TREE;
12533 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12534 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12535 return NULL_TREE;
12536 return *tp;
12537 case INTEGER_CST:
12538 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12539 return *tp;
12540 return NULL_TREE;
12541 case TARGET_EXPR:
12542 if (TARGET_EXPR_INITIAL (*tp)
12543 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
12544 return *tp;
12545 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
12546 walk_subtrees, NULL);
12547 /* Allow some reasonable subset of integral arithmetic. */
12548 case PLUS_EXPR:
12549 case MINUS_EXPR:
12550 case MULT_EXPR:
12551 case TRUNC_DIV_EXPR:
12552 case CEIL_DIV_EXPR:
12553 case FLOOR_DIV_EXPR:
12554 case ROUND_DIV_EXPR:
12555 case TRUNC_MOD_EXPR:
12556 case CEIL_MOD_EXPR:
12557 case FLOOR_MOD_EXPR:
12558 case ROUND_MOD_EXPR:
12559 case RDIV_EXPR:
12560 case EXACT_DIV_EXPR:
12561 case MIN_EXPR:
12562 case MAX_EXPR:
12563 case LSHIFT_EXPR:
12564 case RSHIFT_EXPR:
12565 case BIT_IOR_EXPR:
12566 case BIT_XOR_EXPR:
12567 case BIT_AND_EXPR:
12568 case NEGATE_EXPR:
12569 case ABS_EXPR:
12570 case BIT_NOT_EXPR:
12571 case NON_LVALUE_EXPR:
12572 CASE_CONVERT:
12573 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12574 return *tp;
12575 return NULL_TREE;
12576 /* And disallow anything else, except for comparisons. */
12577 default:
12578 if (COMPARISON_CLASS_P (*tp))
12579 return NULL_TREE;
12580 return *tp;
12581 }
12582 }
12583
12584 /* Try to determine if the num_teams and/or thread_limit expressions
12585 can have their values determined already before entering the
12586 target construct.
12587 INTEGER_CSTs trivially can; so can integral decls that are
12588 firstprivate (explicitly or implicitly) or explicitly mapped with
12589 map(always, to:) or map(always, tofrom:) on the target region, and
12590 expressions involving simple arithmetic on those. Function calls
12591 and dereferences are not OK.
12592 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
12593 EXPR based on what we find:
12594 0 stands for clause not specified at all, use implementation default
12595 -1 stands for value that can't be determined easily before entering
12596 the target construct.
12597 If a teams construct is not present at all, use 1 for num_teams
12598 and 0 for thread_limit (only one team is involved, and the thread
12599 limit is implementation defined). */
12600
12601 static void
12602 optimize_target_teams (tree target, gimple_seq *pre_p)
12603 {
12604 tree body = OMP_BODY (target);
12605 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
12606 tree num_teams = integer_zero_node;
12607 tree thread_limit = integer_zero_node;
12608 location_t num_teams_loc = EXPR_LOCATION (target);
12609 location_t thread_limit_loc = EXPR_LOCATION (target);
12610 tree c, *p, expr;
12611 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
12612
12613 if (teams == NULL_TREE)
12614 num_teams = integer_one_node;
12615 else
12616 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
12617 {
12618 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
12619 {
12620 p = &num_teams;
12621 num_teams_loc = OMP_CLAUSE_LOCATION (c);
12622 }
12623 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
12624 {
12625 p = &thread_limit;
12626 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
12627 }
12628 else
12629 continue;
12630 expr = OMP_CLAUSE_OPERAND (c, 0);
12631 if (TREE_CODE (expr) == INTEGER_CST)
12632 {
12633 *p = expr;
12634 continue;
12635 }
12636 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
12637 {
12638 *p = integer_minus_one_node;
12639 continue;
12640 }
12641 *p = expr;
12642 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
12643 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
12644 == GS_ERROR)
12645 {
12646 gimplify_omp_ctxp = target_ctx;
12647 *p = integer_minus_one_node;
12648 continue;
12649 }
12650 gimplify_omp_ctxp = target_ctx;
12651 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
12652 OMP_CLAUSE_OPERAND (c, 0) = *p;
12653 }
12654 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
12655 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
12656 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
12657 OMP_TARGET_CLAUSES (target) = c;
12658 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
12659 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
12660 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
12661 OMP_TARGET_CLAUSES (target) = c;
12662 }
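
/* A hedged example (hypothetical code): given

     int n = 4;
     #pragma omp target teams num_teams(n + 1) thread_limit(8)
       ;

   thread_limit(8) is an INTEGER_CST and is recorded as-is, while
   n + 1 is simple integral arithmetic on an (implicitly) firstprivate
   scalar, so it is gimplified into a host temporary that becomes the
   clause operand. An expression rejected by computable_teams_clause,
   e.g. num_teams(foo ()), is recorded as -1 instead. */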
12663
12664 /* Gimplify the gross structure of several OMP constructs. */
12665
12666 static void
12667 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
12668 {
12669 tree expr = *expr_p;
12670 gimple *stmt;
12671 gimple_seq body = NULL;
12672 enum omp_region_type ort;
12673
12674 switch (TREE_CODE (expr))
12675 {
12676 case OMP_SECTIONS:
12677 case OMP_SINGLE:
12678 ort = ORT_WORKSHARE;
12679 break;
12680 case OMP_TARGET:
12681 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
12682 break;
12683 case OACC_KERNELS:
12684 ort = ORT_ACC_KERNELS;
12685 break;
12686 case OACC_PARALLEL:
12687 ort = ORT_ACC_PARALLEL;
12688 break;
12689 case OACC_SERIAL:
12690 ort = ORT_ACC_SERIAL;
12691 break;
12692 case OACC_DATA:
12693 ort = ORT_ACC_DATA;
12694 break;
12695 case OMP_TARGET_DATA:
12696 ort = ORT_TARGET_DATA;
12697 break;
12698 case OMP_TEAMS:
12699 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
12700 if (gimplify_omp_ctxp == NULL
12701 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
12702 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
12703 break;
12704 case OACC_HOST_DATA:
12705 ort = ORT_ACC_HOST_DATA;
12706 break;
12707 default:
12708 gcc_unreachable ();
12709 }
12710
12711 bool save_in_omp_construct = in_omp_construct;
12712 if ((ort & ORT_ACC) == 0)
12713 in_omp_construct = false;
12714 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
12715 TREE_CODE (expr));
12716 if (TREE_CODE (expr) == OMP_TARGET)
12717 optimize_target_teams (expr, pre_p);
12718 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
12719 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
12720 {
12721 push_gimplify_context ();
12722 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
12723 if (gimple_code (g) == GIMPLE_BIND)
12724 pop_gimplify_context (g);
12725 else
12726 pop_gimplify_context (NULL);
12727 if ((ort & ORT_TARGET_DATA) != 0)
12728 {
12729 enum built_in_function end_ix;
12730 switch (TREE_CODE (expr))
12731 {
12732 case OACC_DATA:
12733 case OACC_HOST_DATA:
12734 end_ix = BUILT_IN_GOACC_DATA_END;
12735 break;
12736 case OMP_TARGET_DATA:
12737 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
12738 break;
12739 default:
12740 gcc_unreachable ();
12741 }
12742 tree fn = builtin_decl_explicit (end_ix);
12743 g = gimple_build_call (fn, 0);
12744 gimple_seq cleanup = NULL;
12745 gimple_seq_add_stmt (&cleanup, g);
12746 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
12747 body = NULL;
12748 gimple_seq_add_stmt (&body, g);
12749 }
12750 }
12751 else
12752 gimplify_and_add (OMP_BODY (expr), &body);
12753 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
12754 TREE_CODE (expr));
12755 in_omp_construct = save_in_omp_construct;
12756
12757 switch (TREE_CODE (expr))
12758 {
12759 case OACC_DATA:
12760 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
12761 OMP_CLAUSES (expr));
12762 break;
12763 case OACC_HOST_DATA:
12764 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
12765 {
12766 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12767 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
12768 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
12769 }
12770
12771 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
12772 OMP_CLAUSES (expr));
12773 break;
12774 case OACC_KERNELS:
12775 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
12776 OMP_CLAUSES (expr));
12777 break;
12778 case OACC_PARALLEL:
12779 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
12780 OMP_CLAUSES (expr));
12781 break;
12782 case OACC_SERIAL:
12783 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
12784 OMP_CLAUSES (expr));
12785 break;
12786 case OMP_SECTIONS:
12787 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
12788 break;
12789 case OMP_SINGLE:
12790 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
12791 break;
12792 case OMP_TARGET:
12793 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
12794 OMP_CLAUSES (expr));
12795 break;
12796 case OMP_TARGET_DATA:
12797 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
12798 to be evaluated before the use_device_{ptr,addr} clauses if they
12799 refer to the same variables. */
12800 {
12801 tree use_device_clauses;
12802 tree *pc, *uc = &use_device_clauses;
12803 for (pc = &OMP_CLAUSES (expr); *pc; )
12804 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
12805 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
12806 {
12807 *uc = *pc;
12808 *pc = OMP_CLAUSE_CHAIN (*pc);
12809 uc = &OMP_CLAUSE_CHAIN (*uc);
12810 }
12811 else
12812 pc = &OMP_CLAUSE_CHAIN (*pc);
12813 *uc = NULL_TREE;
12814 *pc = use_device_clauses;
12815 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
12816 OMP_CLAUSES (expr));
12817 }
12818 break;
12819 case OMP_TEAMS:
12820 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
12821 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
12822 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
12823 break;
12824 default:
12825 gcc_unreachable ();
12826 }
12827
12828 gimplify_seq_add_stmt (pre_p, stmt);
12829 *expr_p = NULL_TREE;
12830 }
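
/* For instance (an illustrative sketch), in

     #pragma omp target data map(tofrom: p[0:n]) use_device_ptr(p)

   the reordering above moves use_device_ptr(p) after the map clause so
   that the mapping of p is evaluated first; the region is then emitted
   as a GIMPLE_OMP_TARGET with kind GF_OMP_TARGET_KIND_DATA. */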
12831
12832 /* Gimplify the gross structure of OpenACC enter/exit data and update, and of
12833 OpenMP target update and target enter/exit data constructs. */
12834
12835 static void
12836 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
12837 {
12838 tree expr = *expr_p;
12839 int kind;
12840 gomp_target *stmt;
12841 enum omp_region_type ort = ORT_WORKSHARE;
12842
12843 switch (TREE_CODE (expr))
12844 {
12845 case OACC_ENTER_DATA:
12846 case OACC_EXIT_DATA:
12847 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
12848 ort = ORT_ACC;
12849 break;
12850 case OACC_UPDATE:
12851 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
12852 ort = ORT_ACC;
12853 break;
12854 case OMP_TARGET_UPDATE:
12855 kind = GF_OMP_TARGET_KIND_UPDATE;
12856 break;
12857 case OMP_TARGET_ENTER_DATA:
12858 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
12859 break;
12860 case OMP_TARGET_EXIT_DATA:
12861 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
12862 break;
12863 default:
12864 gcc_unreachable ();
12865 }
12866 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
12867 ort, TREE_CODE (expr));
12868 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
12869 TREE_CODE (expr));
12870 if (TREE_CODE (expr) == OACC_UPDATE
12871 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12872 OMP_CLAUSE_IF_PRESENT))
12873 {
12874 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
12875 clause. */
12876 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12877 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
12878 switch (OMP_CLAUSE_MAP_KIND (c))
12879 {
12880 case GOMP_MAP_FORCE_TO:
12881 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
12882 break;
12883 case GOMP_MAP_FORCE_FROM:
12884 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
12885 break;
12886 default:
12887 break;
12888 }
12889 }
12890 else if (TREE_CODE (expr) == OACC_EXIT_DATA
12891 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12892 OMP_CLAUSE_FINALIZE))
12893 {
12894 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
12895 semantics. */
12896 bool have_clause = false;
12897 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12898 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
12899 switch (OMP_CLAUSE_MAP_KIND (c))
12900 {
12901 case GOMP_MAP_FROM:
12902 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
12903 have_clause = true;
12904 break;
12905 case GOMP_MAP_RELEASE:
12906 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
12907 have_clause = true;
12908 break;
12909 case GOMP_MAP_POINTER:
12910 case GOMP_MAP_TO_PSET:
12911 /* TODO PR92929: we may see these here, but they'll always follow
12912 one of the clauses above, and will be handled by libgomp as
12913 one group, so no handling required here. */
12914 gcc_assert (have_clause);
12915 break;
12916 case GOMP_MAP_DETACH:
12917 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
12918 have_clause = false;
12919 break;
12920 case GOMP_MAP_STRUCT:
12921 have_clause = false;
12922 break;
12923 default:
12924 gcc_unreachable ();
12925 }
12926 }
12927 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
12928
12929 gimplify_seq_add_stmt (pre_p, stmt);
12930 *expr_p = NULL_TREE;
12931 }
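
/* For example (illustrative), for the OpenACC directive

     #pragma acc exit data copyout(a) finalize

   the GOMP_MAP_FROM mapping of 'a' is rewritten above into
   GOMP_MAP_FORCE_FROM so that the runtime applies the finalize
   semantics; without the finalize clause the map kind is left
   untouched. */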
12932
12933 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
12934 stabilized the lhs of the atomic operation as *ADDR. Return true if
12935 EXPR is this stabilized form. */
12936
12937 static bool
12938 goa_lhs_expr_p (tree expr, tree addr)
12939 {
12940 /* Also include casts to other type variants. The C front end is fond
12941 of adding these for e.g. volatile variables. This is like
12942 STRIP_TYPE_NOPS but includes the main variant lookup. */
12943 STRIP_USELESS_TYPE_CONVERSION (expr);
12944
12945 if (TREE_CODE (expr) == INDIRECT_REF)
12946 {
12947 expr = TREE_OPERAND (expr, 0);
12948 while (expr != addr
12949 && (CONVERT_EXPR_P (expr)
12950 || TREE_CODE (expr) == NON_LVALUE_EXPR)
12951 && TREE_CODE (expr) == TREE_CODE (addr)
12952 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
12953 {
12954 expr = TREE_OPERAND (expr, 0);
12955 addr = TREE_OPERAND (addr, 0);
12956 }
12957 if (expr == addr)
12958 return true;
12959 return (TREE_CODE (addr) == ADDR_EXPR
12960 && TREE_CODE (expr) == ADDR_EXPR
12961 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
12962 }
12963 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
12964 return true;
12965 return false;
12966 }
12967
12968 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
12969 expression does not involve the lhs, evaluate it into a temporary.
12970 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
12971 or -1 if an error was encountered. */
12972
12973 static int
12974 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
12975 tree lhs_var)
12976 {
12977 tree expr = *expr_p;
12978 int saw_lhs;
12979
12980 if (goa_lhs_expr_p (expr, lhs_addr))
12981 {
12982 *expr_p = lhs_var;
12983 return 1;
12984 }
12985 if (is_gimple_val (expr))
12986 return 0;
12987
12988 saw_lhs = 0;
12989 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
12990 {
12991 case tcc_binary:
12992 case tcc_comparison:
12993 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
12994 lhs_var);
12995 /* FALLTHRU */
12996 case tcc_unary:
12997 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
12998 lhs_var);
12999 break;
13000 case tcc_expression:
13001 switch (TREE_CODE (expr))
13002 {
13003 case TRUTH_ANDIF_EXPR:
13004 case TRUTH_ORIF_EXPR:
13005 case TRUTH_AND_EXPR:
13006 case TRUTH_OR_EXPR:
13007 case TRUTH_XOR_EXPR:
13008 case BIT_INSERT_EXPR:
13009 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
13010 lhs_addr, lhs_var);
13011 /* FALLTHRU */
13012 case TRUTH_NOT_EXPR:
13013 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13014 lhs_addr, lhs_var);
13015 break;
13016 case COMPOUND_EXPR:
13017 /* Break out any preevaluations from cp_build_modify_expr. */
13018 for (; TREE_CODE (expr) == COMPOUND_EXPR;
13019 expr = TREE_OPERAND (expr, 1))
13020 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
13021 *expr_p = expr;
13022 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
13023 default:
13024 break;
13025 }
13026 break;
13027 case tcc_reference:
13028 if (TREE_CODE (expr) == BIT_FIELD_REF)
13029 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13030 lhs_addr, lhs_var);
13031 break;
13032 default:
13033 break;
13034 }
13035
13036 if (saw_lhs == 0)
13037 {
13038 enum gimplify_status gs;
13039 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
13040 if (gs != GS_ALL_DONE)
13041 saw_lhs = -1;
13042 }
13043
13044 return saw_lhs;
13045 }
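
/* For example (a sketch; foo is a hypothetical function): when
   gimplifying '#pragma omp atomic' on 'x = x + foo ()', the subtree
   'foo ()' does not involve the lhs, so it is evaluated into a
   temporary in PRE_P, while the occurrence of 'x' is replaced by
   LHS_VAR, leaving just 'LHS_VAR + tmp' as the atomic rhs. */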
13046
13047 /* Gimplify an OMP_ATOMIC statement. */
13048
13049 static enum gimplify_status
13050 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
13051 {
13052 tree addr = TREE_OPERAND (*expr_p, 0);
13053 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
13054 ? NULL : TREE_OPERAND (*expr_p, 1);
13055 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
13056 tree tmp_load;
13057 gomp_atomic_load *loadstmt;
13058 gomp_atomic_store *storestmt;
13059
13060 tmp_load = create_tmp_reg (type);
13061 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
13062 return GS_ERROR;
13063
13064 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
13065 != GS_ALL_DONE)
13066 return GS_ERROR;
13067
13068 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
13069 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13070 gimplify_seq_add_stmt (pre_p, loadstmt);
13071 if (rhs)
13072 {
13073 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13074 representatives. Use BIT_FIELD_REF on the lhs instead. */
13075 if (TREE_CODE (rhs) == BIT_INSERT_EXPR
13076 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
13077 {
13078 tree bitpos = TREE_OPERAND (rhs, 2);
13079 tree op1 = TREE_OPERAND (rhs, 1);
13080 tree bitsize;
13081 tree tmp_store = tmp_load;
13082 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
13083 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
13084 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
13085 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
13086 else
13087 bitsize = TYPE_SIZE (TREE_TYPE (op1));
13088 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
13089 tree t = build2_loc (EXPR_LOCATION (rhs),
13090 MODIFY_EXPR, void_type_node,
13091 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
13092 TREE_TYPE (op1), tmp_store, bitsize,
13093 bitpos), op1);
13094 gimplify_and_add (t, pre_p);
13095 rhs = tmp_store;
13096 }
13097 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
13098 != GS_ALL_DONE)
13099 return GS_ERROR;
13100 }
13101
13102 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
13103 rhs = tmp_load;
13104 storestmt
13105 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13106 gimplify_seq_add_stmt (pre_p, storestmt);
13107 switch (TREE_CODE (*expr_p))
13108 {
13109 case OMP_ATOMIC_READ:
13110 case OMP_ATOMIC_CAPTURE_OLD:
13111 *expr_p = tmp_load;
13112 gimple_omp_atomic_set_need_value (loadstmt);
13113 break;
13114 case OMP_ATOMIC_CAPTURE_NEW:
13115 *expr_p = rhs;
13116 gimple_omp_atomic_set_need_value (storestmt);
13117 break;
13118 default:
13119 *expr_p = NULL;
13120 break;
13121 }
13122
13123 return GS_ALL_DONE;
13124 }
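
/* As an illustration (hypothetical user code):

     #pragma omp atomic capture
     v = x += 1;

   is lowered to a GIMPLE_OMP_ATOMIC_LOAD of 'x' into a temporary
   followed by a GIMPLE_OMP_ATOMIC_STORE of 'temporary + 1'; as this is
   OMP_ATOMIC_CAPTURE_NEW, the store is marked as needing its value,
   which replaces *EXPR_P and ends up assigned to 'v'. */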
13125
13126 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13127 body, and adding some EH bits. */
13128
13129 static enum gimplify_status
13130 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
13131 {
13132 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
13133 gimple *body_stmt;
13134 gtransaction *trans_stmt;
13135 gimple_seq body = NULL;
13136 int subcode = 0;
13137
13138 /* Wrap the transaction body in a BIND_EXPR so we have a context
13139 in which to put decls for OMP. */
13140 if (TREE_CODE (tbody) != BIND_EXPR)
13141 {
13142 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
13143 TREE_SIDE_EFFECTS (bind) = 1;
13144 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
13145 TRANSACTION_EXPR_BODY (expr) = bind;
13146 }
13147
13148 push_gimplify_context ();
13149 temp = voidify_wrapper_expr (*expr_p, NULL);
13150
13151 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
13152 pop_gimplify_context (body_stmt);
13153
13154 trans_stmt = gimple_build_transaction (body);
13155 if (TRANSACTION_EXPR_OUTER (expr))
13156 subcode = GTMA_IS_OUTER;
13157 else if (TRANSACTION_EXPR_RELAXED (expr))
13158 subcode = GTMA_IS_RELAXED;
13159 gimple_transaction_set_subcode (trans_stmt, subcode);
13160
13161 gimplify_seq_add_stmt (pre_p, trans_stmt);
13162
13163 if (temp)
13164 {
13165 *expr_p = temp;
13166 return GS_OK;
13167 }
13168
13169 *expr_p = NULL_TREE;
13170 return GS_ALL_DONE;
13171 }
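
/* E.g. (illustrative) a transactional region such as

     __transaction_atomic { x++; }

   has its body gimplified into a GIMPLE_TRANSACTION statement with
   subcode 0; __transaction_relaxed would carry GTMA_IS_RELAXED and an
   outer transaction GTMA_IS_OUTER instead. */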
13172
13173 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13174 is the OMP_BODY of the original EXPR (which has already been
13175 gimplified so it's not present in the EXPR).
13176
13177 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
13178
13179 static gimple *
13180 gimplify_omp_ordered (tree expr, gimple_seq body)
13181 {
13182 tree c, decls;
13183 int failures = 0;
13184 unsigned int i;
13185 tree source_c = NULL_TREE;
13186 tree sink_c = NULL_TREE;
13187
13188 if (gimplify_omp_ctxp)
13189 {
13190 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13191 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13192 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
13193 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
13194 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
13195 {
13196 error_at (OMP_CLAUSE_LOCATION (c),
13197 "%<ordered%> construct with %<depend%> clause must be "
13198 "closely nested inside a loop with %<ordered%> clause "
13199 "with a parameter");
13200 failures++;
13201 }
13202 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13203 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
13204 {
13205 bool fail = false;
13206 for (decls = OMP_CLAUSE_DECL (c), i = 0;
13207 decls && TREE_CODE (decls) == TREE_LIST;
13208 decls = TREE_CHAIN (decls), ++i)
13209 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
13210 continue;
13211 else if (TREE_VALUE (decls)
13212 != gimplify_omp_ctxp->loop_iter_var[2 * i])
13213 {
13214 error_at (OMP_CLAUSE_LOCATION (c),
13215 "variable %qE is not an iteration "
13216 "of outermost loop %d, expected %qE",
13217 TREE_VALUE (decls), i + 1,
13218 gimplify_omp_ctxp->loop_iter_var[2 * i]);
13219 fail = true;
13220 failures++;
13221 }
13222 else
13223 TREE_VALUE (decls)
13224 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
13225 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
13226 {
13227 error_at (OMP_CLAUSE_LOCATION (c),
13228 "number of variables in %<depend%> clause with "
13229 "%<sink%> modifier does not match number of "
13230 "iteration variables");
13231 failures++;
13232 }
13233 sink_c = c;
13234 }
13235 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13236 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
13237 {
13238 if (source_c)
13239 {
13240 error_at (OMP_CLAUSE_LOCATION (c),
13241 "more than one %<depend%> clause with %<source%> "
13242 "modifier on an %<ordered%> construct");
13243 failures++;
13244 }
13245 else
13246 source_c = c;
13247 }
13248 }
13249 if (source_c && sink_c)
13250 {
13251 error_at (OMP_CLAUSE_LOCATION (source_c),
13252 "%<depend%> clause with %<source%> modifier specified "
13253 "together with %<depend%> clauses with %<sink%> modifier "
13254 "on the same construct");
13255 failures++;
13256 }
13257
13258 if (failures)
13259 return gimple_build_nop ();
13260 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
13261 }
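
/* An illustrative doacross loop accepted by the checks above:

     #pragma omp for ordered(2)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
         {
     #pragma omp ordered depend(sink: i - 1, j)
           ...
     #pragma omp ordered depend(source)
         }

   Each depend(sink) must list exactly the iteration variables of the
   associated loops, outermost first, and at most one depend(source)
   may appear, never together with depend(sink) on the same construct. */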
13262
13263 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
13264 expression produces a value to be used as an operand inside a GIMPLE
13265 statement, the value will be stored back in *EXPR_P. This value will
13266 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13267 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13268 emitted in PRE_P and POST_P.
13269
13270 Additionally, this process may overwrite parts of the input
13271 expression during gimplification. Ideally, it should be
13272 possible to do non-destructive gimplification.
13273
13274 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13275 the expression needs to evaluate to a value to be used as
13276 an operand in a GIMPLE statement, this value will be stored in
13277 *EXPR_P on exit. This happens when the caller specifies one
13278 of fb_lvalue or fb_rvalue fallback flags.
13279
13280 PRE_P will contain the sequence of GIMPLE statements corresponding
13281 to the evaluation of EXPR and all the side-effects that must
13282 be executed before the main expression. On exit, the last
13283 statement of PRE_P is the core statement being gimplified. For
13284 instance, when gimplifying 'if (++a)' the last statement in
13285 PRE_P will be 'if (t.1)' where t.1 is the result of
13286 pre-incrementing 'a'.
13287
13288 POST_P will contain the sequence of GIMPLE statements corresponding
13289 to the evaluation of all the side-effects that must be executed
13290 after the main expression. If this is NULL, the post
13291 side-effects are stored at the end of PRE_P.
13292
13293 The reason why the output is split in two is to handle post
13294 side-effects explicitly. In some cases, an expression may have
13295 inner and outer post side-effects which need to be emitted in
13296 an order different from the one given by the recursive
13297 traversal. For instance, for the expression (*p--)++ the post
13298 side-effects of '--' must actually occur *after* the post
13299 side-effects of '++'. However, gimplification will first visit
13300 the inner expression, so if a separate POST sequence was not
13301 used, the resulting sequence would be:
13302
13303 1 t.1 = *p
13304 2 p = p - 1
13305 3 t.2 = t.1 + 1
13306 4 *p = t.2
13307
13308 However, the post-decrement operation in line #2 must not be
13309 evaluated until after the store to *p at line #4, so the
13310 correct sequence should be:
13311
13312 1 t.1 = *p
13313 2 t.2 = t.1 + 1
13314 3 *p = t.2
13315 4 p = p - 1
13316
13317 So, by specifying a separate post queue, it is possible
13318 to emit the post side-effects in the correct order.
13319 If POST_P is NULL, an internal queue will be used. Before
13320 returning to the caller, the sequence POST_P is appended to
13321 the main output sequence PRE_P.
13322
13323 GIMPLE_TEST_F points to a function that takes a tree T and
13324 returns nonzero if T is in the GIMPLE form requested by the
13325 caller. The GIMPLE predicates are in gimple.c.
13326
13327 FALLBACK tells the function what sort of a temporary we want if
13328 gimplification cannot produce an expression that complies with
13329 GIMPLE_TEST_F.
13330
13331 fb_none means that no temporary should be generated
13332 fb_rvalue means that an rvalue is OK to generate
13333 fb_lvalue means that an lvalue is OK to generate
13334 fb_either means that either is OK, but an lvalue is preferable.
13335 fb_mayfail means that gimplification may fail (in which case
13336 GS_ERROR will be returned)
13337
13338 The return value is either GS_ERROR or GS_ALL_DONE, since this
13339 function iterates until EXPR is completely gimplified or an error
13340 occurs. */
13341
13342 enum gimplify_status
13343 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13344 bool (*gimple_test_f) (tree), fallback_t fallback)
13345 {
13346 tree tmp;
13347 gimple_seq internal_pre = NULL;
13348 gimple_seq internal_post = NULL;
13349 tree save_expr;
13350 bool is_statement;
13351 location_t saved_location;
13352 enum gimplify_status ret;
13353 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
13354 tree label;
13355
13356 save_expr = *expr_p;
13357 if (save_expr == NULL_TREE)
13358 return GS_ALL_DONE;
13359
13360 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
13361 is_statement = gimple_test_f == is_gimple_stmt;
13362 if (is_statement)
13363 gcc_assert (pre_p);
13364
13365 /* Consistency checks. */
13366 if (gimple_test_f == is_gimple_reg)
13367 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
13368 else if (gimple_test_f == is_gimple_val
13369 || gimple_test_f == is_gimple_call_addr
13370 || gimple_test_f == is_gimple_condexpr
13371 || gimple_test_f == is_gimple_condexpr_for_cond
13372 || gimple_test_f == is_gimple_mem_rhs
13373 || gimple_test_f == is_gimple_mem_rhs_or_call
13374 || gimple_test_f == is_gimple_reg_rhs
13375 || gimple_test_f == is_gimple_reg_rhs_or_call
13376 || gimple_test_f == is_gimple_asm_val
13377 || gimple_test_f == is_gimple_mem_ref_addr)
13378 gcc_assert (fallback & fb_rvalue);
13379 else if (gimple_test_f == is_gimple_min_lval
13380 || gimple_test_f == is_gimple_lvalue)
13381 gcc_assert (fallback & fb_lvalue);
13382 else if (gimple_test_f == is_gimple_addressable)
13383 gcc_assert (fallback & fb_either);
13384 else if (gimple_test_f == is_gimple_stmt)
13385 gcc_assert (fallback == fb_none);
13386 else
13387 {
13388 /* We should have recognized the GIMPLE_TEST_F predicate to
13389 know what kind of fallback to use in case a temporary is
13390 needed to hold the value or address of *EXPR_P. */
13391 gcc_unreachable ();
13392 }
13393
13394 /* We used to check the predicate here and return immediately if it
13395 succeeds. This is wrong; the design is for gimplification to be
13396 idempotent, and for the predicates to only test for valid forms, not
13397 whether they are fully simplified. */
13398 if (pre_p == NULL)
13399 pre_p = &internal_pre;
13400
13401 if (post_p == NULL)
13402 post_p = &internal_post;
13403
13404 /* Remember the last statements added to PRE_P and POST_P. Every
13405 new statement added by the gimplification helpers needs to be
13406 annotated with location information. To centralize the
13407 responsibility, we remember the last statement that had been
13408 added to both queues before gimplifying *EXPR_P. If
13409 gimplification produces new statements in PRE_P and POST_P, those
13410 statements will be annotated with the same location information
13411 as *EXPR_P. */
13412 pre_last_gsi = gsi_last (*pre_p);
13413 post_last_gsi = gsi_last (*post_p);
13414
13415 saved_location = input_location;
13416 if (save_expr != error_mark_node
13417 && EXPR_HAS_LOCATION (*expr_p))
13418 input_location = EXPR_LOCATION (*expr_p);
13419
13420 /* Loop over the specific gimplifiers until the toplevel node
13421 remains the same. */
13422 do
13423 {
13424 /* Strip away as many useless type conversions as possible
13425 at the toplevel. */
13426 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
13427
13428 /* Remember the expr. */
13429 save_expr = *expr_p;
13430
13431 /* Die, die, die, my darling. */
13432 if (error_operand_p (save_expr))
13433 {
13434 ret = GS_ERROR;
13435 break;
13436 }
13437
13438 /* Do any language-specific gimplification. */
13439 ret = ((enum gimplify_status)
13440 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
13441 if (ret == GS_OK)
13442 {
13443 if (*expr_p == NULL_TREE)
13444 break;
13445 if (*expr_p != save_expr)
13446 continue;
13447 }
13448 else if (ret != GS_UNHANDLED)
13449 break;
13450
13451 /* Make sure that all the cases set 'ret' appropriately. */
13452 ret = GS_UNHANDLED;
13453 switch (TREE_CODE (*expr_p))
13454 {
13455 /* First deal with the special cases. */
13456
13457 case POSTINCREMENT_EXPR:
13458 case POSTDECREMENT_EXPR:
13459 case PREINCREMENT_EXPR:
13460 case PREDECREMENT_EXPR:
13461 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
13462 fallback != fb_none,
13463 TREE_TYPE (*expr_p));
13464 break;
13465
13466 case VIEW_CONVERT_EXPR:
13467 if ((fallback & fb_rvalue)
13468 && is_gimple_reg_type (TREE_TYPE (*expr_p))
13469 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
13470 {
13471 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13472 post_p, is_gimple_val, fb_rvalue);
13473 recalculate_side_effects (*expr_p);
13474 break;
13475 }
13476 /* Fallthru. */
13477
13478 case ARRAY_REF:
13479 case ARRAY_RANGE_REF:
13480 case REALPART_EXPR:
13481 case IMAGPART_EXPR:
13482 case COMPONENT_REF:
13483 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
13484 fallback ? fallback : fb_rvalue);
13485 break;
13486
13487 case COND_EXPR:
13488 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
13489
13490 /* C99 code may assign to an array in a structure value of a
13491 conditional expression, and this has undefined behavior
13492 only on execution, so create a temporary if an lvalue is
13493 required. */
13494 if (fallback == fb_lvalue)
13495 {
13496 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13497 mark_addressable (*expr_p);
13498 ret = GS_OK;
13499 }
13500 break;
13501
13502 case CALL_EXPR:
13503 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
13504
13505 /* C99 code may assign to an array in a structure returned
13506 from a function, and this has undefined behavior only on
13507 execution, so create a temporary if an lvalue is
13508 required. */
13509 if (fallback == fb_lvalue)
13510 {
13511 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13512 mark_addressable (*expr_p);
13513 ret = GS_OK;
13514 }
13515 break;
13516
13517 case TREE_LIST:
13518 gcc_unreachable ();
13519
13520 case COMPOUND_EXPR:
13521 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
13522 break;
13523
13524 case COMPOUND_LITERAL_EXPR:
13525 ret = gimplify_compound_literal_expr (expr_p, pre_p,
13526 gimple_test_f, fallback);
13527 break;
13528
13529 case MODIFY_EXPR:
13530 case INIT_EXPR:
13531 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
13532 fallback != fb_none);
13533 break;
13534
13535 case TRUTH_ANDIF_EXPR:
13536 case TRUTH_ORIF_EXPR:
13537 {
13538 /* Preserve the original type of the expression and the
13539 source location of the outer expression. */
13540 tree org_type = TREE_TYPE (*expr_p);
13541 *expr_p = gimple_boolify (*expr_p);
13542 *expr_p = build3_loc (input_location, COND_EXPR,
13543 org_type, *expr_p,
13544 fold_convert_loc
13545 (input_location,
13546 org_type, boolean_true_node),
13547 fold_convert_loc
13548 (input_location,
13549 org_type, boolean_false_node));
13550 ret = GS_OK;
13551 break;
13552 }
13553
13554 case TRUTH_NOT_EXPR:
13555 {
13556 tree type = TREE_TYPE (*expr_p);
13557 /* The parsers are careful to generate TRUTH_NOT_EXPR
13558 only with operands that are always zero or one.
13559 We do not fold here but handle the only interesting case
13560 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
13561 *expr_p = gimple_boolify (*expr_p);
13562 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
13563 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
13564 TREE_TYPE (*expr_p),
13565 TREE_OPERAND (*expr_p, 0));
13566 else
13567 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
13568 TREE_TYPE (*expr_p),
13569 TREE_OPERAND (*expr_p, 0),
13570 build_int_cst (TREE_TYPE (*expr_p), 1));
13571 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
13572 *expr_p = fold_convert_loc (input_location, type, *expr_p);
13573 ret = GS_OK;
13574 break;
13575 }
13576
13577 case ADDR_EXPR:
13578 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
13579 break;
13580
13581 case ANNOTATE_EXPR:
13582 {
13583 tree cond = TREE_OPERAND (*expr_p, 0);
13584 tree kind = TREE_OPERAND (*expr_p, 1);
13585 tree data = TREE_OPERAND (*expr_p, 2);
13586 tree type = TREE_TYPE (cond);
13587 if (!INTEGRAL_TYPE_P (type))
13588 {
13589 *expr_p = cond;
13590 ret = GS_OK;
13591 break;
13592 }
13593 tree tmp = create_tmp_var (type);
13594 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
13595 gcall *call
13596 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
13597 gimple_call_set_lhs (call, tmp);
13598 gimplify_seq_add_stmt (pre_p, call);
13599 *expr_p = tmp;
13600 ret = GS_ALL_DONE;
13601 break;
13602 }
13603
13604 case VA_ARG_EXPR:
13605 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
13606 break;
13607
13608 CASE_CONVERT:
13609 if (IS_EMPTY_STMT (*expr_p))
13610 {
13611 ret = GS_ALL_DONE;
13612 break;
13613 }
13614
13615 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
13616 || fallback == fb_none)
13617 {
13618 /* Just strip a conversion to void (or in void context) and
13619 try again. */
13620 *expr_p = TREE_OPERAND (*expr_p, 0);
13621 ret = GS_OK;
13622 break;
13623 }
13624
13625 ret = gimplify_conversion (expr_p);
13626 if (ret == GS_ERROR)
13627 break;
13628 if (*expr_p != save_expr)
13629 break;
13630 /* FALLTHRU */
13631
13632 case FIX_TRUNC_EXPR:
13633 /* unary_expr: ... | '(' cast ')' val | ... */
13634 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13635 is_gimple_val, fb_rvalue);
13636 recalculate_side_effects (*expr_p);
13637 break;
13638
13639 case INDIRECT_REF:
13640 {
13641 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
13642 bool notrap = TREE_THIS_NOTRAP (*expr_p);
13643 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
13644
13645 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
13646 if (*expr_p != save_expr)
13647 {
13648 ret = GS_OK;
13649 break;
13650 }
13651
13652 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13653 is_gimple_reg, fb_rvalue);
13654 if (ret == GS_ERROR)
13655 break;
13656
13657 recalculate_side_effects (*expr_p);
13658 *expr_p = fold_build2_loc (input_location, MEM_REF,
13659 TREE_TYPE (*expr_p),
13660 TREE_OPERAND (*expr_p, 0),
13661 build_int_cst (saved_ptr_type, 0));
13662 TREE_THIS_VOLATILE (*expr_p) = volatilep;
13663 TREE_THIS_NOTRAP (*expr_p) = notrap;
13664 ret = GS_OK;
13665 break;
13666 }
13667
13668 /* We arrive here through the various re-gimplification paths. */
13669 case MEM_REF:
13670 /* First try re-folding the whole thing. */
13671 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
13672 TREE_OPERAND (*expr_p, 0),
13673 TREE_OPERAND (*expr_p, 1));
13674 if (tmp)
13675 {
13676 REF_REVERSE_STORAGE_ORDER (tmp)
13677 = REF_REVERSE_STORAGE_ORDER (*expr_p);
13678 *expr_p = tmp;
13679 recalculate_side_effects (*expr_p);
13680 ret = GS_OK;
13681 break;
13682 }
13683 /* Avoid re-gimplifying the address operand if it is already
13684 in suitable form. Re-gimplifying would mark the address
13685 operand addressable. Always gimplify when not in SSA form
13686 as we still may have to gimplify decls with value-exprs. */
13687 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
13688 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
13689 {
13690 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13691 is_gimple_mem_ref_addr, fb_rvalue);
13692 if (ret == GS_ERROR)
13693 break;
13694 }
13695 recalculate_side_effects (*expr_p);
13696 ret = GS_ALL_DONE;
13697 break;
13698
13699 /* Constants need not be gimplified. */
13700 case INTEGER_CST:
13701 case REAL_CST:
13702 case FIXED_CST:
13703 case STRING_CST:
13704 case COMPLEX_CST:
13705 case VECTOR_CST:
13706 /* Drop the overflow flag on constants, we do not want
13707 that in the GIMPLE IL. */
13708 if (TREE_OVERFLOW_P (*expr_p))
13709 *expr_p = drop_tree_overflow (*expr_p);
13710 ret = GS_ALL_DONE;
13711 break;
13712
13713 case CONST_DECL:
13714 /* If we require an lvalue, such as for ADDR_EXPR, retain the
13715 CONST_DECL node. Otherwise the decl is replaceable by its
13716 value. */
13717 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
13718 if (fallback & fb_lvalue)
13719 ret = GS_ALL_DONE;
13720 else
13721 {
13722 *expr_p = DECL_INITIAL (*expr_p);
13723 ret = GS_OK;
13724 }
13725 break;
13726
13727 case DECL_EXPR:
13728 ret = gimplify_decl_expr (expr_p, pre_p);
13729 break;
13730
13731 case BIND_EXPR:
13732 ret = gimplify_bind_expr (expr_p, pre_p);
13733 break;
13734
13735 case LOOP_EXPR:
13736 ret = gimplify_loop_expr (expr_p, pre_p);
13737 break;
13738
13739 case SWITCH_EXPR:
13740 ret = gimplify_switch_expr (expr_p, pre_p);
13741 break;
13742
13743 case EXIT_EXPR:
13744 ret = gimplify_exit_expr (expr_p);
13745 break;
13746
13747 case GOTO_EXPR:
13748 /* If the target is not a LABEL_DECL, then it is a computed jump
13749 and the target needs to be gimplified. */
13750 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
13751 {
13752 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
13753 NULL, is_gimple_val, fb_rvalue);
13754 if (ret == GS_ERROR)
13755 break;
13756 }
13757 gimplify_seq_add_stmt (pre_p,
13758 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
13759 ret = GS_ALL_DONE;
13760 break;
13761
13762 case PREDICT_EXPR:
13763 gimplify_seq_add_stmt (pre_p,
13764 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
13765 PREDICT_EXPR_OUTCOME (*expr_p)));
13766 ret = GS_ALL_DONE;
13767 break;
13768
13769 case LABEL_EXPR:
13770 ret = gimplify_label_expr (expr_p, pre_p);
13771 label = LABEL_EXPR_LABEL (*expr_p);
13772 gcc_assert (decl_function_context (label) == current_function_decl);
13773
13774 /* If the label is used in a goto statement, or the address of the
13775 label is taken, we need to unpoison all variables that were seen
13776 so far. Doing so prevents us from reporting false positives. */
13777 if (asan_poisoned_variables
13778 && asan_used_labels != NULL
13779 && asan_used_labels->contains (label))
13780 asan_poison_variables (asan_poisoned_variables, false, pre_p);
13781 break;
13782
13783 case CASE_LABEL_EXPR:
13784 ret = gimplify_case_label_expr (expr_p, pre_p);
13785
13786 if (gimplify_ctxp->live_switch_vars)
13787 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
13788 pre_p);
13789 break;
13790
13791 case RETURN_EXPR:
13792 ret = gimplify_return_expr (*expr_p, pre_p);
13793 break;
13794
13795 case CONSTRUCTOR:
13796 /* Don't reduce this in place; let gimplify_init_constructor work its
13797 magic. But if we're just elaborating this for side effects, just
13798 gimplify any element that has side-effects. */
13799 if (fallback == fb_none)
13800 {
13801 unsigned HOST_WIDE_INT ix;
13802 tree val;
13803 tree temp = NULL_TREE;
13804 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
13805 if (TREE_SIDE_EFFECTS (val))
13806 append_to_statement_list (val, &temp);
13807
13808 *expr_p = temp;
13809 ret = temp ? GS_OK : GS_ALL_DONE;
13810 }
13811 /* C99 code may assign to an array in a constructed
13812 structure or union, and this has undefined behavior only
13813 on execution, so create a temporary if an lvalue is
13814 required. */
13815 else if (fallback == fb_lvalue)
13816 {
13817 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13818 mark_addressable (*expr_p);
13819 ret = GS_OK;
13820 }
13821 else
13822 ret = GS_ALL_DONE;
13823 break;
13824
13825 /* The following are special cases that are not handled by the
13826 original GIMPLE grammar. */
13827
13828 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
13829 eliminated. */
13830 case SAVE_EXPR:
13831 ret = gimplify_save_expr (expr_p, pre_p, post_p);
13832 break;
13833
13834 case BIT_FIELD_REF:
13835 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13836 post_p, is_gimple_lvalue, fb_either);
13837 recalculate_side_effects (*expr_p);
13838 break;
13839
13840 case TARGET_MEM_REF:
13841 {
13842 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
13843
13844 if (TMR_BASE (*expr_p))
13845 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
13846 post_p, is_gimple_mem_ref_addr, fb_either);
13847 if (TMR_INDEX (*expr_p))
13848 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
13849 post_p, is_gimple_val, fb_rvalue);
13850 if (TMR_INDEX2 (*expr_p))
13851 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
13852 post_p, is_gimple_val, fb_rvalue);
13853 /* TMR_STEP and TMR_OFFSET are always integer constants. */
13854 ret = MIN (r0, r1);
13855 }
13856 break;
13857
13858 case NON_LVALUE_EXPR:
13859 /* This should have been stripped above. */
13860 gcc_unreachable ();
13861
13862 case ASM_EXPR:
13863 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
13864 break;
13865
13866 case TRY_FINALLY_EXPR:
13867 case TRY_CATCH_EXPR:
13868 {
13869 gimple_seq eval, cleanup;
13870 gtry *try_;
13871
13872 /* Calls to destructors are generated automatically in the FINALLY/CATCH
13873 block. They should have UNKNOWN_LOCATION as their location. However,
13874 gimplify_call_expr will reset such call stmts to input_location
13875 if it finds that a stmt's location is unknown. To prevent this
13876 resetting for destructors, we set input_location to UNKNOWN_LOCATION.
13877 Note that this only affects the destructor calls in the FINALLY/CATCH
13878 block; input_location is automatically restored to its original value
13879 by the end of gimplify_expr. */
13880 input_location = UNKNOWN_LOCATION;
13881 eval = cleanup = NULL;
13882 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
13883 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13884 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
13885 {
13886 gimple_seq n = NULL, e = NULL;
13887 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13888 0), &n);
13889 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13890 1), &e);
13891 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
13892 {
13893 geh_else *stmt = gimple_build_eh_else (n, e);
13894 gimple_seq_add_stmt (&cleanup, stmt);
13895 }
13896 }
13897 else
13898 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
13899 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
13900 if (gimple_seq_empty_p (cleanup))
13901 {
13902 gimple_seq_add_seq (pre_p, eval);
13903 ret = GS_ALL_DONE;
13904 break;
13905 }
13906 try_ = gimple_build_try (eval, cleanup,
13907 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13908 ? GIMPLE_TRY_FINALLY
13909 : GIMPLE_TRY_CATCH);
13910 if (EXPR_HAS_LOCATION (save_expr))
13911 gimple_set_location (try_, EXPR_LOCATION (save_expr));
13912 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
13913 gimple_set_location (try_, saved_location);
13914 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
13915 gimple_try_set_catch_is_cleanup (try_,
13916 TRY_CATCH_IS_CLEANUP (*expr_p));
13917 gimplify_seq_add_stmt (pre_p, try_);
13918 ret = GS_ALL_DONE;
13919 break;
13920 }
13921
13922 case CLEANUP_POINT_EXPR:
13923 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
13924 break;
13925
13926 case TARGET_EXPR:
13927 ret = gimplify_target_expr (expr_p, pre_p, post_p);
13928 break;
13929
13930 case CATCH_EXPR:
13931 {
13932 gimple *c;
13933 gimple_seq handler = NULL;
13934 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
13935 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
13936 gimplify_seq_add_stmt (pre_p, c);
13937 ret = GS_ALL_DONE;
13938 break;
13939 }
13940
13941 case EH_FILTER_EXPR:
13942 {
13943 gimple *ehf;
13944 gimple_seq failure = NULL;
13945
13946 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
13947 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
13948 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
13949 gimplify_seq_add_stmt (pre_p, ehf);
13950 ret = GS_ALL_DONE;
13951 break;
13952 }
13953
13954 case OBJ_TYPE_REF:
13955 {
13956 enum gimplify_status r0, r1;
13957 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
13958 post_p, is_gimple_val, fb_rvalue);
13959 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
13960 post_p, is_gimple_val, fb_rvalue);
13961 TREE_SIDE_EFFECTS (*expr_p) = 0;
13962 ret = MIN (r0, r1);
13963 }
13964 break;
13965
13966 case LABEL_DECL:
13967 /* We get here when taking the address of a label. We mark
13968 the label as "forced", meaning it can never be removed and
13969 it is a potential target for any computed goto. */
13970 FORCED_LABEL (*expr_p) = 1;
13971 ret = GS_ALL_DONE;
13972 break;
13973
13974 case STATEMENT_LIST:
13975 ret = gimplify_statement_list (expr_p, pre_p);
13976 break;
13977
13978 case WITH_SIZE_EXPR:
13979 {
13980 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13981 post_p == &internal_post ? NULL : post_p,
13982 gimple_test_f, fallback);
13983 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
13984 is_gimple_val, fb_rvalue);
13985 ret = GS_ALL_DONE;
13986 }
13987 break;
13988
13989 case VAR_DECL:
13990 case PARM_DECL:
13991 ret = gimplify_var_or_parm_decl (expr_p);
13992 break;
13993
13994 case RESULT_DECL:
13995 /* When within an OMP context, notice uses of variables. */
13996 if (gimplify_omp_ctxp)
13997 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
13998 ret = GS_ALL_DONE;
13999 break;
14000
14001 case DEBUG_EXPR_DECL:
14002 gcc_unreachable ();
14003
14004 case DEBUG_BEGIN_STMT:
14005 gimplify_seq_add_stmt (pre_p,
14006 gimple_build_debug_begin_stmt
14007 (TREE_BLOCK (*expr_p),
14008 EXPR_LOCATION (*expr_p)));
14009 ret = GS_ALL_DONE;
14010 *expr_p = NULL;
14011 break;
14012
14013 case SSA_NAME:
14014 /* Allow callbacks into the gimplifier during optimization. */
14015 ret = GS_ALL_DONE;
14016 break;
14017
14018 case OMP_PARALLEL:
14019 gimplify_omp_parallel (expr_p, pre_p);
14020 ret = GS_ALL_DONE;
14021 break;
14022
14023 case OMP_TASK:
14024 gimplify_omp_task (expr_p, pre_p);
14025 ret = GS_ALL_DONE;
14026 break;
14027
14028 case OMP_FOR:
14029 case OMP_SIMD:
14030 case OMP_DISTRIBUTE:
14031 case OMP_TASKLOOP:
14032 case OACC_LOOP:
14033 ret = gimplify_omp_for (expr_p, pre_p);
14034 break;
14035
14036 case OMP_LOOP:
14037 ret = gimplify_omp_loop (expr_p, pre_p);
14038 break;
14039
14040 case OACC_CACHE:
14041 gimplify_oacc_cache (expr_p, pre_p);
14042 ret = GS_ALL_DONE;
14043 break;
14044
14045 case OACC_DECLARE:
14046 gimplify_oacc_declare (expr_p, pre_p);
14047 ret = GS_ALL_DONE;
14048 break;
14049
14050 case OACC_HOST_DATA:
14051 case OACC_DATA:
14052 case OACC_KERNELS:
14053 case OACC_PARALLEL:
14054 case OACC_SERIAL:
14055 case OMP_SECTIONS:
14056 case OMP_SINGLE:
14057 case OMP_TARGET:
14058 case OMP_TARGET_DATA:
14059 case OMP_TEAMS:
14060 gimplify_omp_workshare (expr_p, pre_p);
14061 ret = GS_ALL_DONE;
14062 break;
14063
14064 case OACC_ENTER_DATA:
14065 case OACC_EXIT_DATA:
14066 case OACC_UPDATE:
14067 case OMP_TARGET_UPDATE:
14068 case OMP_TARGET_ENTER_DATA:
14069 case OMP_TARGET_EXIT_DATA:
14070 gimplify_omp_target_update (expr_p, pre_p);
14071 ret = GS_ALL_DONE;
14072 break;
14073
14074 case OMP_SECTION:
14075 case OMP_MASTER:
14076 case OMP_ORDERED:
14077 case OMP_CRITICAL:
14078 case OMP_SCAN:
14079 {
14080 gimple_seq body = NULL;
14081 gimple *g;
14082 bool saved_in_omp_construct = in_omp_construct;
14083
14084 in_omp_construct = true;
14085 gimplify_and_add (OMP_BODY (*expr_p), &body);
14086 in_omp_construct = saved_in_omp_construct;
14087 switch (TREE_CODE (*expr_p))
14088 {
14089 case OMP_SECTION:
14090 g = gimple_build_omp_section (body);
14091 break;
14092 case OMP_MASTER:
14093 g = gimple_build_omp_master (body);
14094 break;
14095 case OMP_ORDERED:
14096 g = gimplify_omp_ordered (*expr_p, body);
14097 break;
14098 case OMP_CRITICAL:
14099 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
14100 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
14101 gimplify_adjust_omp_clauses (pre_p, body,
14102 &OMP_CRITICAL_CLAUSES (*expr_p),
14103 OMP_CRITICAL);
14104 g = gimple_build_omp_critical (body,
14105 OMP_CRITICAL_NAME (*expr_p),
14106 OMP_CRITICAL_CLAUSES (*expr_p));
14107 break;
14108 case OMP_SCAN:
14109 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
14110 pre_p, ORT_WORKSHARE, OMP_SCAN);
14111 gimplify_adjust_omp_clauses (pre_p, body,
14112 &OMP_SCAN_CLAUSES (*expr_p),
14113 OMP_SCAN);
14114 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
14115 break;
14116 default:
14117 gcc_unreachable ();
14118 }
14119 gimplify_seq_add_stmt (pre_p, g);
14120 ret = GS_ALL_DONE;
14121 break;
14122 }
14123
14124 case OMP_TASKGROUP:
14125 {
14126 gimple_seq body = NULL;
14127
14128 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
14129 bool saved_in_omp_construct = in_omp_construct;
14130 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
14131 OMP_TASKGROUP);
14132 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
14133
14134 in_omp_construct = true;
14135 gimplify_and_add (OMP_BODY (*expr_p), &body);
14136 in_omp_construct = saved_in_omp_construct;
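	  /* Ensure GOMP_taskgroup_end is reached even on abnormal exit from
	     the body, by building "try { BODY } finally { GOMP_taskgroup_end (); }"
	     and wrapping that in the GIMPLE_OMP_TASKGROUP. */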
14137 gimple_seq cleanup = NULL;
14138 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
14139 gimple *g = gimple_build_call (fn, 0);
14140 gimple_seq_add_stmt (&cleanup, g);
14141 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
14142 body = NULL;
14143 gimple_seq_add_stmt (&body, g);
14144 g = gimple_build_omp_taskgroup (body, *pclauses);
14145 gimplify_seq_add_stmt (pre_p, g);
14146 ret = GS_ALL_DONE;
14147 break;
14148 }
14149
14150 case OMP_ATOMIC:
14151 case OMP_ATOMIC_READ:
14152 case OMP_ATOMIC_CAPTURE_OLD:
14153 case OMP_ATOMIC_CAPTURE_NEW:
14154 ret = gimplify_omp_atomic (expr_p, pre_p);
14155 break;
14156
14157 case TRANSACTION_EXPR:
14158 ret = gimplify_transaction (expr_p, pre_p);
14159 break;
14160
14161 case TRUTH_AND_EXPR:
14162 case TRUTH_OR_EXPR:
14163 case TRUTH_XOR_EXPR:
14164 {
14165 tree orig_type = TREE_TYPE (*expr_p);
14166 tree new_type, xop0, xop1;
14167 *expr_p = gimple_boolify (*expr_p);
14168 new_type = TREE_TYPE (*expr_p);
14169 if (!useless_type_conversion_p (orig_type, new_type))
14170 {
14171 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
14172 ret = GS_OK;
14173 break;
14174 }
14175
14176 /* Boolified binary truth expressions are semantically equivalent
14177 to bitwise binary expressions. Canonicalize them to the
14178 bitwise variant. */
14179 switch (TREE_CODE (*expr_p))
14180 {
14181 case TRUTH_AND_EXPR:
14182 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
14183 break;
14184 case TRUTH_OR_EXPR:
14185 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
14186 break;
14187 case TRUTH_XOR_EXPR:
14188 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
14189 break;
14190 default:
14191 break;
14192 }
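	/* E.g., a TRUTH_AND_EXPR of two boolean operands becomes
	   BIT_AND_EXPR; on boolean operands the bitwise result equals the
	   truth result, and the conversions below make sure both operands
	   have the boolified type. */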
14193 /* Now make sure that the operands have types compatible with the
14194 expression's new_type. */
14195 xop0 = TREE_OPERAND (*expr_p, 0);
14196 xop1 = TREE_OPERAND (*expr_p, 1);
14197 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
14198 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
14199 new_type,
14200 xop0);
14201 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
14202 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
14203 new_type,
14204 xop1);
14205 /* Continue classified as tcc_binary. */
14206 goto expr_2;
14207 }
14208
14209 case VEC_COND_EXPR:
14210 {
14211 enum gimplify_status r0, r1, r2;
14212
14213 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14214 post_p, is_gimple_condexpr, fb_rvalue);
14215 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14216 post_p, is_gimple_val, fb_rvalue);
14217 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14218 post_p, is_gimple_val, fb_rvalue);
14219
14220 ret = MIN (MIN (r0, r1), r2);
14221 recalculate_side_effects (*expr_p);
14222 }
14223 break;
14224
14225 case VEC_PERM_EXPR:
14226 /* Classified as tcc_expression. */
14227 goto expr_3;
14228
14229 case BIT_INSERT_EXPR:
14230 /* Argument 3 is a constant. */
14231 goto expr_2;
14232
14233 case POINTER_PLUS_EXPR:
14234 {
14235 enum gimplify_status r0, r1;
14236 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14237 post_p, is_gimple_val, fb_rvalue);
14238 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14239 post_p, is_gimple_val, fb_rvalue);
14240 recalculate_side_effects (*expr_p);
14241 ret = MIN (r0, r1);
14242 break;
14243 }
14244
14245 default:
14246 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
14247 {
14248 case tcc_comparison:
14249 /* Handle comparison of objects of non-scalar-mode aggregates
14250 with a call to memcmp. It would be nice to only have to do
14251 this for variable-sized objects, but then we'd have to allow
14252 the same nest of reference nodes we allow for MODIFY_EXPR and
14253 that's too complex.
14254
14255 Compare scalar mode aggregates as scalar mode values. Using
14256 memcmp for them would be very inefficient at best, and is
14257 plain wrong if bitfields are involved. */
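	  /* For instance, an equality comparison of two variable-sized
	     aggregates A and B is lowered by gimplify_variable_sized_compare
	     to (roughly) "memcmp (&a, &b, size) == 0". */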
14258 {
14259 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
14260
14261 /* Vector comparisons need no boolification. */
14262 if (TREE_CODE (type) == VECTOR_TYPE)
14263 goto expr_2;
14264 else if (!AGGREGATE_TYPE_P (type))
14265 {
14266 tree org_type = TREE_TYPE (*expr_p);
14267 *expr_p = gimple_boolify (*expr_p);
14268 if (!useless_type_conversion_p (org_type,
14269 TREE_TYPE (*expr_p)))
14270 {
14271 *expr_p = fold_convert_loc (input_location,
14272 org_type, *expr_p);
14273 ret = GS_OK;
14274 }
14275 else
14276 goto expr_2;
14277 }
14278 else if (TYPE_MODE (type) != BLKmode)
14279 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
14280 else
14281 ret = gimplify_variable_sized_compare (expr_p);
14282
14283 break;
14284 }
14285
14286 /* If *EXPR_P does not need to be special-cased, handle it
14287 according to its class. */
14288 case tcc_unary:
14289 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14290 post_p, is_gimple_val, fb_rvalue);
14291 break;
14292
14293 case tcc_binary:
14294 expr_2:
14295 {
14296 enum gimplify_status r0, r1;
14297
14298 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14299 post_p, is_gimple_val, fb_rvalue);
14300 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14301 post_p, is_gimple_val, fb_rvalue);
14302
14303 ret = MIN (r0, r1);
14304 break;
14305 }
14306
14307 expr_3:
14308 {
14309 enum gimplify_status r0, r1, r2;
14310
14311 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14312 post_p, is_gimple_val, fb_rvalue);
14313 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14314 post_p, is_gimple_val, fb_rvalue);
14315 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14316 post_p, is_gimple_val, fb_rvalue);
14317
14318 ret = MIN (MIN (r0, r1), r2);
14319 break;
14320 }
14321
14322 case tcc_declaration:
14323 case tcc_constant:
14324 ret = GS_ALL_DONE;
14325 goto dont_recalculate;
14326
14327 default:
14328 gcc_unreachable ();
14329 }
14330
14331 recalculate_side_effects (*expr_p);
14332
14333 dont_recalculate:
14334 break;
14335 }
14336
14337 gcc_assert (*expr_p || ret != GS_OK);
14338 }
14339 while (ret == GS_OK);
14340
14341 /* If we encountered an error_mark somewhere nested inside, either
14342 stub out the statement or propagate the error back out. */
14343 if (ret == GS_ERROR)
14344 {
14345 if (is_statement)
14346 *expr_p = NULL;
14347 goto out;
14348 }
14349
14350 /* This was only valid as a return value from the langhook, which
14351 we handled. Make sure it doesn't escape from any other context. */
14352 gcc_assert (ret != GS_UNHANDLED);
14353
14354 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
14355 {
14356 /* We aren't looking for a value, and we don't have a valid
14357 statement. If it doesn't have side-effects, throw it away.
14358 We can also get here with code such as "*&&L;", where L is
14359 a LABEL_DECL that is marked as FORCED_LABEL. */
14360 if (TREE_CODE (*expr_p) == LABEL_DECL
14361 || !TREE_SIDE_EFFECTS (*expr_p))
14362 *expr_p = NULL;
14363 else if (!TREE_THIS_VOLATILE (*expr_p))
14364 {
14365 /* This is probably a _REF that contains something nested that
14366 has side effects. Recurse through the operands to find it. */
14367 enum tree_code code = TREE_CODE (*expr_p);
14368
14369 switch (code)
14370 {
14371 case COMPONENT_REF:
14372 case REALPART_EXPR:
14373 case IMAGPART_EXPR:
14374 case VIEW_CONVERT_EXPR:
14375 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14376 gimple_test_f, fallback);
14377 break;
14378
14379 case ARRAY_REF:
14380 case ARRAY_RANGE_REF:
14381 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14382 gimple_test_f, fallback);
14383 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14384 gimple_test_f, fallback);
14385 break;
14386
14387 default:
14388 /* Anything else with side-effects must be converted to
14389 a valid statement before we get here. */
14390 gcc_unreachable ();
14391 }
14392
14393 *expr_p = NULL;
14394 }
14395 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
14396 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
14397 {
14398 /* Historically, the compiler has treated a bare reference
14399 to a non-BLKmode volatile lvalue as forcing a load. */
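	  /* E.g., for "volatile int v;" the expression statement "v;" must
	     still perform a load; the assignment into a temporary below
	     makes that read explicit. */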
14400 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
14401
14402 /* Normally, we do not want to create a temporary for a
14403 TREE_ADDRESSABLE type because such a type should not be
14404 copied by bitwise assignment. However, we make an
14405 exception here, as all we are doing here is ensuring that
14406 we read the bytes that make up the type. We use
14407 create_tmp_var_raw because create_tmp_var will abort when
14408 given a TREE_ADDRESSABLE type. */
14409 tree tmp = create_tmp_var_raw (type, "vol");
14410 gimple_add_tmp_var (tmp);
14411 gimplify_assign (tmp, *expr_p, pre_p);
14412 *expr_p = NULL;
14413 }
14414 else
14415 /* We can't do anything useful with a volatile reference to
14416 an incomplete type, so just throw it away. Likewise for
14417 a BLKmode type, since any implicit inner load should
14418 already have been turned into an explicit one by the
14419 gimplification process. */
14420 *expr_p = NULL;
14421 }
14422
14423 /* If we are gimplifying at the statement level, we're done. Tack
14424 everything together and return. */
14425 if (fallback == fb_none || is_statement)
14426 {
14427 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14428 it out for GC to reclaim it. */
14429 *expr_p = NULL_TREE;
14430
14431 if (!gimple_seq_empty_p (internal_pre)
14432 || !gimple_seq_empty_p (internal_post))
14433 {
14434 gimplify_seq_add_seq (&internal_pre, internal_post);
14435 gimplify_seq_add_seq (pre_p, internal_pre);
14436 }
14437
14438 /* The result of gimplifying *EXPR_P is going to be the last few
14439 statements in *PRE_P and *POST_P. Add location information
14440 to all the statements that were added by the gimplification
14441 helpers. */
14442 if (!gimple_seq_empty_p (*pre_p))
14443 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
14444
14445 if (!gimple_seq_empty_p (*post_p))
14446 annotate_all_with_location_after (*post_p, post_last_gsi,
14447 input_location);
14448
14449 goto out;
14450 }
14451
14452 #ifdef ENABLE_GIMPLE_CHECKING
14453 if (*expr_p)
14454 {
14455 enum tree_code code = TREE_CODE (*expr_p);
14456 /* These expressions should already be in gimple IR form. */
14457 gcc_assert (code != MODIFY_EXPR
14458 && code != ASM_EXPR
14459 && code != BIND_EXPR
14460 && code != CATCH_EXPR
14461 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
14462 && code != EH_FILTER_EXPR
14463 && code != GOTO_EXPR
14464 && code != LABEL_EXPR
14465 && code != LOOP_EXPR
14466 && code != SWITCH_EXPR
14467 && code != TRY_FINALLY_EXPR
14468 && code != EH_ELSE_EXPR
14469 && code != OACC_PARALLEL
14470 && code != OACC_KERNELS
14471 && code != OACC_SERIAL
14472 && code != OACC_DATA
14473 && code != OACC_HOST_DATA
14474 && code != OACC_DECLARE
14475 && code != OACC_UPDATE
14476 && code != OACC_ENTER_DATA
14477 && code != OACC_EXIT_DATA
14478 && code != OACC_CACHE
14479 && code != OMP_CRITICAL
14480 && code != OMP_FOR
14481 && code != OACC_LOOP
14482 && code != OMP_MASTER
14483 && code != OMP_TASKGROUP
14484 && code != OMP_ORDERED
14485 && code != OMP_PARALLEL
14486 && code != OMP_SCAN
14487 && code != OMP_SECTIONS
14488 && code != OMP_SECTION
14489 && code != OMP_SINGLE);
14490 }
14491 #endif
14492
14493 /* Otherwise we're gimplifying a subexpression, so the resulting
14494 value is interesting. If it's a valid operand that matches
14495 GIMPLE_TEST_F, we're done, unless we are handling some
14496 post-effects internally; in that case, we need to copy into
14497 a temporary before adding the post-effects to POST_P. */
14498 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
14499 goto out;
14500
14501 /* Otherwise, we need to create a new temporary for the gimplified
14502 expression. */
14503
14504 /* We can't return an lvalue if we have an internal postqueue. The
14505 object the lvalue refers to would (probably) be modified by the
14506 postqueue; we need to copy the value out first, which means an
14507 rvalue. */
14508 if ((fallback & fb_lvalue)
14509 && gimple_seq_empty_p (internal_post)
14510 && is_gimple_addressable (*expr_p))
14511 {
14512 /* An lvalue will do. Take the address of the expression, store it
14513 in a temporary, and replace the expression with an INDIRECT_REF of
14514 that temporary. */
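      /* In effect, an addressable *EXPR_P "E" becomes "tmp = &E" followed
	 by a use of "MEM_REF <tmp>"; the reference type keeps E's actual
	 alignment. */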
14515 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
14516 unsigned int ref_align = get_object_alignment (*expr_p);
14517 tree ref_type = TREE_TYPE (*expr_p);
14518 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
14519 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
14520 if (TYPE_ALIGN (ref_type) != ref_align)
14521 ref_type = build_aligned_type (ref_type, ref_align);
14522 *expr_p = build2 (MEM_REF, ref_type,
14523 tmp, build_zero_cst (ref_alias_type));
14524 }
14525 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
14526 {
14527 /* An rvalue will do. Assign the gimplified expression into a
14528 new temporary TMP and replace the original expression with
14529 TMP. First, make sure that the expression has a type so that
14530 it can be assigned into a temporary. */
14531 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
14532 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
14533 }
14534 else
14535 {
14536 #ifdef ENABLE_GIMPLE_CHECKING
14537 if (!(fallback & fb_mayfail))
14538 {
14539 fprintf (stderr, "gimplification failed:\n");
14540 print_generic_expr (stderr, *expr_p);
14541 debug_tree (*expr_p);
14542 internal_error ("gimplification failed");
14543 }
14544 #endif
14545 gcc_assert (fallback & fb_mayfail);
14546
14547 /* If this is an asm statement, and the user asked for the
14548 impossible, don't die. Fail and let gimplify_asm_expr
14549 issue an error. */
14550 ret = GS_ERROR;
14551 goto out;
14552 }
14553
14554 /* Make sure the temporary matches our predicate. */
14555 gcc_assert ((*gimple_test_f) (*expr_p));
14556
14557 if (!gimple_seq_empty_p (internal_post))
14558 {
14559 annotate_all_with_location (internal_post, input_location);
14560 gimplify_seq_add_seq (pre_p, internal_post);
14561 }
14562
14563 out:
14564 input_location = saved_location;
14565 return ret;
14566 }
14567
14568 /* Like gimplify_expr but make sure the gimplified result is not itself
14569 an SSA name (a decl is used instead). Temporaries required by
14570 evaluating *EXPR_P may still be SSA names. */
14571
14572 static enum gimplify_status
14573 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
14574 bool (*gimple_test_f) (tree), fallback_t fallback,
14575 bool allow_ssa)
14576 {
14577 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
14578 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
14579 gimple_test_f, fallback);
14580 if (! allow_ssa
14581 && TREE_CODE (*expr_p) == SSA_NAME)
14582 {
14583 tree name = *expr_p;
14584 if (was_ssa_name_p)
14585 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
14586 else
14587 {
14588 /* Avoid the extra copy: retarget NAME's defining statement to the new temporary. */
14589 *expr_p = create_tmp_reg (TREE_TYPE (name));
14590 if (!gimple_nop_p (SSA_NAME_DEF_STMT (name)))
14591 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
14592 release_ssa_name (name);
14593 }
14594 }
14595 return ret;
14596 }
14597
14598 /* Look through TYPE for variable-sized objects and gimplify each such
14599 size that we find. Add to LIST_P any statements generated. */
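/* For example, for the C99 VLA type "int a[n]" the TYPE_SIZE is a
   variable expression such as "n * 32"; the walk below gimplifies
   such sizes so that later uses see gimple values. */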
14600
14601 void
14602 gimplify_type_sizes (tree type, gimple_seq *list_p)
14603 {
14604 tree field, t;
14605
14606 if (type == NULL || type == error_mark_node)
14607 return;
14608
14609 /* We first do the main variant, then copy into any other variants. */
14610 type = TYPE_MAIN_VARIANT (type);
14611
14612 /* Avoid infinite recursion. */
14613 if (TYPE_SIZES_GIMPLIFIED (type))
14614 return;
14615
14616 TYPE_SIZES_GIMPLIFIED (type) = 1;
14617
14618 switch (TREE_CODE (type))
14619 {
14620 case INTEGER_TYPE:
14621 case ENUMERAL_TYPE:
14622 case BOOLEAN_TYPE:
14623 case REAL_TYPE:
14624 case FIXED_POINT_TYPE:
14625 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
14626 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
14627
14628 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
14629 {
14630 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
14631 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
14632 }
14633 break;
14634
14635 case ARRAY_TYPE:
14636 /* These types may not have declarations, so handle them here. */
14637 gimplify_type_sizes (TREE_TYPE (type), list_p);
14638 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
14639 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
14640 with assigned stack slots, for -O1+ -g they should be tracked
14641 by VTA. */
14642 if (!(TYPE_NAME (type)
14643 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14644 && DECL_IGNORED_P (TYPE_NAME (type)))
14645 && TYPE_DOMAIN (type)
14646 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
14647 {
14648 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
14649 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
14650 DECL_IGNORED_P (t) = 0;
14651 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
14652 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
14653 DECL_IGNORED_P (t) = 0;
14654 }
14655 break;
14656
14657 case RECORD_TYPE:
14658 case UNION_TYPE:
14659 case QUAL_UNION_TYPE:
14660 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14661 if (TREE_CODE (field) == FIELD_DECL)
14662 {
14663 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
14664 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
14665 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
14666 gimplify_type_sizes (TREE_TYPE (field), list_p);
14667 }
14668 break;
14669
14670 case POINTER_TYPE:
14671 case REFERENCE_TYPE:
14672 /* We used to recurse on the pointed-to type here, which turned out to
14673 be incorrect because its definition might refer to variables not
14674 yet initialized at this point if a forward declaration is involved.
14675
14676 It was actually useful for anonymous pointed-to types to ensure
14677 that the sizes evaluation dominates every possible later use of the
14678 values. Restricting to such types here would be safe since there
14679 is no possible forward declaration around, but would introduce an
14680 undesirable middle-end semantic to anonymity. We then defer to
14681 front-ends the responsibility of ensuring that the sizes are
14682 evaluated both early and late enough, e.g. by attaching artificial
14683 type declarations to the tree. */
14684 break;
14685
14686 default:
14687 break;
14688 }
14689
14690 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
14691 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
14692
14693 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
14694 {
14695 TYPE_SIZE (t) = TYPE_SIZE (type);
14696 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
14697 TYPE_SIZES_GIMPLIFIED (t) = 1;
14698 }
14699 }
14700
14701 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
14702 a size or position, has had all of its SAVE_EXPRs evaluated.
14703 We add any required statements to *STMT_P. */
14704
14705 void
14706 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
14707 {
14708 tree expr = *expr_p;
14709
14710 /* We don't do anything if the value isn't there, is constant, or contains
14711 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
14712 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
14713 will want to replace it with a new variable, but that will cause problems
14714 if this type is from outside the function. It's OK to have that here. */
14715 if (expr == NULL_TREE
14716 || is_gimple_constant (expr)
14717 || TREE_CODE (expr) == VAR_DECL
14718 || CONTAINS_PLACEHOLDER_P (expr))
14719 return;
14720
14721 *expr_p = unshare_expr (expr);
14722
14723 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
14724 if the def vanishes. */
14725 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
14726
14727 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
14728 FE, ensure that it is a VAR_DECL; otherwise we might treat some decls
14729 via gimplify_vla_decl even when all of their sizes are INTEGER_CSTs. */
14730 if (is_gimple_constant (*expr_p))
14731 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
14732 }
14733
14734 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
14735 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
14736 is true, also gimplify the parameters. */
14737
14738 gbind *
14739 gimplify_body (tree fndecl, bool do_parms)
14740 {
14741 location_t saved_location = input_location;
14742 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
14743 gimple *outer_stmt;
14744 gbind *outer_bind;
14745
14746 timevar_push (TV_TREE_GIMPLIFY);
14747
14748 init_tree_ssa (cfun);
14749
14750 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
14751 gimplification. */
14752 default_rtl_profile ();
14753
14754 gcc_assert (gimplify_ctxp == NULL);
14755 push_gimplify_context (true);
14756
14757 if (flag_openacc || flag_openmp)
14758 {
14759 gcc_assert (gimplify_omp_ctxp == NULL);
14760 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
14761 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
14762 }
14763
14764 /* Unshare most shared trees in the body and in that of any nested functions.
14765 It would seem we don't have to do this for nested functions because
14766 they are supposed to be output and then the outer function gimplified
14767 first, but the g++ front end doesn't always do it that way. */
14768 unshare_body (fndecl);
14769 unvisit_body (fndecl);
14770
14771 /* Make sure input_location isn't set to something weird. */
14772 input_location = DECL_SOURCE_LOCATION (fndecl);
14773
14774 /* Resolve callee-copies. This has to be done before processing
14775 the body so that DECL_VALUE_EXPR gets processed correctly. */
14776 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
14777
14778 /* Gimplify the function's body. */
14779 seq = NULL;
14780 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
14781 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
14782 if (!outer_stmt)
14783 {
14784 outer_stmt = gimple_build_nop ();
14785 gimplify_seq_add_stmt (&seq, outer_stmt);
14786 }
14787
14788 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
14789 not the case, wrap everything in a GIMPLE_BIND to make it so. */
14790 if (gimple_code (outer_stmt) == GIMPLE_BIND
14791 && (gimple_seq_first_nondebug_stmt (seq)
14792 == gimple_seq_last_nondebug_stmt (seq)))
14793 {
14794 outer_bind = as_a <gbind *> (outer_stmt);
14795 if (gimple_seq_first_stmt (seq) != outer_stmt
14796 || gimple_seq_last_stmt (seq) != outer_stmt)
14797 {
14798 /* If there are debug stmts before or after outer_stmt, move them
14799 inside of outer_bind body. */
14800 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
14801 gimple_seq second_seq = NULL;
14802 if (gimple_seq_first_stmt (seq) != outer_stmt
14803 && gimple_seq_last_stmt (seq) != outer_stmt)
14804 {
14805 second_seq = gsi_split_seq_after (gsi);
14806 gsi_remove (&gsi, false);
14807 }
14808 else if (gimple_seq_first_stmt (seq) != outer_stmt)
14809 gsi_remove (&gsi, false);
14810 else
14811 {
14812 gsi_remove (&gsi, false);
14813 second_seq = seq;
14814 seq = NULL;
14815 }
14816 gimple_seq_add_seq_without_update (&seq,
14817 gimple_bind_body (outer_bind));
14818 gimple_seq_add_seq_without_update (&seq, second_seq);
14819 gimple_bind_set_body (outer_bind, seq);
14820 }
14821 }
14822 else
14823 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
14824
14825 DECL_SAVED_TREE (fndecl) = NULL_TREE;
14826
14827 /* If we had callee-copies statements, insert them at the beginning
14828 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
14829 if (!gimple_seq_empty_p (parm_stmts))
14830 {
14831 tree parm;
14832
14833 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
14834 if (parm_cleanup)
14835 {
14836 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
14837 GIMPLE_TRY_FINALLY);
14838 parm_stmts = NULL;
14839 gimple_seq_add_stmt (&parm_stmts, g);
14840 }
14841 gimple_bind_set_body (outer_bind, parm_stmts);
14842
14843 for (parm = DECL_ARGUMENTS (current_function_decl);
14844 parm; parm = DECL_CHAIN (parm))
14845 if (DECL_HAS_VALUE_EXPR_P (parm))
14846 {
14847 DECL_HAS_VALUE_EXPR_P (parm) = 0;
14848 DECL_IGNORED_P (parm) = 0;
14849 }
14850 }
14851
14852 if ((flag_openacc || flag_openmp || flag_openmp_simd)
14853 && gimplify_omp_ctxp)
14854 {
14855 delete_omp_context (gimplify_omp_ctxp);
14856 gimplify_omp_ctxp = NULL;
14857 }
14858
14859 pop_gimplify_context (outer_bind);
14860 gcc_assert (gimplify_ctxp == NULL);
14861
14862 if (flag_checking && !seen_error ())
14863 verify_gimple_in_seq (gimple_bind_body (outer_bind));
14864
14865 timevar_pop (TV_TREE_GIMPLIFY);
14866 input_location = saved_location;
14867
14868 return outer_bind;
14869 }
14870
14871 typedef char *char_p; /* For DEF_VEC_P. */
14872
14873 /* Return whether we should exclude FNDECL from instrumentation. */
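/* A function is excluded if its printable name contains an entry of
   -finstrument-functions-exclude-function-list, or its source file
   name contains an entry of -finstrument-functions-exclude-file-list;
   note that matching is by plain substring. */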
14874
14875 static bool
14876 flag_instrument_functions_exclude_p (tree fndecl)
14877 {
14878 vec<char_p> *v;
14879
14880 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
14881 if (v && v->length () > 0)
14882 {
14883 const char *name;
14884 int i;
14885 char *s;
14886
14887 name = lang_hooks.decl_printable_name (fndecl, 1);
14888 FOR_EACH_VEC_ELT (*v, i, s)
14889 if (strstr (name, s) != NULL)
14890 return true;
14891 }
14892
14893 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
14894 if (v && v->length () > 0)
14895 {
14896 const char *name;
14897 int i;
14898 char *s;
14899
14900 name = DECL_SOURCE_FILE (fndecl);
14901 FOR_EACH_VEC_ELT (*v, i, s)
14902 if (strstr (name, s) != NULL)
14903 return true;
14904 }
14905
14906 return false;
14907 }
14908
14909 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
14910 node for the function we want to gimplify.
14911
14912 The resulting sequence of GIMPLE statements is stored as the
14913 GIMPLE body of FNDECL. */
14914
14915 void
14916 gimplify_function_tree (tree fndecl)
14917 {
14918 gimple_seq seq;
14919 gbind *bind;
14920
14921 gcc_assert (!gimple_body (fndecl));
14922
14923 if (DECL_STRUCT_FUNCTION (fndecl))
14924 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
14925 else
14926 push_struct_function (fndecl);
14927
14928 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
14929 if necessary. */
14930 cfun->curr_properties |= PROP_gimple_lva;
14931
14932 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
14933 asan_poisoned_variables = new hash_set<tree> ();
14934 bind = gimplify_body (fndecl, true);
14935 if (asan_poisoned_variables)
14936 {
14937 delete asan_poisoned_variables;
14938 asan_poisoned_variables = NULL;
14939 }
14940
14941 /* The tree body of the function is no longer needed, replace it
14942 with the new GIMPLE body. */
14943 seq = NULL;
14944 gimple_seq_add_stmt (&seq, bind);
14945 gimple_set_body (fndecl, seq);
14946
14947 /* If we're instrumenting function entry/exit, then prepend the call to
14948 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
14949 catch the exit hook. */
14950 /* ??? Add some way to ignore exceptions for this TFE. */
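  /* In effect the instrumented body becomes:
       t = __builtin_return_address (0);
       __cyg_profile_func_enter (this_fn, t);
       try { BODY }
       finally { t = __builtin_return_address (0);
		 __cyg_profile_func_exit (this_fn, t); }  */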
14951 if (flag_instrument_function_entry_exit
14952 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
14953 /* Do not instrument extern inline functions. */
14954 && !(DECL_DECLARED_INLINE_P (fndecl)
14955 && DECL_EXTERNAL (fndecl)
14956 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
14957 && !flag_instrument_functions_exclude_p (fndecl))
14958 {
14959 tree x;
14960 gbind *new_bind;
14961 gimple *tf;
14962 gimple_seq cleanup = NULL, body = NULL;
14963 tree tmp_var, this_fn_addr;
14964 gcall *call;
14965
14966 /* The instrumentation hooks aren't going to call the instrumented
14967 function and the address they receive is expected to be matchable
14968 against symbol addresses. Make sure we don't create a trampoline,
14969 in case the current function is nested. */
14970 this_fn_addr = build_fold_addr_expr (current_function_decl);
14971 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
14972
14973 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
14974 call = gimple_build_call (x, 1, integer_zero_node);
14975 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
14976 gimple_call_set_lhs (call, tmp_var);
14977 gimplify_seq_add_stmt (&cleanup, call);
14978 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
14979 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
14980 gimplify_seq_add_stmt (&cleanup, call);
14981 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
14982
14983 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
14984 call = gimple_build_call (x, 1, integer_zero_node);
14985 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
14986 gimple_call_set_lhs (call, tmp_var);
14987 gimplify_seq_add_stmt (&body, call);
14988 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
14989 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
14990 gimplify_seq_add_stmt (&body, call);
14991 gimplify_seq_add_stmt (&body, tf);
14992 new_bind = gimple_build_bind (NULL, body, NULL);
14993
14994 /* Replace the current function body with the body
14995 wrapped in the try/finally TF. */
14996 seq = NULL;
14997 gimple_seq_add_stmt (&seq, new_bind);
14998 gimple_set_body (fndecl, seq);
14999 bind = new_bind;
15000 }
15001
15002 if (sanitize_flags_p (SANITIZE_THREAD))
15003 {
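      /* For -fsanitize=thread, make the function-exit hook run on every
	 path out of the function, including exceptional ones, by wrapping
	 the whole body in a try/finally. */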
15004 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
15005 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
15006 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
15007 /* Replace the current function body with the body
15008 wrapped in the try/finally TF. */
15009 seq = NULL;
15010 gimple_seq_add_stmt (&seq, new_bind);
15011 gimple_set_body (fndecl, seq);
15012 }
15013
15014 DECL_SAVED_TREE (fndecl) = NULL_TREE;
15015 cfun->curr_properties |= PROP_gimple_any;
15016
15017 pop_cfun ();
15018
15019 dump_function (TDI_gimple, fndecl);
15020 }
15021
15022 /* Return a dummy expression of type TYPE in order to keep going after an
15023 error. */
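/* The result is effectively "*(TYPE *) 0": it has the right type and
   mode without referencing any real object. */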
15024
15025 static tree
15026 dummy_object (tree type)
15027 {
15028 tree t = build_int_cst (build_pointer_type (type), 0);
15029 return build2 (MEM_REF, type, t, t);
15030 }
15031
15032 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
15033 builtin function, but a very special sort of operator. */
15034
15035 enum gimplify_status
15036 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
15037 gimple_seq *post_p ATTRIBUTE_UNUSED)
15038 {
15039 tree promoted_type, have_va_type;
15040 tree valist = TREE_OPERAND (*expr_p, 0);
15041 tree type = TREE_TYPE (*expr_p);
15042 tree t, tag, aptag;
15043 location_t loc = EXPR_LOCATION (*expr_p);
15044
15045 /* Verify that valist is of the proper type. */
15046 have_va_type = TREE_TYPE (valist);
15047 if (have_va_type == error_mark_node)
15048 return GS_ERROR;
15049 have_va_type = targetm.canonical_va_list_type (have_va_type);
15050 if (have_va_type == NULL_TREE
15051 && POINTER_TYPE_P (TREE_TYPE (valist)))
15052 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
15053 have_va_type
15054 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
15055 gcc_assert (have_va_type != NULL_TREE);
15056
15057 /* Generate a diagnostic for requesting data of a type that cannot
15058 be passed through `...' due to type promotion at the call site. */
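  /* For example, "va_arg (ap, short)" is undefined because a short
     argument is promoted to int when passed through "...", so it must
     be read back with "va_arg (ap, int)". */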
15059 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
15060 != type)
15061 {
15062 static bool gave_help;
15063 bool warned;
15064 /* Use the expansion point to handle cases such as passing bool (defined
15065 in a system header) through `...'. */
15066 location_t xloc
15067 = expansion_point_location_if_in_system_header (loc);
15068
15069 /* Unfortunately, this is merely undefined, rather than a constraint
15070 violation, so we cannot make this an error. If this call is never
15071 executed, the program is still strictly conforming. */
15072 auto_diagnostic_group d;
15073 warned = warning_at (xloc, 0,
15074 "%qT is promoted to %qT when passed through %<...%>",
15075 type, promoted_type);
15076 if (!gave_help && warned)
15077 {
15078 gave_help = true;
15079 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
15080 promoted_type, type);
15081 }
15082
15083 /* We can, however, treat "undefined" any way we please.
15084 Call abort to encourage the user to fix the program. */
15085 if (warned)
15086 inform (xloc, "if this code is reached, the program will abort");
15087 /* Before the abort, allow the evaluation of the va_list
15088 expression to exit or longjmp. */
15089 gimplify_and_add (valist, pre_p);
15090 t = build_call_expr_loc (loc,
15091 builtin_decl_implicit (BUILT_IN_TRAP), 0);
15092 gimplify_and_add (t, pre_p);
15093
15094 /* This is dead code, but go ahead and finish so that the
15095 mode of the result comes out right. */
15096 *expr_p = dummy_object (type);
15097 return GS_ALL_DONE;
15098 }
15099
15100 tag = build_int_cst (build_pointer_type (type), 0);
15101 aptag = build_int_cst (TREE_TYPE (valist), 0);
15102
15103 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
15104 valist, tag, aptag);
15105
15106 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
15107 needs to be expanded. */
15108 cfun->curr_properties &= ~PROP_gimple_lva;
15109
15110 return GS_OK;
15111 }
15112
15113 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
15114
15115 DST/SRC are the destination and source respectively. You can pass
15116 ungimplified trees in DST or SRC, in which case they will be
15117 converted to a gimple operand if necessary.
15118
15119 This function returns the newly created GIMPLE_ASSIGN tuple. */
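/* For example, "gimplify_assign (x, y, &seq)" appends "x = y" to SEQ,
   gimplifying X and Y first if they are not yet valid gimple
   operands. */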
15120
15121 gimple *
15122 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
15123 {
15124 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
15125 gimplify_and_add (t, seq_p);
15126 ggc_free (t);
15127 return gimple_seq_last_stmt (*seq_p);
15128 }
15129
15130 inline hashval_t
15131 gimplify_hasher::hash (const elt_t *p)
15132 {
15133 tree t = p->val;
15134 return iterative_hash_expr (t, 0);
15135 }
15136
15137 inline bool
15138 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
15139 {
15140 tree t1 = p1->val;
15141 tree t2 = p2->val;
15142 enum tree_code code = TREE_CODE (t1);
15143
15144 if (TREE_CODE (t2) != code
15145 || TREE_TYPE (t1) != TREE_TYPE (t2))
15146 return false;
15147
15148 if (!operand_equal_p (t1, t2, 0))
15149 return false;
15150
15151 /* Only allow them to compare equal if they also hash equal; otherwise
15152 results are nondeterministic, and we fail the bootstrap comparison. */
15153 gcc_checking_assert (hash (p1) == hash (p2));
15154
15155 return true;
15156 }