/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2019 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an always,to or always,tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,				/* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,	/* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,	/* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,	/* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  int defaultmap[4];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);

/* Shorter alias for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the topmost element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the topmost element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new OMP context that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an OMP context that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
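
/* For illustration: with optimization enabled, looking up a formal
   temporary for the same expression twice returns the same variable,
   so two gimplifications of A + B share a single D.NNNN temporary via
   the hash table above; with -O0, or when VAL has side effects, a
   fresh temporary is created for every call.  */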

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
			 bool allow_ssa)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the bindings of function FN.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


\f
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only a few specific cases of node sharing across functions,
   it is probably easier for a front-end to unshare the expressions manually.
   On the contrary, if the expressions generated at the global level are as
   widespread as expressions generated within functions, deep unsharing is
   very likely the way to go.  */
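
/* For illustration: if a front end reuses a single tree node T in both
   arms of a COND_EXPR,

       cond ? T : T

   gimplifying the first arm rewrites T in place, so the second arm
   would otherwise be gimplified from an already-lowered node and yield
   invalid GIMPLE.  The mark/copy/unmark walk described above replaces
   the second reference with a fresh copy of T before that can
   happen.  */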

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used for stored
   expressions which span multiple functions, such as BINFO_VTABLE, since
   the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has one,
   OR_ELSE otherwise.  The location of a STATEMENT_LIST comprising at
   least one DEBUG_BEGIN_STMT followed by exactly one EXPR is the
   location of that EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}
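
/* For example, given a STATEMENT_LIST of the form

       DEBUG_BEGIN_STMT
       DEBUG_BEGIN_STMT
       x = 1;

   rexpr_location returns the location of "x = 1;".  If no
   DEBUG_BEGIN_STMT leads the list, or more than one statement follows
   them, OR_ELSE is returned instead.  */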

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}

\f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
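
/* A sketch of the effect: for a statement expression used as a value,

       y = ({ int z; z = foo (); z; });

   the BIND_EXPR wrapper has the type of z.  voidify_wrapper_expr walks
   down to its last statement, replaces that statement with an
   assignment to a fresh "retval" temporary (or pushes down the
   MODIFY_EXPR passed as TEMP), gives each wrapper on the way void
   type, and returns the temporary.  */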

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
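
/* The calls built above amount to

       saved_stack.N = __builtin_stack_save ();
       ...
       __builtin_stack_restore (saved_stack.N);

   with saved_stack.N being the temporary through which the two calls
   communicate.  */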

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending on
   the POISON flag, the shadow memory of variable DECL.  The call is
   inserted at the position identified by iterator IT; BEFORE selects
   whether it goes before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* All stack variables must be aligned to the ASAN shadow granularity.  */
  if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that either poisons or
   unpoisons DECL, depending on the POISON flag.  The created statement
   is appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}
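
/* In GIMPLE dumps the emitted statement looks roughly like

       ASAN_MARK (POISON, &x, 4);

   poisoning the shadow memory of the 4-byte variable x so that ASan
   can report a use-after-scope; the UNPOISON form re-enables access
   when the variable comes back into scope.  */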

/* qsort comparison function: order VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate an IFN_ASAN_MARK internal call for each variable in VARIABLES,
   poisoning or unpoisoning depending on the POISON flag.  The created
   statements are appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable to
	 prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
		  else
		    omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
		}
	      /* Static locals inside a target construct or an offloaded
		 routine need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t));
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
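
/* A sketch of the shape built above for a block that needs cleanups:

       saved_stack.N = __builtin_stack_save ();
       try
	 {
	   ... gimplified body ...
	 }
       finally
	 {
	   __builtin_stack_restore (saved_stack.N);
	   var = {CLOBBER};
	 }

   so the stack restore, the variable clobbers and any ASan poisoning
   run on every exit from the block.  */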

/* Maybe add an early return predict statement to the PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, add a PREDICT statement marking
     the early return as not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
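
/* For illustration: when the value is returned in registers,

       return a + b;

   gimplifies to roughly

       D.N = a + b;
       return D.N;

   with the same temporary (gimplify_ctxp->return_temp) reused by every
   return statement in the function.  */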

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}
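
/* For illustration: a declaration such as

       int a[n];

   has its size expressions gimplified into SEQ_P and becomes, in
   effect,

       a.N = __builtin_alloca_with_align (D.size, align);   (or a variant)

   with DECL_VALUE_EXPR (a) set to *a.N, so both later uses of "a" and
   the debug info go through the pointer temporary.  */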

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
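
/* So a LOOP_EXPR lowers to, schematically,

       start_label:
	 ... gimplified body ...
	 goto start_label;
       exit_label:

   where exit_label is emitted only if the body contained an EXIT_EXPR
   that set gimplify_ctxp->exit_label.  */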
1814
1815 /* Gimplify a statement list onto a sequence. These may be created either
1816 by an enlightened front-end, or by shortcut_cond_expr. */
1817
1818 static enum gimplify_status
1819 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1820 {
1821 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1822
1823 tree_stmt_iterator i = tsi_start (*expr_p);
1824
1825 while (!tsi_end_p (i))
1826 {
1827 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1828 tsi_delink (&i);
1829 }
1830
1831 if (temp)
1832 {
1833 *expr_p = temp;
1834 return GS_OK;
1835 }
1836
1837 return GS_ALL_DONE;
1838 }
1839
1840 /* Callback for walk_gimple_seq. */
1841
1842 static tree
1843 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1844 struct walk_stmt_info *wi)
1845 {
1846 gimple *stmt = gsi_stmt (*gsi_p);
1847
1848 *handled_ops_p = true;
1849 switch (gimple_code (stmt))
1850 {
1851 case GIMPLE_TRY:
1852 /* A compiler-generated cleanup or a user-written try block.
1853 If it's empty, don't dive into it--that would result in
1854 worse location info. */
1855 if (gimple_try_eval (stmt) == NULL)
1856 {
1857 wi->info = stmt;
1858 return integer_zero_node;
1859 }
1860 /* Fall through. */
1861 case GIMPLE_BIND:
1862 case GIMPLE_CATCH:
1863 case GIMPLE_EH_FILTER:
1864 case GIMPLE_TRANSACTION:
1865 /* Walk the sub-statements. */
1866 *handled_ops_p = false;
1867 break;
1868
1869 case GIMPLE_DEBUG:
1870 /* Ignore these. We may generate them before declarations that
1871 are never executed. If there's something to warn about,
1872 there will be non-debug stmts too, and we'll catch those. */
1873 break;
1874
1875 case GIMPLE_CALL:
1876 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1877 {
1878 *handled_ops_p = false;
1879 break;
1880 }
1881 /* Fall through. */
1882 default:
1883 /* Save the first "real" statement (not a decl/lexical scope/...). */
1884 wi->info = stmt;
1885 return integer_zero_node;
1886 }
1887 return NULL_TREE;
1888 }
1889
1890 /* Possibly warn about unreachable statements between switch's controlling
1891 expression and the first case. SEQ is the body of a switch expression. */
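/* For instance (illustrative only), this warns for a statement placed
   before the first case label:

	switch (i)
	  {
	    i++;		// warning: statement will never be executed
	  case 0:
	    break;
	  }
*/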
1892
1893 static void
1894 maybe_warn_switch_unreachable (gimple_seq seq)
1895 {
1896 if (!warn_switch_unreachable
1897 /* This warning doesn't play well with Fortran when optimizations
1898 are on. */
1899 || lang_GNU_Fortran ()
1900 || seq == NULL)
1901 return;
1902
1903 struct walk_stmt_info wi;
1904 memset (&wi, 0, sizeof (wi));
1905 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1906 gimple *stmt = (gimple *) wi.info;
1907
1908 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1909 {
1910 if (gimple_code (stmt) == GIMPLE_GOTO
1911 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1912 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1913 /* Don't warn for compiler-generated gotos. These occur
1914 in Duff's devices, for example. */;
1915 else
1916 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1917 "statement will never be executed");
1918 }
1919 }
1920
1921
1922 /* A label entry that pairs label and a location. */
1923 struct label_entry
1924 {
1925 tree label;
1926 location_t loc;
1927 };
1928
1929 /* Find LABEL in vector of label entries VEC. */
1930
1931 static struct label_entry *
1932 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1933 {
1934 unsigned int i;
1935 struct label_entry *l;
1936
1937 FOR_EACH_VEC_ELT (*vec, i, l)
1938 if (l->label == label)
1939 return l;
1940 return NULL;
1941 }
1942
1943 /* Return true if LABEL, a LABEL_DECL, represents a case label
1944 in a vector of labels CASES. */
1945
1946 static bool
1947 case_label_p (const vec<tree> *cases, tree label)
1948 {
1949 unsigned int i;
1950 tree l;
1951
1952 FOR_EACH_VEC_ELT (*cases, i, l)
1953 if (CASE_LABEL (l) == label)
1954 return true;
1955 return false;
1956 }
1957
1958 /* Find the last nondebug statement in a scope STMT. */
1959
1960 static gimple *
1961 last_stmt_in_scope (gimple *stmt)
1962 {
1963 if (!stmt)
1964 return NULL;
1965
1966 switch (gimple_code (stmt))
1967 {
1968 case GIMPLE_BIND:
1969 {
1970 gbind *bind = as_a <gbind *> (stmt);
1971 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
1972 return last_stmt_in_scope (stmt);
1973 }
1974
1975 case GIMPLE_TRY:
1976 {
1977 gtry *try_stmt = as_a <gtry *> (stmt);
1978 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
1979 gimple *last_eval = last_stmt_in_scope (stmt);
1980 if (gimple_stmt_may_fallthru (last_eval)
1981 && (last_eval == NULL
1982 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1983 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1984 {
1985 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
1986 return last_stmt_in_scope (stmt);
1987 }
1988 else
1989 return last_eval;
1990 }
1991
1992 case GIMPLE_DEBUG:
1993 gcc_unreachable ();
1994
1995 default:
1996 return stmt;
1997 }
1998 }
1999
2000 /* Collect interesting labels in LABELS and return the statement preceding
2001 another case label, or a user-defined label. Store a location useful
2002 to give warnings at *PREVLOC (usually the location of the returned
2003 statement or of its surrounding scope). */
2004
2005 static gimple *
2006 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2007 auto_vec <struct label_entry> *labels,
2008 location_t *prevloc)
2009 {
2010 gimple *prev = NULL;
2011
2012 *prevloc = UNKNOWN_LOCATION;
2013 do
2014 {
2015 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2016 {
2017 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2018 which starts with a GIMPLE_SWITCH and ends with a break label.
2019 Handle that as a single statement that can fall through. */
2020 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2021 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2022 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2023 if (last
2024 && gimple_code (first) == GIMPLE_SWITCH
2025 && gimple_code (last) == GIMPLE_LABEL)
2026 {
2027 tree label = gimple_label_label (as_a <glabel *> (last));
2028 if (SWITCH_BREAK_LABEL_P (label))
2029 {
2030 prev = bind;
2031 gsi_next (gsi_p);
2032 continue;
2033 }
2034 }
2035 }
2036 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2037 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2038 {
2039 /* Nested scope. Only look at the last statement of
2040 the innermost scope. */
2041 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2042 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2043 if (last)
2044 {
2045 prev = last;
2046 /* It might be a label without a location. Use the
2047 location of the scope then. */
2048 if (!gimple_has_location (prev))
2049 *prevloc = bind_loc;
2050 }
2051 gsi_next (gsi_p);
2052 continue;
2053 }
2054
2055 /* Ifs are tricky. */
2056 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2057 {
2058 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2059 tree false_lab = gimple_cond_false_label (cond_stmt);
2060 location_t if_loc = gimple_location (cond_stmt);
2061
2062 /* If we have e.g.
2063 if (i > 1) goto <D.2259>; else goto D;
2064 we can't do much with the else-branch. */
2065 if (!DECL_ARTIFICIAL (false_lab))
2066 break;
2067
2068 /* Go on until the false label, then one step back. */
2069 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2070 {
2071 gimple *stmt = gsi_stmt (*gsi_p);
2072 if (gimple_code (stmt) == GIMPLE_LABEL
2073 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2074 break;
2075 }
2076
2077 /* Not found? Oops. */
2078 if (gsi_end_p (*gsi_p))
2079 break;
2080
2081 struct label_entry l = { false_lab, if_loc };
2082 labels->safe_push (l);
2083
2084 /* Go to the last statement of the then branch. */
2085 gsi_prev (gsi_p);
2086
2087 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2088 <D.1759>:
2089 <stmt>;
2090 goto <D.1761>;
2091 <D.1760>:
2092 */
2093 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2094 && !gimple_has_location (gsi_stmt (*gsi_p)))
2095 {
2096 /* Look at the statement before; it might be an
2097 attribute fallthrough, in which case don't warn. */
2098 gsi_prev (gsi_p);
2099 bool fallthru_before_dest
2100 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2101 gsi_next (gsi_p);
2102 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2103 if (!fallthru_before_dest)
2104 {
2105 struct label_entry l = { goto_dest, if_loc };
2106 labels->safe_push (l);
2107 }
2108 }
2109 /* And move back. */
2110 gsi_next (gsi_p);
2111 }
2112
2113 /* Remember the last statement. Skip labels that are of no interest
2114 to us. */
2115 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2116 {
2117 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2118 if (find_label_entry (labels, label))
2119 prev = gsi_stmt (*gsi_p);
2120 }
2121 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2122 ;
2123 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2124 prev = gsi_stmt (*gsi_p);
2125 gsi_next (gsi_p);
2126 }
2127 while (!gsi_end_p (*gsi_p)
2128 /* Stop if we find a case or a user-defined label. */
2129 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2130 || !gimple_has_location (gsi_stmt (*gsi_p))));
2131
2132 if (prev && gimple_has_location (prev))
2133 *prevloc = gimple_location (prev);
2134 return prev;
2135 }
2136
2137 /* Return true if the switch fallthrough warning should occur. LABEL is
2138 the label statement that we're falling through to. */
2139
2140 static bool
2141 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2142 {
2143 gimple_stmt_iterator gsi = *gsi_p;
2144
2145 /* Don't warn if the label is marked with a "falls through" comment. */
2146 if (FALLTHROUGH_LABEL_P (label))
2147 return false;
2148
2149 /* Don't warn for non-case labels followed by a statement:
2150 case 0:
2151 foo ();
2152 label:
2153 bar ();
2154 as these are likely intentional. */
2155 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2156 {
2157 tree l;
2158 while (!gsi_end_p (gsi)
2159 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2160 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2161 && !case_label_p (&gimplify_ctxp->case_labels, l))
2162 gsi_next_nondebug (&gsi);
2163 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2164 return false;
2165 }
2166
2167 /* Don't warn for terminated branches, i.e. when the code after the
2168 subsequent case label immediately breaks. */
2169 gsi = *gsi_p;
2170
2171 /* Skip all immediately following labels. */
2172 while (!gsi_end_p (gsi)
2173 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2174 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2175 gsi_next_nondebug (&gsi);
2176
2177 /* { ... something; default:; } */
2178 if (gsi_end_p (gsi)
2179 /* { ... something; default: break; } or
2180 { ... something; default: goto L; } */
2181 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2182 /* { ... something; default: return; } */
2183 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2184 return false;
2185
2186 return true;
2187 }
2188
2189 /* Callback for walk_gimple_seq. */
2190
2191 static tree
2192 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2193 struct walk_stmt_info *)
2194 {
2195 gimple *stmt = gsi_stmt (*gsi_p);
2196
2197 *handled_ops_p = true;
2198 switch (gimple_code (stmt))
2199 {
2200 case GIMPLE_TRY:
2201 case GIMPLE_BIND:
2202 case GIMPLE_CATCH:
2203 case GIMPLE_EH_FILTER:
2204 case GIMPLE_TRANSACTION:
2205 /* Walk the sub-statements. */
2206 *handled_ops_p = false;
2207 break;
2208
2209 /* Find a sequence of the form:
2210
2211 GIMPLE_LABEL
2212 [...]
2213 <may fallthru stmt>
2214 GIMPLE_LABEL
2215
2216 and possibly warn. */
2217 case GIMPLE_LABEL:
2218 {
2219 /* Found a label. Skip all immediately following labels. */
2220 while (!gsi_end_p (*gsi_p)
2221 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2222 gsi_next_nondebug (gsi_p);
2223
2224 /* There might be no more statements. */
2225 if (gsi_end_p (*gsi_p))
2226 return integer_zero_node;
2227
2228 /* Vector of labels that fall through. */
2229 auto_vec <struct label_entry> labels;
2230 location_t prevloc;
2231 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2232
2233 /* There might be no more statements. */
2234 if (gsi_end_p (*gsi_p))
2235 return integer_zero_node;
2236
2237 gimple *next = gsi_stmt (*gsi_p);
2238 tree label;
2239 /* If what follows is a label, then we may have a fallthrough. */
2240 if (gimple_code (next) == GIMPLE_LABEL
2241 && gimple_has_location (next)
2242 && (label = gimple_label_label (as_a <glabel *> (next)))
2243 && prev != NULL)
2244 {
2245 struct label_entry *l;
2246 bool warned_p = false;
2247 auto_diagnostic_group d;
2248 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2249 /* Quiet. */;
2250 else if (gimple_code (prev) == GIMPLE_LABEL
2251 && (label = gimple_label_label (as_a <glabel *> (prev)))
2252 && (l = find_label_entry (&labels, label)))
2253 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2254 "this statement may fall through");
2255 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2256 /* Try to be clever and don't warn when the statement
2257 can't actually fall through. */
2258 && gimple_stmt_may_fallthru (prev)
2259 && prevloc != UNKNOWN_LOCATION)
2260 warned_p = warning_at (prevloc,
2261 OPT_Wimplicit_fallthrough_,
2262 "this statement may fall through");
2263 if (warned_p)
2264 inform (gimple_location (next), "here");
2265
2266 /* Mark this label as processed so as to prevent multiple
2267 warnings in nested switches. */
2268 FALLTHROUGH_LABEL_P (label) = true;
2269
2270 /* So that the next warn_implicit_fallthrough_r will start looking for
2271 a new sequence starting with this label. */
2272 gsi_prev (gsi_p);
2273 }
2274 }
2275 break;
2276 default:
2277 break;
2278 }
2279 return NULL_TREE;
2280 }
2281
2282 /* Warn when a switch case falls through. */
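/* Illustrative only; the exact diagnostics depend on -Wimplicit-fallthrough=N:

	switch (n)
	  {
	  case 1:
	    foo ();		// warning: this statement may fall through
	  case 2:		// note: here
	    bar ();
	    break;
	  }

   The warning can be silenced with __attribute__((fallthrough)) (in C++,
   [[fallthrough]];) or, depending on the level, a "falls through"
   comment before the label.  */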
2283
2284 static void
2285 maybe_warn_implicit_fallthrough (gimple_seq seq)
2286 {
2287 if (!warn_implicit_fallthrough)
2288 return;
2289
2290 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2291 if (!(lang_GNU_C ()
2292 || lang_GNU_CXX ()
2293 || lang_GNU_OBJC ()))
2294 return;
2295
2296 struct walk_stmt_info wi;
2297 memset (&wi, 0, sizeof (wi));
2298 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2299 }
2300
2301 /* Callback for walk_gimple_seq. */
2302
2303 static tree
2304 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2305 struct walk_stmt_info *wi)
2306 {
2307 gimple *stmt = gsi_stmt (*gsi_p);
2308
2309 *handled_ops_p = true;
2310 switch (gimple_code (stmt))
2311 {
2312 case GIMPLE_TRY:
2313 case GIMPLE_BIND:
2314 case GIMPLE_CATCH:
2315 case GIMPLE_EH_FILTER:
2316 case GIMPLE_TRANSACTION:
2317 /* Walk the sub-statements. */
2318 *handled_ops_p = false;
2319 break;
2320 case GIMPLE_CALL:
2321 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2322 {
2323 gsi_remove (gsi_p, true);
2324 if (gsi_end_p (*gsi_p))
2325 {
2326 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2327 return integer_zero_node;
2328 }
2329
2330 bool found = false;
2331 location_t loc = gimple_location (stmt);
2332
2333 gimple_stmt_iterator gsi2 = *gsi_p;
2334 stmt = gsi_stmt (gsi2);
2335 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2336 {
2337 /* Go on until the artificial label. */
2338 tree goto_dest = gimple_goto_dest (stmt);
2339 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2340 {
2341 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2342 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2343 == goto_dest)
2344 break;
2345 }
2346
2347 /* Not found? Stop. */
2348 if (gsi_end_p (gsi2))
2349 break;
2350
2351 /* Look one past it. */
2352 gsi_next (&gsi2);
2353 }
2354
2355 /* We're looking for a case label or default label here. */
2356 while (!gsi_end_p (gsi2))
2357 {
2358 stmt = gsi_stmt (gsi2);
2359 if (gimple_code (stmt) == GIMPLE_LABEL)
2360 {
2361 tree label = gimple_label_label (as_a <glabel *> (stmt));
2362 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2363 {
2364 found = true;
2365 break;
2366 }
2367 }
2368 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2369 ;
2370 else if (!is_gimple_debug (stmt))
2371 /* Anything else is not expected. */
2372 break;
2373 gsi_next (&gsi2);
2374 }
2375 if (!found)
2376 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2377 "a case label or default label");
2378 }
2379 break;
2380 default:
2381 break;
2382 }
2383 return NULL_TREE;
2384 }
2385
2386 /* Expand all FALLTHROUGH () calls in SEQ. */
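/* A sketch of the lowering this undoes: the C/C++ front ends represent
   __attribute__((fallthrough)) and C++ [[fallthrough]] as a call to the
   internal function IFN_FALLTHROUGH.  Those marker calls are removed
   here, and any marker not directly followed by a case or default label
   is diagnosed, e.g. (illustrative):

	case 1:
	  __attribute__((fallthrough));
	  foo ();	// warning: attribute 'fallthrough' not preceding
	case 2:		// a case label or default label
	  break;
*/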
2387
2388 static void
2389 expand_FALLTHROUGH (gimple_seq *seq_p)
2390 {
2391 struct walk_stmt_info wi;
2392 location_t loc;
2393 memset (&wi, 0, sizeof (wi));
2394 wi.info = (void *) &loc;
2395 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2396 if (wi.callback_result == integer_zero_node)
2397 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2398 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2399 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2400 "a case label or default label");
2401 }
2402
2403 \f
2404 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2405 branch to. */
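/* Roughly (illustrative only; the <D.n> label names are invented), the
   GENERIC form

	switch (a) { case 1: x = 1; break; default: x = 0; }

   is lowered to

	switch (a) <default: <D.3>, case 1: <D.1>>
	<D.1>:
	  x = 1;
	  goto <D.2>;		// the break
	<D.3>:
	  x = 0;
	<D.2>:
*/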
2406
2407 static enum gimplify_status
2408 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2409 {
2410 tree switch_expr = *expr_p;
2411 gimple_seq switch_body_seq = NULL;
2412 enum gimplify_status ret;
2413 tree index_type = TREE_TYPE (switch_expr);
2414 if (index_type == NULL_TREE)
2415 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2416
2417 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2418 fb_rvalue);
2419 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2420 return ret;
2421
2422 if (SWITCH_BODY (switch_expr))
2423 {
2424 vec<tree> labels;
2425 vec<tree> saved_labels;
2426 hash_set<tree> *saved_live_switch_vars = NULL;
2427 tree default_case = NULL_TREE;
2428 gswitch *switch_stmt;
2429
2430 /* Save old labels, get new ones from body, then restore the old
2431 labels. Save all the things from the switch body to append after. */
2432 saved_labels = gimplify_ctxp->case_labels;
2433 gimplify_ctxp->case_labels.create (8);
2434
2435 /* Only create live_switch_vars if SWITCH_BODY is a BIND_EXPR or STATEMENT_LIST. */
2436 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2437 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2438 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2439 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2440 else
2441 gimplify_ctxp->live_switch_vars = NULL;
2442
2443 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2444 gimplify_ctxp->in_switch_expr = true;
2445
2446 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2447
2448 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2449 maybe_warn_switch_unreachable (switch_body_seq);
2450 maybe_warn_implicit_fallthrough (switch_body_seq);
2451 /* Only do this for the outermost GIMPLE_SWITCH. */
2452 if (!gimplify_ctxp->in_switch_expr)
2453 expand_FALLTHROUGH (&switch_body_seq);
2454
2455 labels = gimplify_ctxp->case_labels;
2456 gimplify_ctxp->case_labels = saved_labels;
2457
2458 if (gimplify_ctxp->live_switch_vars)
2459 {
2460 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2461 delete gimplify_ctxp->live_switch_vars;
2462 }
2463 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2464
2465 preprocess_case_label_vec_for_gimple (labels, index_type,
2466 &default_case);
2467
2468 bool add_bind = false;
2469 if (!default_case)
2470 {
2471 glabel *new_default;
2472
2473 default_case
2474 = build_case_label (NULL_TREE, NULL_TREE,
2475 create_artificial_label (UNKNOWN_LOCATION));
2476 if (old_in_switch_expr)
2477 {
2478 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2479 add_bind = true;
2480 }
2481 new_default = gimple_build_label (CASE_LABEL (default_case));
2482 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2483 }
2484 else if (old_in_switch_expr)
2485 {
2486 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2487 if (last && gimple_code (last) == GIMPLE_LABEL)
2488 {
2489 tree label = gimple_label_label (as_a <glabel *> (last));
2490 if (SWITCH_BREAK_LABEL_P (label))
2491 add_bind = true;
2492 }
2493 }
2494
2495 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2496 default_case, labels);
2497 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2498 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2499 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2500 so that we can easily find the start and end of the switch
2501 statement. */
2502 if (add_bind)
2503 {
2504 gimple_seq bind_body = NULL;
2505 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2506 gimple_seq_add_seq (&bind_body, switch_body_seq);
2507 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2508 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2509 gimplify_seq_add_stmt (pre_p, bind);
2510 }
2511 else
2512 {
2513 gimplify_seq_add_stmt (pre_p, switch_stmt);
2514 gimplify_seq_add_seq (pre_p, switch_body_seq);
2515 }
2516 labels.release ();
2517 }
2518 else
2519 gcc_unreachable ();
2520
2521 return GS_ALL_DONE;
2522 }
2523
2524 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2525
2526 static enum gimplify_status
2527 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2528 {
2529 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2530 == current_function_decl);
2531
2532 tree label = LABEL_EXPR_LABEL (*expr_p);
2533 glabel *label_stmt = gimple_build_label (label);
2534 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2535 gimplify_seq_add_stmt (pre_p, label_stmt);
2536
2537 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2538 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2539 NOT_TAKEN));
2540 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2541 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2542 TAKEN));
2543
2544 return GS_ALL_DONE;
2545 }
2546
2547 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2548
2549 static enum gimplify_status
2550 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2551 {
2552 struct gimplify_ctx *ctxp;
2553 glabel *label_stmt;
2554
2555 /* Invalid programs can play Duff's Device type games with, for example,
2556 #pragma omp parallel. At least in the C front end, we don't
2557 detect such invalid branches until after gimplification, in the
2558 diagnose_omp_blocks pass. */
2559 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2560 if (ctxp->case_labels.exists ())
2561 break;
2562
2563 tree label = CASE_LABEL (*expr_p);
2564 label_stmt = gimple_build_label (label);
2565 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2566 ctxp->case_labels.safe_push (*expr_p);
2567 gimplify_seq_add_stmt (pre_p, label_stmt);
2568
2569 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2570 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2571 NOT_TAKEN));
2572 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2573 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2574 TAKEN));
2575
2576 return GS_ALL_DONE;
2577 }
2578
2579 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2580 if necessary. */
2581
2582 tree
2583 build_and_jump (tree *label_p)
2584 {
2585 if (label_p == NULL)
2586 /* If there's nowhere to jump, just fall through. */
2587 return NULL_TREE;
2588
2589 if (*label_p == NULL_TREE)
2590 {
2591 tree label = create_artificial_label (UNKNOWN_LOCATION);
2592 *label_p = label;
2593 }
2594
2595 return build1 (GOTO_EXPR, void_type_node, *label_p);
2596 }
2597
2598 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2599 This also involves building a label to jump to and communicating it to
2600 gimplify_loop_expr through gimplify_ctxp->exit_label. */
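/* Illustratively, EXIT_EXPR <cond> becomes

	if (cond) goto exit_label;

   where "exit_label" (an invented name here) is emitted after the loop
   body by gimplify_loop_expr.  */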
2601
2602 static enum gimplify_status
2603 gimplify_exit_expr (tree *expr_p)
2604 {
2605 tree cond = TREE_OPERAND (*expr_p, 0);
2606 tree expr;
2607
2608 expr = build_and_jump (&gimplify_ctxp->exit_label);
2609 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2610 *expr_p = expr;
2611
2612 return GS_OK;
2613 }
2614
2615 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2616 different from its canonical type, wrap the whole thing inside a
2617 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2618 type.
2619
2620 The canonical type of a COMPONENT_REF is the type of the field being
2621 referenced--unless the field is a bit-field which can be read directly
2622 in a smaller mode, in which case the canonical type is the
2623 sign-appropriate type corresponding to that mode. */
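/* For instance (illustrative and target-dependent): given

	struct S { int f : 8; } s;

   a read of s.f may be performed directly in QImode, so the canonical
   type of the COMPONENT_REF is the signed 8-bit type rather than int.  */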
2624
2625 static void
2626 canonicalize_component_ref (tree *expr_p)
2627 {
2628 tree expr = *expr_p;
2629 tree type;
2630
2631 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2632
2633 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2634 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2635 else
2636 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2637
2638 /* One could argue that all the stuff below is not necessary for
2639 the non-bitfield case and declare it a FE error if type
2640 adjustment would be needed. */
2641 if (TREE_TYPE (expr) != type)
2642 {
2643 #ifdef ENABLE_TYPES_CHECKING
2644 tree old_type = TREE_TYPE (expr);
2645 #endif
2646 int type_quals;
2647
2648 /* We need to preserve qualifiers and propagate them from
2649 operand 0. */
2650 type_quals = TYPE_QUALS (type)
2651 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2652 if (TYPE_QUALS (type) != type_quals)
2653 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2654
2655 /* Set the type of the COMPONENT_REF to the underlying type. */
2656 TREE_TYPE (expr) = type;
2657
2658 #ifdef ENABLE_TYPES_CHECKING
2659 /* It is now a FE error if the conversion from the canonical
2660 type to the original expression type is not useless. */
2661 gcc_assert (useless_type_conversion_p (old_type, type));
2662 #endif
2663 }
2664 }
2665
2666 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2667 to foo, embed that change in the ADDR_EXPR by converting
2668 T array[U];
2669 (T *)&array
2670 ==>
2671 &array[L]
2672 where L is the lower bound. For simplicity, only do this for constant
2673 lower bound.
2674 The constraint is that the type of &array[L] is trivially convertible
2675 to T *. */
2676
2677 static void
2678 canonicalize_addr_expr (tree *expr_p)
2679 {
2680 tree expr = *expr_p;
2681 tree addr_expr = TREE_OPERAND (expr, 0);
2682 tree datype, ddatype, pddatype;
2683
2684 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2685 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2686 || TREE_CODE (addr_expr) != ADDR_EXPR)
2687 return;
2688
2689 /* The addr_expr type should be a pointer to an array. */
2690 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2691 if (TREE_CODE (datype) != ARRAY_TYPE)
2692 return;
2693
2694 /* The pointer to element type shall be trivially convertible to
2695 the expression pointer type. */
2696 ddatype = TREE_TYPE (datype);
2697 pddatype = build_pointer_type (ddatype);
2698 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2699 pddatype))
2700 return;
2701
2702 /* The lower bound and element sizes must be constant. */
2703 if (!TYPE_SIZE_UNIT (ddatype)
2704 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2705 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2706 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2707 return;
2708
2709 /* All checks succeeded. Build a new node to merge the cast. */
2710 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2711 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2712 NULL_TREE, NULL_TREE);
2713 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2714
2715 /* We may have stripped a required restrict qualifier above. */
2716 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2717 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2718 }
2719
2720 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2721 underneath as appropriate. */
2722
2723 static enum gimplify_status
2724 gimplify_conversion (tree *expr_p)
2725 {
2726 location_t loc = EXPR_LOCATION (*expr_p);
2727 gcc_assert (CONVERT_EXPR_P (*expr_p));
2728
2729 /* Then strip away all but the outermost conversion. */
2730 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2731
2732 /* And remove the outermost conversion if it's useless. */
2733 if (tree_ssa_useless_type_conversion (*expr_p))
2734 *expr_p = TREE_OPERAND (*expr_p, 0);
2735
2736 /* If we still have a conversion at the toplevel,
2737 then canonicalize some constructs. */
2738 if (CONVERT_EXPR_P (*expr_p))
2739 {
2740 tree sub = TREE_OPERAND (*expr_p, 0);
2741
2742 /* If a NOP conversion is changing the type of a COMPONENT_REF
2743 expression, then canonicalize its type now in order to expose more
2744 redundant conversions. */
2745 if (TREE_CODE (sub) == COMPONENT_REF)
2746 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2747
2748 /* If a NOP conversion is changing a pointer to array of foo
2749 to a pointer to foo, embed that change in the ADDR_EXPR. */
2750 else if (TREE_CODE (sub) == ADDR_EXPR)
2751 canonicalize_addr_expr (expr_p);
2752 }
2753
2754 /* If we have a conversion to a non-register type force the
2755 use of a VIEW_CONVERT_EXPR instead. */
2756 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2757 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2758 TREE_OPERAND (*expr_p, 0));
2759
2760 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2761 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2762 TREE_SET_CODE (*expr_p, NOP_EXPR);
2763
2764 return GS_OK;
2765 }
2766
2767 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2768 DECL_VALUE_EXPR, and it's worth re-examining things. */
2769
2770 static enum gimplify_status
2771 gimplify_var_or_parm_decl (tree *expr_p)
2772 {
2773 tree decl = *expr_p;
2774
2775 /* ??? If this is a local variable, and it has not been seen in any
2776 outer BIND_EXPR, then it's probably the result of a duplicate
2777 declaration, for which we've already issued an error. It would
2778 be really nice if the front end wouldn't leak these at all.
2779 Currently the only known culprit is C++ destructors, as seen
2780 in g++.old-deja/g++.jason/binding.C. */
2781 if (VAR_P (decl)
2782 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2783 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2784 && decl_function_context (decl) == current_function_decl)
2785 {
2786 gcc_assert (seen_error ());
2787 return GS_ERROR;
2788 }
2789
2790 /* When within an OMP context, notice uses of variables. */
2791 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2792 return GS_ALL_DONE;
2793
2794 /* If the decl is an alias for another expression, substitute it now. */
2795 if (DECL_HAS_VALUE_EXPR_P (decl))
2796 {
2797 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2798 return GS_OK;
2799 }
2800
2801 return GS_ALL_DONE;
2802 }
2803
2804 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2805
2806 static void
2807 recalculate_side_effects (tree t)
2808 {
2809 enum tree_code code = TREE_CODE (t);
2810 int len = TREE_OPERAND_LENGTH (t);
2811 int i;
2812
2813 switch (TREE_CODE_CLASS (code))
2814 {
2815 case tcc_expression:
2816 switch (code)
2817 {
2818 case INIT_EXPR:
2819 case MODIFY_EXPR:
2820 case VA_ARG_EXPR:
2821 case PREDECREMENT_EXPR:
2822 case PREINCREMENT_EXPR:
2823 case POSTDECREMENT_EXPR:
2824 case POSTINCREMENT_EXPR:
2825 /* All of these have side-effects, no matter what their
2826 operands are. */
2827 return;
2828
2829 default:
2830 break;
2831 }
2832 /* Fall through. */
2833
2834 case tcc_comparison: /* a comparison expression */
2835 case tcc_unary: /* a unary arithmetic expression */
2836 case tcc_binary: /* a binary arithmetic expression */
2837 case tcc_reference: /* a reference */
2838 case tcc_vl_exp: /* a function call */
2839 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2840 for (i = 0; i < len; ++i)
2841 {
2842 tree op = TREE_OPERAND (t, i);
2843 if (op && TREE_SIDE_EFFECTS (op))
2844 TREE_SIDE_EFFECTS (t) = 1;
2845 }
2846 break;
2847
2848 case tcc_constant:
2849 /* No side-effects. */
2850 return;
2851
2852 default:
2853 gcc_unreachable ();
2854 }
2855 }
2856
2857 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2858 node *EXPR_P.
2859
2860 compound_lval
2861 : min_lval '[' val ']'
2862 | min_lval '.' ID
2863 | compound_lval '[' val ']'
2864 | compound_lval '.' ID
2865
2866 This is not part of the original SIMPLE definition, which separates
2867 array and member references, but it seems reasonable to handle them
2868 together. Also, this way we don't run into problems with union
2869 aliasing; gcc requires that for accesses through a union to alias, the
2870 union reference must be explicit, which was not always the case when we
2871 were splitting up array and member refs.
2872
2873 PRE_P points to the sequence where side effects that must happen before
2874 *EXPR_P should be stored.
2875
2876 POST_P points to the sequence where side effects that must happen after
2877 *EXPR_P should be stored. */
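/* As an illustration, for a[i].f the stack built below holds
   { COMPONENT_REF, ARRAY_REF } and P ends up pointing at "a"; we then
   gimplify variable bounds/sizes/offsets first, the base "a" second,
   and finally the index "i", innermost to outermost.  */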
2878
2879 static enum gimplify_status
2880 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2881 fallback_t fallback)
2882 {
2883 tree *p;
2884 enum gimplify_status ret = GS_ALL_DONE, tret;
2885 int i;
2886 location_t loc = EXPR_LOCATION (*expr_p);
2887 tree expr = *expr_p;
2888
2889 /* Create a stack of the subexpressions so later we can walk them in
2890 order from inner to outer. */
2891 auto_vec<tree, 10> expr_stack;
2892
2893 /* We can handle anything that get_inner_reference can deal with. */
2894 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2895 {
2896 restart:
2897 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2898 if (TREE_CODE (*p) == INDIRECT_REF)
2899 *p = fold_indirect_ref_loc (loc, *p);
2900
2901 if (handled_component_p (*p))
2902 ;
2903 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2904 additional COMPONENT_REFs. */
2905 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2906 && gimplify_var_or_parm_decl (p) == GS_OK)
2907 goto restart;
2908 else
2909 break;
2910
2911 expr_stack.safe_push (*p);
2912 }
2913
2914 gcc_assert (expr_stack.length ());
2915
2916 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2917 walked through and P points to the innermost expression.
2918
2919 Java requires that we elaborate nodes in source order. That
2920 means we must gimplify the inner expression followed by each of
2921 the indices, in order. But we can't gimplify the inner
2922 expression until we deal with any variable bounds, sizes, or
2923 positions in order to deal with PLACEHOLDER_EXPRs.
2924
2925 So we do this in three steps. First we deal with the annotations
2926 for any variables in the components, then we gimplify the base,
2927 then we gimplify any indices, from left to right. */
2928 for (i = expr_stack.length () - 1; i >= 0; i--)
2929 {
2930 tree t = expr_stack[i];
2931
2932 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2933 {
2934 /* Gimplify the low bound and element type size and put them into
2935 the ARRAY_REF. If these values are set, they have already been
2936 gimplified. */
2937 if (TREE_OPERAND (t, 2) == NULL_TREE)
2938 {
2939 tree low = unshare_expr (array_ref_low_bound (t));
2940 if (!is_gimple_min_invariant (low))
2941 {
2942 TREE_OPERAND (t, 2) = low;
2943 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2944 post_p, is_gimple_reg,
2945 fb_rvalue);
2946 ret = MIN (ret, tret);
2947 }
2948 }
2949 else
2950 {
2951 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2952 is_gimple_reg, fb_rvalue);
2953 ret = MIN (ret, tret);
2954 }
2955
2956 if (TREE_OPERAND (t, 3) == NULL_TREE)
2957 {
2958 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2959 tree elmt_size = unshare_expr (array_ref_element_size (t));
2960 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2961
2962 /* Divide the element size by the alignment of the element
2963 type (above). */
2964 elmt_size
2965 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2966
2967 if (!is_gimple_min_invariant (elmt_size))
2968 {
2969 TREE_OPERAND (t, 3) = elmt_size;
2970 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2971 post_p, is_gimple_reg,
2972 fb_rvalue);
2973 ret = MIN (ret, tret);
2974 }
2975 }
2976 else
2977 {
2978 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2979 is_gimple_reg, fb_rvalue);
2980 ret = MIN (ret, tret);
2981 }
2982 }
2983 else if (TREE_CODE (t) == COMPONENT_REF)
2984 {
2985 /* Set the field offset into T and gimplify it. */
2986 if (TREE_OPERAND (t, 2) == NULL_TREE)
2987 {
2988 tree offset = unshare_expr (component_ref_field_offset (t));
2989 tree field = TREE_OPERAND (t, 1);
2990 tree factor
2991 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2992
2993 /* Divide the offset by its alignment. */
2994 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2995
2996 if (!is_gimple_min_invariant (offset))
2997 {
2998 TREE_OPERAND (t, 2) = offset;
2999 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
3000 post_p, is_gimple_reg,
3001 fb_rvalue);
3002 ret = MIN (ret, tret);
3003 }
3004 }
3005 else
3006 {
3007 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3008 is_gimple_reg, fb_rvalue);
3009 ret = MIN (ret, tret);
3010 }
3011 }
3012 }
3013
3014 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3015 so as to match the min_lval predicate. Failure to do so may result
3016 in the creation of large aggregate temporaries. */
3017 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3018 fallback | fb_lvalue);
3019 ret = MIN (ret, tret);
3020
3021 /* And finally, the indices and operands of ARRAY_REF. During this
3022 loop we also remove any useless conversions. */
3023 for (; expr_stack.length () > 0; )
3024 {
3025 tree t = expr_stack.pop ();
3026
3027 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3028 {
3029 /* Gimplify the dimension. */
3030 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3031 {
3032 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3033 is_gimple_val, fb_rvalue);
3034 ret = MIN (ret, tret);
3035 }
3036 }
3037
3038 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3039
3040 /* The innermost expression P may have originally had
3041 TREE_SIDE_EFFECTS set which would have caused all the outer
3042 expressions in *EXPR_P leading to P to also have had
3043 TREE_SIDE_EFFECTS set. */
3044 recalculate_side_effects (t);
3045 }
3046
3047 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3048 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3049 {
3050 canonicalize_component_ref (expr_p);
3051 }
3052
3053 expr_stack.release ();
3054
3055 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3056
3057 return ret;
3058 }
3059
3060 /* Gimplify the self modifying expression pointed to by EXPR_P
3061 (++, --, +=, -=).
3062
3063 PRE_P points to the list where side effects that must happen before
3064 *EXPR_P should be stored.
3065
3066 POST_P points to the list where side effects that must happen after
3067 *EXPR_P should be stored.
3068
3069 WANT_VALUE is nonzero iff we want to use the value of this expression
3070 in another expression.
3071
3072 ARITH_TYPE is the type the computation should be performed in. */
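/* For illustration (the temporary name is invented), a postfix
   "v = i++" is gimplified as

	i.0 = i;
	i = i.0 + 1;
	v = i.0;

   whereas the prefix "v = ++i" simply becomes "i = i + 1; v = i;".  */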
3073
3074 enum gimplify_status
3075 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3076 bool want_value, tree arith_type)
3077 {
3078 enum tree_code code;
3079 tree lhs, lvalue, rhs, t1;
3080 gimple_seq post = NULL, *orig_post_p = post_p;
3081 bool postfix;
3082 enum tree_code arith_code;
3083 enum gimplify_status ret;
3084 location_t loc = EXPR_LOCATION (*expr_p);
3085
3086 code = TREE_CODE (*expr_p);
3087
3088 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3089 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3090
3091 /* Prefix or postfix? */
3092 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3093 /* Faster to treat as prefix if result is not used. */
3094 postfix = want_value;
3095 else
3096 postfix = false;
3097
3098 /* For postfix, make sure the inner expression's post side effects
3099 are executed after side effects from this expression. */
3100 if (postfix)
3101 post_p = &post;
3102
3103 /* Add or subtract? */
3104 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3105 arith_code = PLUS_EXPR;
3106 else
3107 arith_code = MINUS_EXPR;
3108
3109 /* Gimplify the LHS into a GIMPLE lvalue. */
3110 lvalue = TREE_OPERAND (*expr_p, 0);
3111 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3112 if (ret == GS_ERROR)
3113 return ret;
3114
3115 /* Extract the operands to the arithmetic operation. */
3116 lhs = lvalue;
3117 rhs = TREE_OPERAND (*expr_p, 1);
3118
3119 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3120 that as the result value and in the postqueue operation. */
3121 if (postfix)
3122 {
3123 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3124 if (ret == GS_ERROR)
3125 return ret;
3126
3127 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
3128 }
3129
3130 /* For pointer increments, use POINTER_PLUS_EXPR. */
3131 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3132 {
3133 rhs = convert_to_ptrofftype_loc (loc, rhs);
3134 if (arith_code == MINUS_EXPR)
3135 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3136 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3137 }
3138 else
3139 t1 = fold_convert (TREE_TYPE (*expr_p),
3140 fold_build2 (arith_code, arith_type,
3141 fold_convert (arith_type, lhs),
3142 fold_convert (arith_type, rhs)));
3143
3144 if (postfix)
3145 {
3146 gimplify_assign (lvalue, t1, pre_p);
3147 gimplify_seq_add_seq (orig_post_p, post);
3148 *expr_p = lhs;
3149 return GS_ALL_DONE;
3150 }
3151 else
3152 {
3153 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3154 return GS_OK;
3155 }
3156 }
3157
3158 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
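/* E.g. (an illustrative sketch): with "int n; int (*p)[n];" the object
   *p has the variably sized type int[n], so an rvalue use of *p is
   wrapped as WITH_SIZE_EXPR <*p, n * sizeof (int)> so that later passes
   can still find the object's size.  */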
3159
3160 static void
3161 maybe_with_size_expr (tree *expr_p)
3162 {
3163 tree expr = *expr_p;
3164 tree type = TREE_TYPE (expr);
3165 tree size;
3166
3167 /* If we've already wrapped this or the type is error_mark_node, we can't do
3168 anything. */
3169 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3170 || type == error_mark_node)
3171 return;
3172
3173 /* If the size isn't known or is a constant, we have nothing to do. */
3174 size = TYPE_SIZE_UNIT (type);
3175 if (!size || poly_int_tree_p (size))
3176 return;
3177
3178 /* Otherwise, make a WITH_SIZE_EXPR. */
3179 size = unshare_expr (size);
3180 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3181 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3182 }
3183
3184 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3185 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3186 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3187 gimplified to an SSA name. */
3188
3189 enum gimplify_status
3190 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3191 bool allow_ssa)
3192 {
3193 bool (*test) (tree);
3194 fallback_t fb;
3195
3196 /* In general, we allow lvalues for function arguments to avoid
3197 extra overhead of copying large aggregates out of even larger
3198 aggregates into temporaries only to copy the temporaries to
3199 the argument list. Make optimizers happy by pulling out to
3200 temporaries those types that fit in registers. */
3201 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3202 test = is_gimple_val, fb = fb_rvalue;
3203 else
3204 {
3205 test = is_gimple_lvalue, fb = fb_either;
3206 /* Also strip a TARGET_EXPR that would force an extra copy. */
3207 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3208 {
3209 tree init = TARGET_EXPR_INITIAL (*arg_p);
3210 if (init
3211 && !VOID_TYPE_P (TREE_TYPE (init)))
3212 *arg_p = init;
3213 }
3214 }
3215
3216 /* If this is a variable sized type, we must remember the size. */
3217 maybe_with_size_expr (arg_p);
3218
3219 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3220 /* Make sure arguments have the same location as the function call
3221 itself. */
3222 protected_set_expr_location (*arg_p, call_location);
3223
3224 /* There is a sequence point before a function call. Side effects in
3225 the argument list must occur before the actual call. So, when
3226 gimplifying arguments, force gimplify_expr to use an internal
3227 post queue which is then appended to the end of PRE_P. */
3228 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3229 }
3230
3231 /* Don't fold inside offloading or taskreg regions: it can break code by
3232 adding decl references that weren't in the source. We'll do it during
3233 the omplower pass instead. */
3234
3235 static bool
3236 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3237 {
3238 struct gimplify_omp_ctx *ctx;
3239 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3240 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3241 return false;
3242 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3243 return false;
3244 /* Delay folding of builtins until the IL is in consistent state
3245 so the diagnostic machinery can do a better job. */
3246 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3247 return false;
3248 return fold_stmt (gsi);
3249 }
3250
3251 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3252 WANT_VALUE is true if the result of the call is desired. */
3253
3254 static enum gimplify_status
3255 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3256 {
3257 tree fndecl, parms, p, fnptrtype;
3258 enum gimplify_status ret;
3259 int i, nargs;
3260 gcall *call;
3261 bool builtin_va_start_p = false;
3262 location_t loc = EXPR_LOCATION (*expr_p);
3263
3264 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3265
3266 /* For reliable diagnostics during inlining, it is necessary that
3267 every call_expr be annotated with file and line. */
3268 if (! EXPR_HAS_LOCATION (*expr_p))
3269 SET_EXPR_LOCATION (*expr_p, input_location);
3270
3271 /* Gimplify internal functions created in the FEs. */
3272 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3273 {
3274 if (want_value)
3275 return GS_ALL_DONE;
3276
3277 nargs = call_expr_nargs (*expr_p);
3278 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3279 auto_vec<tree> vargs (nargs);
3280
3281 for (i = 0; i < nargs; i++)
3282 {
3283 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3284 EXPR_LOCATION (*expr_p));
3285 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3286 }
3287
3288 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3289 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3290 gimplify_seq_add_stmt (pre_p, call);
3291 return GS_ALL_DONE;
3292 }
3293
3294 /* This may be a call to a builtin function.
3295
3296 Builtin function calls may be transformed into different
3297 (and more efficient) builtin function calls under certain
3298 circumstances. Unfortunately, gimplification can muck things
3299 up enough that the builtin expanders are not aware that certain
3300 transformations are still valid.
3301
3302 So we attempt transformation/gimplification of the call before
3303 we gimplify the CALL_EXPR. At this time we do not manage to
3304 transform all calls in the same manner as the expanders do, but
3305 we do transform most of them. */
3306 fndecl = get_callee_fndecl (*expr_p);
3307 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3308 switch (DECL_FUNCTION_CODE (fndecl))
3309 {
3310 CASE_BUILT_IN_ALLOCA:
3311 /* If the call has been built for a variable-sized object, then we
3312 want to restore the stack level when the enclosing BIND_EXPR is
3313 exited to reclaim the allocated space; otherwise, we need to do
3314 precisely the opposite and preserve the latest stack level. */
3315 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3316 gimplify_ctxp->save_stack = true;
3317 else
3318 gimplify_ctxp->keep_stack = true;
3319 break;
3320
3321 case BUILT_IN_VA_START:
3322 {
3323 builtin_va_start_p = TRUE;
3324 if (call_expr_nargs (*expr_p) < 2)
3325 {
3326 error ("too few arguments to function %<va_start%>");
3327 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3328 return GS_OK;
3329 }
3330
3331 if (fold_builtin_next_arg (*expr_p, true))
3332 {
3333 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3334 return GS_OK;
3335 }
3336 break;
3337 }
3338
3339 case BUILT_IN_EH_RETURN:
3340 cfun->calls_eh_return = true;
3341 break;
3342
3343 default:
3344 ;
3345 }
3346 if (fndecl && fndecl_built_in_p (fndecl))
3347 {
3348 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3349 if (new_tree && new_tree != *expr_p)
3350 {
3351 /* There was a transformation of this call which computes the
3352 same value, but in a more efficient way. Return and try
3353 again. */
3354 *expr_p = new_tree;
3355 return GS_OK;
3356 }
3357 }
3358
3359 /* Remember the original function pointer type. */
3360 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3361
3362 /* There is a sequence point before the call, so any side effects in
3363 the calling expression must occur before the actual call. Force
3364 gimplify_expr to use an internal post queue. */
3365 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3366 is_gimple_call_addr, fb_rvalue);
3367
3368 nargs = call_expr_nargs (*expr_p);
3369
3370 /* Get argument types for verification. */
3371 fndecl = get_callee_fndecl (*expr_p);
3372 parms = NULL_TREE;
3373 if (fndecl)
3374 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3375 else
3376 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3377
3378 if (fndecl && DECL_ARGUMENTS (fndecl))
3379 p = DECL_ARGUMENTS (fndecl);
3380 else if (parms)
3381 p = parms;
3382 else
3383 p = NULL_TREE;
3384 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3385 ;
3386
3387 /* If the last argument is __builtin_va_arg_pack () and it is not
3388 passed as a named argument, decrease the number of CALL_EXPR
3389 arguments and instead set the CALL_EXPR_VA_ARG_PACK flag. */
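  /* This matters for always_inline varargs wrappers, e.g. (a sketch;
     the function names are invented):

	extern int logf_ (const char *, ...);
	static inline __attribute__((always_inline)) int
	wrap (const char *fmt, ...)
	{
	  return logf_ (fmt, __builtin_va_arg_pack ());
	}

     where the call to logf_ receives the caller's actual varargs.  */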
3390 if (!p
3391 && i < nargs
3392 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3393 {
3394 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3395 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3396
3397 if (last_arg_fndecl
3398 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3399 {
3400 tree call = *expr_p;
3401
3402 --nargs;
3403 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3404 CALL_EXPR_FN (call),
3405 nargs, CALL_EXPR_ARGP (call));
3406
3407 /* Copy all CALL_EXPR flags, location and block, except
3408 CALL_EXPR_VA_ARG_PACK flag. */
3409 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3410 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3411 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3412 = CALL_EXPR_RETURN_SLOT_OPT (call);
3413 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3414 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3415
3416 /* Set CALL_EXPR_VA_ARG_PACK. */
3417 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3418 }
3419 }
3420
3421 /* If the call returns twice then after building the CFG the call
3422 argument computations will no longer dominate the call because
3423 we add an abnormal incoming edge to the call. So do not use SSA
3424 vars there. */
3425 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3426
3427 /* Gimplify the function arguments. */
3428 if (nargs > 0)
3429 {
3430 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3431 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3432 PUSH_ARGS_REVERSED ? i-- : i++)
3433 {
3434 enum gimplify_status t;
3435
3436 /* Avoid gimplifying the second argument to va_start, which needs to
3437 be the plain PARM_DECL. */
3438 if ((i != 1) || !builtin_va_start_p)
3439 {
3440 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3441 EXPR_LOCATION (*expr_p), ! returns_twice);
3442
3443 if (t == GS_ERROR)
3444 ret = GS_ERROR;
3445 }
3446 }
3447 }
3448
3449 /* Gimplify the static chain. */
3450 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3451 {
3452 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3453 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3454 else
3455 {
3456 enum gimplify_status t;
3457 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3458 EXPR_LOCATION (*expr_p), ! returns_twice);
3459 if (t == GS_ERROR)
3460 ret = GS_ERROR;
3461 }
3462 }
3463
3464 /* Verify the function result. */
3465 if (want_value && fndecl
3466 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3467 {
3468 error_at (loc, "using result of function returning %<void%>");
3469 ret = GS_ERROR;
3470 }
3471
3472 /* Try this again in case gimplification exposed something. */
3473 if (ret != GS_ERROR)
3474 {
3475 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3476
3477 if (new_tree && new_tree != *expr_p)
3478 {
3479 /* There was a transformation of this call which computes the
3480 same value, but in a more efficient way. Return and try
3481 again. */
3482 *expr_p = new_tree;
3483 return GS_OK;
3484 }
3485 }
3486 else
3487 {
3488 *expr_p = error_mark_node;
3489 return GS_ERROR;
3490 }
3491
3492 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
3493 call expression. This allows us to eliminate redundant or useless
3494 calls to "const" functions. */
3495 if (TREE_CODE (*expr_p) == CALL_EXPR)
3496 {
3497 int flags = call_expr_flags (*expr_p);
3498 if (flags & (ECF_CONST | ECF_PURE)
3499 /* An infinite loop is considered a side effect. */
3500 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3501 TREE_SIDE_EFFECTS (*expr_p) = 0;
3502 }
3503
3504 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3505 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3506 form and delegate the creation of a GIMPLE_CALL to
3507 gimplify_modify_expr. This is always possible because when
3508 WANT_VALUE is true, the caller wants the result of this call into
3509 a temporary, which means that we will emit an INIT_EXPR in
3510 internal_get_tmp_var which will then be handled by
3511 gimplify_modify_expr. */
3512 if (!want_value)
3513 {
3514 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3515 have to do is replicate it as a GIMPLE_CALL tuple. */
3516 gimple_stmt_iterator gsi;
3517 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3518 notice_special_calls (call);
3519 gimplify_seq_add_stmt (pre_p, call);
3520 gsi = gsi_last (*pre_p);
3521 maybe_fold_stmt (&gsi);
3522 *expr_p = NULL_TREE;
3523 }
3524 else
3525 /* Remember the original function type. */
3526 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3527 CALL_EXPR_FN (*expr_p));
3528
3529 return ret;
3530 }
3531
3532 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3533 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3534
3535 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3536 condition is true or false, respectively. If null, we should generate
3537 our own to skip over the evaluation of this specific expression.
3538
3539 LOCUS is the source location of the COND_EXPR.
3540
3541 This function is the tree equivalent of do_jump.
3542
3543 shortcut_cond_r should only be called by shortcut_cond_expr. */
3544
3545 static tree
3546 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3547 location_t locus)
3548 {
3549 tree local_label = NULL_TREE;
3550 tree t, expr = NULL;
3551
3552 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3553 retain the shortcut semantics. Just insert the gotos here;
3554 shortcut_cond_expr will append the real blocks later. */
3555 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3556 {
3557 location_t new_locus;
3558
3559 /* Turn if (a && b) into
3560
3561 if (a); else goto no;
3562 if (b) goto yes; else goto no;
3563 (no:) */
3564
3565 if (false_label_p == NULL)
3566 false_label_p = &local_label;
3567
3568 /* Keep the original source location on the first 'if'. */
3569 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3570 append_to_statement_list (t, &expr);
3571
3572 /* Set the source location of the && on the second 'if'. */
3573 new_locus = rexpr_location (pred, locus);
3574 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3575 new_locus);
3576 append_to_statement_list (t, &expr);
3577 }
3578 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3579 {
3580 location_t new_locus;
3581
3582 /* Turn if (a || b) into
3583
3584 if (a) goto yes;
3585 if (b) goto yes; else goto no;
3586 (yes:) */
3587
3588 if (true_label_p == NULL)
3589 true_label_p = &local_label;
3590
3591 /* Keep the original source location on the first 'if'. */
3592 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3593 append_to_statement_list (t, &expr);
3594
3595 /* Set the source location of the || on the second 'if'. */
3596 new_locus = rexpr_location (pred, locus);
3597 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3598 new_locus);
3599 append_to_statement_list (t, &expr);
3600 }
3601 else if (TREE_CODE (pred) == COND_EXPR
3602 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3603 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3604 {
3605 location_t new_locus;
3606
3607 /* As long as we're messing with gotos, turn if (a ? b : c) into
3608 if (a)
3609 if (b) goto yes; else goto no;
3610 else
3611 if (c) goto yes; else goto no;
3612
3613 Don't do this if one of the arms has void type, which can happen
3614 in C++ when the arm is throw. */
3615
3616 /* Keep the original source location on the first 'if'. Set the source
3617 location of the ? on the second 'if'. */
3618 new_locus = rexpr_location (pred, locus);
3619 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3620 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3621 false_label_p, locus),
3622 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3623 false_label_p, new_locus));
3624 }
3625 else
3626 {
3627 expr = build3 (COND_EXPR, void_type_node, pred,
3628 build_and_jump (true_label_p),
3629 build_and_jump (false_label_p));
3630 SET_EXPR_LOCATION (expr, locus);
3631 }
3632
3633 if (local_label)
3634 {
3635 t = build1 (LABEL_EXPR, void_type_node, local_label);
3636 append_to_statement_list (t, &expr);
3637 }
3638
3639 return expr;
3640 }
3641
3642 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3643 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3644 statement, if it is the last one. Otherwise, return NULL. */
3645
3646 static tree
3647 find_goto (tree expr)
3648 {
3649 if (!expr)
3650 return NULL_TREE;
3651
3652 if (TREE_CODE (expr) == GOTO_EXPR)
3653 return expr;
3654
3655 if (TREE_CODE (expr) != STATEMENT_LIST)
3656 return NULL_TREE;
3657
3658 tree_stmt_iterator i = tsi_start (expr);
3659
3660 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3661 tsi_next (&i);
3662
3663 if (!tsi_one_before_end_p (i))
3664 return NULL_TREE;
3665
3666 return find_goto (tsi_stmt (i));
3667 }
3668
3669 /* Same as find_goto, except that it returns NULL if the destination
3670 is not a LABEL_DECL. */
3671
3672 static inline tree
3673 find_goto_label (tree expr)
3674 {
3675 tree dest = find_goto (expr);
3676 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3677 return dest;
3678 return NULL_TREE;
3679 }
3680
3681 /* Given a conditional expression EXPR with short-circuit boolean
3682 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3683 predicate apart into the equivalent sequence of conditionals. */
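/* For example (illustrative): combining the simple transformations
   below with shortcut_cond_r, a statement such as

     if (a && b) c; else d;

   ends up roughly as

     if (a); else goto no;
     if (b); else goto no;
     c; goto end;
     no: d;
     end:  */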
3684
3685 static tree
3686 shortcut_cond_expr (tree expr)
3687 {
3688 tree pred = TREE_OPERAND (expr, 0);
3689 tree then_ = TREE_OPERAND (expr, 1);
3690 tree else_ = TREE_OPERAND (expr, 2);
3691 tree true_label, false_label, end_label, t;
3692 tree *true_label_p;
3693 tree *false_label_p;
3694 bool emit_end, emit_false, jump_over_else;
3695 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3696 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3697
3698 /* First do simple transformations. */
3699 if (!else_se)
3700 {
3701 /* If there is no 'else', turn
3702 if (a && b) then c
3703 into
3704 if (a) if (b) then c. */
3705 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3706 {
3707 /* Keep the original source location on the first 'if'. */
3708 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3709 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3710 /* Set the source location of the && on the second 'if'. */
3711 if (rexpr_has_location (pred))
3712 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3713 then_ = shortcut_cond_expr (expr);
3714 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3715 pred = TREE_OPERAND (pred, 0);
3716 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3717 SET_EXPR_LOCATION (expr, locus);
3718 }
3719 }
3720
3721 if (!then_se)
3722 {
3723 /* If there is no 'then', turn
3724 if (a || b); else d
3725 into
3726 if (a); else if (b); else d. */
3727 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3728 {
3729 /* Keep the original source location on the first 'if'. */
3730 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3731 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3732 /* Set the source location of the || on the second 'if'. */
3733 if (rexpr_has_location (pred))
3734 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3735 else_ = shortcut_cond_expr (expr);
3736 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3737 pred = TREE_OPERAND (pred, 0);
3738 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3739 SET_EXPR_LOCATION (expr, locus);
3740 }
3741 }
3742
3743 /* If we're done, great. */
3744 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3745 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3746 return expr;
3747
3748 /* Otherwise we need to mess with gotos. Change
3749 if (a) c; else d;
3750 to
3751 if (a); else goto no;
3752 c; goto end;
3753 no: d; end:
3754 and recursively gimplify the condition. */
3755
3756 true_label = false_label = end_label = NULL_TREE;
3757
3758 /* If our arms just jump somewhere, hijack those labels so we don't
3759 generate jumps to jumps. */
3760
3761 if (tree then_goto = find_goto_label (then_))
3762 {
3763 true_label = GOTO_DESTINATION (then_goto);
3764 then_ = NULL;
3765 then_se = false;
3766 }
3767
3768 if (tree else_goto = find_goto_label (else_))
3769 {
3770 false_label = GOTO_DESTINATION (else_goto);
3771 else_ = NULL;
3772 else_se = false;
3773 }
3774
3775 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3776 if (true_label)
3777 true_label_p = &true_label;
3778 else
3779 true_label_p = NULL;
3780
3781 /* The 'else' branch also needs a label if it contains interesting code. */
3782 if (false_label || else_se)
3783 false_label_p = &false_label;
3784 else
3785 false_label_p = NULL;
3786
3787 /* If there was nothing else in our arms, just forward the label(s). */
3788 if (!then_se && !else_se)
3789 return shortcut_cond_r (pred, true_label_p, false_label_p,
3790 EXPR_LOC_OR_LOC (expr, input_location));
3791
3792 /* If our last subexpression already has a terminal label, reuse it. */
3793 if (else_se)
3794 t = expr_last (else_);
3795 else if (then_se)
3796 t = expr_last (then_);
3797 else
3798 t = NULL;
3799 if (t && TREE_CODE (t) == LABEL_EXPR)
3800 end_label = LABEL_EXPR_LABEL (t);
3801
3802 /* If we don't care about jumping to the 'else' branch, jump to the end
3803 if the condition is false. */
3804 if (!false_label_p)
3805 false_label_p = &end_label;
3806
3807 /* We only want to emit these labels if we aren't hijacking them. */
3808 emit_end = (end_label == NULL_TREE);
3809 emit_false = (false_label == NULL_TREE);
3810
3811 /* We only emit the jump over the else clause if we have to--if the
3812 then clause may fall through. Otherwise we can wind up with a
3813 useless jump and a useless label at the end of gimplified code,
3814 which will cause us to think that this conditional as a whole
3815 falls through even if it doesn't. If we then inline a function
3816 which ends with such a condition, that can cause us to issue an
3817 inappropriate warning about control reaching the end of a
3818 non-void function. */
3819 jump_over_else = block_may_fallthru (then_);
3820
3821 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3822 EXPR_LOC_OR_LOC (expr, input_location));
3823
3824 expr = NULL;
3825 append_to_statement_list (pred, &expr);
3826
3827 append_to_statement_list (then_, &expr);
3828 if (else_se)
3829 {
3830 if (jump_over_else)
3831 {
3832 tree last = expr_last (expr);
3833 t = build_and_jump (&end_label);
3834 if (rexpr_has_location (last))
3835 SET_EXPR_LOCATION (t, rexpr_location (last));
3836 append_to_statement_list (t, &expr);
3837 }
3838 if (emit_false)
3839 {
3840 t = build1 (LABEL_EXPR, void_type_node, false_label);
3841 append_to_statement_list (t, &expr);
3842 }
3843 append_to_statement_list (else_, &expr);
3844 }
3845 if (emit_end && end_label)
3846 {
3847 t = build1 (LABEL_EXPR, void_type_node, end_label);
3848 append_to_statement_list (t, &expr);
3849 }
3850
3851 return expr;
3852 }
3853
3854 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
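/* For instance (illustrative): in C the comparison 'a < b' has type
   int; when it is used as a COND_EXPR condition, the code below simply
   retypes the comparison to boolean_type_node.  A plain value 'x' is
   not a truth expression, so it is instead converted with
   fold_convert_loc (loc, boolean_type_node, x), i.e. treated as
   'x != 0'.  */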
3855
3856 tree
3857 gimple_boolify (tree expr)
3858 {
3859 tree type = TREE_TYPE (expr);
3860 location_t loc = EXPR_LOCATION (expr);
3861
3862 if (TREE_CODE (expr) == NE_EXPR
3863 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3864 && integer_zerop (TREE_OPERAND (expr, 1)))
3865 {
3866 tree call = TREE_OPERAND (expr, 0);
3867 tree fn = get_callee_fndecl (call);
3868
3869 /* For __builtin_expect ((long) (x), y) recurse into x as well
3870 if x is truth_value_p. */
3871 if (fn
3872 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
3873 && call_expr_nargs (call) == 2)
3874 {
3875 tree arg = CALL_EXPR_ARG (call, 0);
3876 if (arg)
3877 {
3878 if (TREE_CODE (arg) == NOP_EXPR
3879 && TREE_TYPE (arg) == TREE_TYPE (call))
3880 arg = TREE_OPERAND (arg, 0);
3881 if (truth_value_p (TREE_CODE (arg)))
3882 {
3883 arg = gimple_boolify (arg);
3884 CALL_EXPR_ARG (call, 0)
3885 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3886 }
3887 }
3888 }
3889 }
3890
3891 switch (TREE_CODE (expr))
3892 {
3893 case TRUTH_AND_EXPR:
3894 case TRUTH_OR_EXPR:
3895 case TRUTH_XOR_EXPR:
3896 case TRUTH_ANDIF_EXPR:
3897 case TRUTH_ORIF_EXPR:
3898 /* Also boolify the arguments of truth exprs. */
3899 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3900 /* FALLTHRU */
3901
3902 case TRUTH_NOT_EXPR:
3903 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3904
3905 /* These expressions always produce boolean results. */
3906 if (TREE_CODE (type) != BOOLEAN_TYPE)
3907 TREE_TYPE (expr) = boolean_type_node;
3908 return expr;
3909
3910 case ANNOTATE_EXPR:
3911 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3912 {
3913 case annot_expr_ivdep_kind:
3914 case annot_expr_unroll_kind:
3915 case annot_expr_no_vector_kind:
3916 case annot_expr_vector_kind:
3917 case annot_expr_parallel_kind:
3918 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3919 if (TREE_CODE (type) != BOOLEAN_TYPE)
3920 TREE_TYPE (expr) = boolean_type_node;
3921 return expr;
3922 default:
3923 gcc_unreachable ();
3924 }
3925
3926 default:
3927 if (COMPARISON_CLASS_P (expr))
3928 {
3929 /* These expressions always produce boolean results. */
3930 if (TREE_CODE (type) != BOOLEAN_TYPE)
3931 TREE_TYPE (expr) = boolean_type_node;
3932 return expr;
3933 }
3934 /* Other expressions that get here must have boolean values, but
3935 might need to be converted to the appropriate mode. */
3936 if (TREE_CODE (type) == BOOLEAN_TYPE)
3937 return expr;
3938 return fold_convert_loc (loc, boolean_type_node, expr);
3939 }
3940 }
3941
3942 /* Given a conditional expression *EXPR_P without side effects, gimplify
3943 its operands. New statements are inserted to PRE_P. */
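/* Sketch (illustrative GIMPLE; temporary names are invented): for

     tmp = (a && b) ? c : d;

   with pure operands, the '&&' is demoted to the non-shortcut
   TRUTH_AND_EXPR, giving roughly

     _1 = a & b;
     tmp = _1 ? c : d;

   Evaluating b unconditionally is safe here precisely because the
   operands have no side effects and cannot trap.  */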
3944
3945 static enum gimplify_status
3946 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3947 {
3948 tree expr = *expr_p, cond;
3949 enum gimplify_status ret, tret;
3950 enum tree_code code;
3951
3952 cond = gimple_boolify (COND_EXPR_COND (expr));
3953
3954 /* We need to handle && and || specially, as their gimplification
3955 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
3956 code = TREE_CODE (cond);
3957 if (code == TRUTH_ANDIF_EXPR)
3958 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3959 else if (code == TRUTH_ORIF_EXPR)
3960 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3961 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3962 COND_EXPR_COND (*expr_p) = cond;
3963
3964 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3965 is_gimple_val, fb_rvalue);
3966 ret = MIN (ret, tret);
3967 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3968 is_gimple_val, fb_rvalue);
3969
3970 return MIN (ret, tret);
3971 }
3972
3973 /* Return true if evaluating EXPR could trap.
3974 EXPR is GENERIC, while tree_could_trap_p can be called
3975 only on GIMPLE. */
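/* E.g. (illustrative): 'x / y' could trap at run time on integer
   division by zero, so a COND_EXPR arm containing it must not be
   evaluated unconditionally, whereas a bare variable read never
   traps.  */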
3976
3977 bool
3978 generic_expr_could_trap_p (tree expr)
3979 {
3980 unsigned i, n;
3981
3982 if (!expr || is_gimple_val (expr))
3983 return false;
3984
3985 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3986 return true;
3987
3988 n = TREE_OPERAND_LENGTH (expr);
3989 for (i = 0; i < n; i++)
3990 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3991 return true;
3992
3993 return false;
3994 }
3995
3996 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3997 into
3998
3999      if (p)                      if (p)
4000        t1 = a;                     a;
4001      else          or            else
4002        t1 = b;                     b;
4003      t1;
4004
4005 The second form is used when *EXPR_P is of type void.
4006
4007 PRE_P points to the list where side effects that must happen before
4008 *EXPR_P should be stored. */
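/* Concretely (a sketch of the resulting GIMPLE; only the "iftmp"
   prefix is real, see create_tmp_var below): for the first form,

     x = p ? a : b;

   becomes roughly

     if (p) goto <then>; else goto <else>;
     <then>: iftmp = a; goto <end>;
     <else>: iftmp = b;
     <end>:  x = iftmp;  */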
4009
4010 static enum gimplify_status
4011 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4012 {
4013 tree expr = *expr_p;
4014 tree type = TREE_TYPE (expr);
4015 location_t loc = EXPR_LOCATION (expr);
4016 tree tmp, arm1, arm2;
4017 enum gimplify_status ret;
4018 tree label_true, label_false, label_cont;
4019 bool have_then_clause_p, have_else_clause_p;
4020 gcond *cond_stmt;
4021 enum tree_code pred_code;
4022 gimple_seq seq = NULL;
4023
4024 /* If this COND_EXPR has a value, copy the values into a temporary within
4025 the arms. */
4026 if (!VOID_TYPE_P (type))
4027 {
4028 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4029 tree result;
4030
4031 /* If either an rvalue is ok or we do not require an lvalue, create the
4032 temporary. But we cannot do that if the type is addressable. */
4033 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4034 && !TREE_ADDRESSABLE (type))
4035 {
4036 if (gimplify_ctxp->allow_rhs_cond_expr
4037 /* If either branch has side effects or could trap, it can't be
4038 evaluated unconditionally. */
4039 && !TREE_SIDE_EFFECTS (then_)
4040 && !generic_expr_could_trap_p (then_)
4041 && !TREE_SIDE_EFFECTS (else_)
4042 && !generic_expr_could_trap_p (else_))
4043 return gimplify_pure_cond_expr (expr_p, pre_p);
4044
4045 tmp = create_tmp_var (type, "iftmp");
4046 result = tmp;
4047 }
4048
4049 /* Otherwise, only create and copy references to the values. */
4050 else
4051 {
4052 type = build_pointer_type (type);
4053
4054 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4055 then_ = build_fold_addr_expr_loc (loc, then_);
4056
4057 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4058 else_ = build_fold_addr_expr_loc (loc, else_);
4059
4060 expr
4061 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4062
4063 tmp = create_tmp_var (type, "iftmp");
4064 result = build_simple_mem_ref_loc (loc, tmp);
4065 }
4066
4067 /* Build the new then clause, `tmp = then_;'. But don't build the
4068 assignment if the value is void; in C++ that can happen when the arm is a throw. */
4069 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4070 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4071
4072 /* Similarly, build the new else clause, `tmp = else_;'. */
4073 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4074 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4075
4076 TREE_TYPE (expr) = void_type_node;
4077 recalculate_side_effects (expr);
4078
4079 /* Move the COND_EXPR to the prequeue. */
4080 gimplify_stmt (&expr, pre_p);
4081
4082 *expr_p = result;
4083 return GS_ALL_DONE;
4084 }
4085
4086 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4087 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4088 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4089 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4090
4091 /* Make sure the condition has BOOLEAN_TYPE. */
4092 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4093
4094 /* Break apart && and || conditions. */
4095 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4096 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4097 {
4098 expr = shortcut_cond_expr (expr);
4099
4100 if (expr != *expr_p)
4101 {
4102 *expr_p = expr;
4103
4104 /* We can't rely on gimplify_expr to re-gimplify the expanded
4105 form properly, as cleanups might cause the target labels to be
4106 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4107 set up a conditional context. */
4108 gimple_push_condition ();
4109 gimplify_stmt (expr_p, &seq);
4110 gimple_pop_condition (pre_p);
4111 gimple_seq_add_seq (pre_p, seq);
4112
4113 return GS_ALL_DONE;
4114 }
4115 }
4116
4117 /* Now do the normal gimplification. */
4118
4119 /* Gimplify condition. */
4120 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
4121 fb_rvalue);
4122 if (ret == GS_ERROR)
4123 return GS_ERROR;
4124 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4125
4126 gimple_push_condition ();
4127
4128 have_then_clause_p = have_else_clause_p = false;
4129 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4130 if (label_true
4131 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4132 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4133 have different locations, otherwise we end up with incorrect
4134 location information on the branches. */
4135 && (optimize
4136 || !EXPR_HAS_LOCATION (expr)
4137 || !rexpr_has_location (label_true)
4138 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4139 {
4140 have_then_clause_p = true;
4141 label_true = GOTO_DESTINATION (label_true);
4142 }
4143 else
4144 label_true = create_artificial_label (UNKNOWN_LOCATION);
4145 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4146 if (label_false
4147 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4148 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4149 have different locations, otherwise we end up with incorrect
4150 location information on the branches. */
4151 && (optimize
4152 || !EXPR_HAS_LOCATION (expr)
4153 || !rexpr_has_location (label_false)
4154 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4155 {
4156 have_else_clause_p = true;
4157 label_false = GOTO_DESTINATION (label_false);
4158 }
4159 else
4160 label_false = create_artificial_label (UNKNOWN_LOCATION);
4161
4162 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4163 &arm2);
4164 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4165 label_false);
4166 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4167 gimplify_seq_add_stmt (&seq, cond_stmt);
4168 gimple_stmt_iterator gsi = gsi_last (seq);
4169 maybe_fold_stmt (&gsi);
4170
4171 label_cont = NULL_TREE;
4172 if (!have_then_clause_p)
4173 {
4174 /* For if (...) {} else { code; } put label_true after
4175 the else block. */
4176 if (TREE_OPERAND (expr, 1) == NULL_TREE
4177 && !have_else_clause_p
4178 && TREE_OPERAND (expr, 2) != NULL_TREE)
4179 label_cont = label_true;
4180 else
4181 {
4182 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4183 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4184 /* For if (...) { code; } else {} or
4185 if (...) { code; } else goto label; or
4186 if (...) { code; return; } else { ... }
4187 label_cont isn't needed. */
4188 if (!have_else_clause_p
4189 && TREE_OPERAND (expr, 2) != NULL_TREE
4190 && gimple_seq_may_fallthru (seq))
4191 {
4192 gimple *g;
4193 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4194
4195 g = gimple_build_goto (label_cont);
4196
4197 /* GIMPLE_COND's are very low level; they have embedded
4198 gotos. This particular embedded goto should not be marked
4199 with the location of the original COND_EXPR, as it would
4200 correspond to the COND_EXPR's condition, not the ELSE or the
4201 THEN arms. To avoid marking it with the wrong location, flag
4202 it as "no location". */
4203 gimple_set_do_not_emit_location (g);
4204
4205 gimplify_seq_add_stmt (&seq, g);
4206 }
4207 }
4208 }
4209 if (!have_else_clause_p)
4210 {
4211 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4212 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4213 }
4214 if (label_cont)
4215 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4216
4217 gimple_pop_condition (pre_p);
4218 gimple_seq_add_seq (pre_p, seq);
4219
4220 if (ret == GS_ERROR)
4221 ; /* Do nothing. */
4222 else if (have_then_clause_p || have_else_clause_p)
4223 ret = GS_ALL_DONE;
4224 else
4225 {
4226 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4227 expr = TREE_OPERAND (expr, 0);
4228 gimplify_stmt (&expr, pre_p);
4229 }
4230
4231 *expr_p = NULL;
4232 return ret;
4233 }
4234
4235 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4236 to be marked addressable.
4237
4238 We cannot rely on such an expression being directly markable if a temporary
4239 has been created by the gimplification. In this case, we create another
4240 temporary and initialize it with a copy, which will become a store after we
4241 mark it addressable. This can happen if the front-end passed us something
4242 that it could not mark addressable yet, such as the conversion
4243 (int) floatvar in a Fortran pass-by-reference argument. */
4244
4245 static void
4246 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4247 {
4248 while (handled_component_p (*expr_p))
4249 expr_p = &TREE_OPERAND (*expr_p, 0);
4250 if (is_gimple_reg (*expr_p))
4251 {
4252 /* Do not allow an SSA name as the temporary. */
4253 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4254 DECL_GIMPLE_REG_P (var) = 0;
4255 *expr_p = var;
4256 }
4257 }
4258
4259 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4260 a call to __builtin_memcpy. */
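/* E.g. (illustrative): an aggregate assignment that is better done as
   a block move,

     to = from;        (both operands of type struct S)

   becomes

     __builtin_memcpy (&to, &from, SIZE);

   where SIZE is the size argument passed in.  When the value of the
   assignment is itself needed, the destination is re-read through the
   pointer returned by memcpy.  */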
4261
4262 static enum gimplify_status
4263 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4264 gimple_seq *seq_p)
4265 {
4266 tree t, to, to_ptr, from, from_ptr;
4267 gcall *gs;
4268 location_t loc = EXPR_LOCATION (*expr_p);
4269
4270 to = TREE_OPERAND (*expr_p, 0);
4271 from = TREE_OPERAND (*expr_p, 1);
4272
4273 /* Mark the RHS addressable. Beware that it may not be possible to do so
4274 directly if a temporary has been created by the gimplification. */
4275 prepare_gimple_addressable (&from, seq_p);
4276
4277 mark_addressable (from);
4278 from_ptr = build_fold_addr_expr_loc (loc, from);
4279 gimplify_arg (&from_ptr, seq_p, loc);
4280
4281 mark_addressable (to);
4282 to_ptr = build_fold_addr_expr_loc (loc, to);
4283 gimplify_arg (&to_ptr, seq_p, loc);
4284
4285 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4286
4287 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4288
4289 if (want_value)
4290 {
4291 /* tmp = memcpy() */
4292 t = create_tmp_var (TREE_TYPE (to_ptr));
4293 gimple_call_set_lhs (gs, t);
4294 gimplify_seq_add_stmt (seq_p, gs);
4295
4296 *expr_p = build_simple_mem_ref (t);
4297 return GS_ALL_DONE;
4298 }
4299
4300 gimplify_seq_add_stmt (seq_p, gs);
4301 *expr_p = NULL;
4302 return GS_ALL_DONE;
4303 }
4304
4305 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4306 a call to __builtin_memset. In this case we know that the RHS is
4307 a CONSTRUCTOR with an empty element list. */
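/* E.g. (illustrative): when the FE presents an assignment whose RHS
   is an empty CONSTRUCTOR, i.e. zero-initialization of some struct S
   object 's', the assignment becomes

     __builtin_memset (&s, 0, SIZE);

   where SIZE is the size argument passed in.  */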
4308
4309 static enum gimplify_status
4310 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4311 gimple_seq *seq_p)
4312 {
4313 tree t, from, to, to_ptr;
4314 gcall *gs;
4315 location_t loc = EXPR_LOCATION (*expr_p);
4316
4317 /* Assert our assumptions, to abort instead of producing wrong code
4318 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4319 not be immediately exposed. */
4320 from = TREE_OPERAND (*expr_p, 1);
4321 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4322 from = TREE_OPERAND (from, 0);
4323
4324 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4325 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4326
4327 /* Now proceed. */
4328 to = TREE_OPERAND (*expr_p, 0);
4329
4330 to_ptr = build_fold_addr_expr_loc (loc, to);
4331 gimplify_arg (&to_ptr, seq_p, loc);
4332 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4333
4334 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4335
4336 if (want_value)
4337 {
4338 /* tmp = memset() */
4339 t = create_tmp_var (TREE_TYPE (to_ptr));
4340 gimple_call_set_lhs (gs, t);
4341 gimplify_seq_add_stmt (seq_p, gs);
4342
4343 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4344 return GS_ALL_DONE;
4345 }
4346
4347 gimplify_seq_add_stmt (seq_p, gs);
4348 *expr_p = NULL;
4349 return GS_ALL_DONE;
4350 }
4351
4352 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4353 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4354 assignment. Return non-null if we detect a potential overlap. */
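/* For instance (illustrative): in

     a = (struct S) { .x = a.y, .y = 0 };

   the value 'a.y' overlaps the destination, so preevaluation must
   save it in a temporary before any part of 'a' is stored.  */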
4355
4356 struct gimplify_init_ctor_preeval_data
4357 {
4358 /* The base decl of the lhs object. May be NULL, in which case we
4359 have to assume the lhs is indirect. */
4360 tree lhs_base_decl;
4361
4362 /* The alias set of the lhs object. */
4363 alias_set_type lhs_alias_set;
4364 };
4365
4366 static tree
4367 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4368 {
4369 struct gimplify_init_ctor_preeval_data *data
4370 = (struct gimplify_init_ctor_preeval_data *) xdata;
4371 tree t = *tp;
4372
4373 /* If we find the base object, obviously we have overlap. */
4374 if (data->lhs_base_decl == t)
4375 return t;
4376
4377 /* If the constructor component is indirect, determine if we have a
4378 potential overlap with the lhs. The only bits of information we
4379 have to go on at this point are addressability and alias sets. */
4380 if ((INDIRECT_REF_P (t)
4381 || TREE_CODE (t) == MEM_REF)
4382 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4383 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4384 return t;
4385
4386 /* If the constructor component is a call, determine if it can hide a
4387 potential overlap with the lhs through an INDIRECT_REF like above.
4388 ??? Ugh - this is completely broken. In fact this whole analysis
4389 doesn't look conservative. */
4390 if (TREE_CODE (t) == CALL_EXPR)
4391 {
4392 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4393
4394 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4395 if (POINTER_TYPE_P (TREE_VALUE (type))
4396 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4397 && alias_sets_conflict_p (data->lhs_alias_set,
4398 get_alias_set
4399 (TREE_TYPE (TREE_VALUE (type)))))
4400 return t;
4401 }
4402
4403 if (IS_TYPE_OR_DECL_P (t))
4404 *walk_subtrees = 0;
4405 return NULL;
4406 }
4407
4408 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4409 force values that overlap with the lhs (as described by *DATA)
4410 into temporaries. */
4411
4412 static void
4413 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4414 struct gimplify_init_ctor_preeval_data *data)
4415 {
4416 enum gimplify_status one;
4417
4418 /* If the value is constant, then there's nothing to pre-evaluate. */
4419 if (TREE_CONSTANT (*expr_p))
4420 {
4421 /* Ensure it does not have side effects; it might contain a reference to
4422 the object we're initializing. */
4423 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4424 return;
4425 }
4426
4427 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4428 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4429 return;
4430
4431 /* Recurse for nested constructors. */
4432 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4433 {
4434 unsigned HOST_WIDE_INT ix;
4435 constructor_elt *ce;
4436 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4437
4438 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4439 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4440
4441 return;
4442 }
4443
4444 /* If this is a variable sized type, we must remember the size. */
4445 maybe_with_size_expr (expr_p);
4446
4447 /* Gimplify the constructor element to something appropriate for the rhs
4448 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4449 the gimplifier will consider this a store to memory. Doing this
4450 gimplification now means that we won't have to deal with complicated
4451 language-specific trees, nor trees like SAVE_EXPR that can induce
4452 exponential search behavior. */
4453 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4454 if (one == GS_ERROR)
4455 {
4456 *expr_p = NULL;
4457 return;
4458 }
4459
4460 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4461 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4462 always be true for all scalars, since is_gimple_mem_rhs insists on a
4463 temporary variable for them. */
4464 if (DECL_P (*expr_p))
4465 return;
4466
4467 /* If this is of variable size, we have no choice but to assume it doesn't
4468 overlap since we can't make a temporary for it. */
4469 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4470 return;
4471
4472 /* Otherwise, we must search for overlap ... */
4473 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4474 return;
4475
4476 /* ... and if found, force the value into a temporary. */
4477 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4478 }
4479
4480 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4481 a RANGE_EXPR in a CONSTRUCTOR for an array.
4482
4483 var = lower;
4484 loop_entry:
4485 object[var] = value;
4486 if (var == upper)
4487 goto loop_exit;
4488 var = var + 1;
4489 goto loop_entry;
4490 loop_exit:
4491
4492 We increment var _after_ the loop exit check because if upper were
4493 TYPE_MAX_VALUE of its type, incrementing before the check would wrap around.
4494
4495 Note that we never have to deal with SAVE_EXPRs here, because this has
4496 already been taken care of for us, in gimplify_init_ctor_preeval(). */
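/* A GNU C designated range initializer (illustrative) such as

     int a[100] = { [10 ... 90] = v };

   reaches us as a RANGE_EXPR index and is lowered with the loop
   above rather than as 81 separate element stores.  */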
4497
4498 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4499 gimple_seq *, bool);
4500
4501 static void
4502 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4503 tree value, tree array_elt_type,
4504 gimple_seq *pre_p, bool cleared)
4505 {
4506 tree loop_entry_label, loop_exit_label, fall_thru_label;
4507 tree var, var_type, cref, tmp;
4508
4509 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4510 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4511 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4512
4513 /* Create and initialize the index variable. */
4514 var_type = TREE_TYPE (upper);
4515 var = create_tmp_var (var_type);
4516 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4517
4518 /* Add the loop entry label. */
4519 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4520
4521 /* Build the reference. */
4522 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4523 var, NULL_TREE, NULL_TREE);
4524
4525 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4526 the store. Otherwise just assign value to the reference. */
4527
4528 if (TREE_CODE (value) == CONSTRUCTOR)
4529 /* NB we might have to call ourselves recursively through
4530 gimplify_init_ctor_eval if the value is a constructor. */
4531 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4532 pre_p, cleared);
4533 else
4534 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4535
4536 /* We exit the loop when the index var is equal to the upper bound. */
4537 gimplify_seq_add_stmt (pre_p,
4538 gimple_build_cond (EQ_EXPR, var, upper,
4539 loop_exit_label, fall_thru_label));
4540
4541 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4542
4543 /* Otherwise, increment the index var... */
4544 tmp = build2 (PLUS_EXPR, var_type, var,
4545 fold_convert (var_type, integer_one_node));
4546 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4547
4548 /* ...and jump back to the loop entry. */
4549 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4550
4551 /* Add the loop exit label. */
4552 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4553 }
4554
4555 /* Return true if FDECL is accessing a field that is zero sized. */
4556
4557 static bool
4558 zero_sized_field_decl (const_tree fdecl)
4559 {
4560 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4561 && integer_zerop (DECL_SIZE (fdecl)))
4562 return true;
4563 return false;
4564 }
4565
4566 /* Return true if TYPE is zero sized. */
4567
4568 static bool
4569 zero_sized_type (const_tree type)
4570 {
4571 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4572 && integer_zerop (TYPE_SIZE (type)))
4573 return true;
4574 return false;
4575 }
4576
4577 /* A subroutine of gimplify_init_constructor. Generate individual
4578 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4579 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4580 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4581 zeroed first. */
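/* Sketch: with CLEARED false, an initializer such as

     struct point q = { .x = i, .y = j };

   simply emits the component assignments

     q.x = i;
     q.y = j;

   whereas with CLEARED true, elements whose initializer is zero are
   skipped, since the whole object has already been block-cleared.  */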
4582
4583 static void
4584 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4585 gimple_seq *pre_p, bool cleared)
4586 {
4587 tree array_elt_type = NULL;
4588 unsigned HOST_WIDE_INT ix;
4589 tree purpose, value;
4590
4591 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4592 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4593
4594 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4595 {
4596 tree cref;
4597
4598 /* NULL values are created above for gimplification errors. */
4599 if (value == NULL)
4600 continue;
4601
4602 if (cleared && initializer_zerop (value))
4603 continue;
4604
4605 /* ??? Here's to hoping the front end fills in all of the indices,
4606 so we don't have to figure out what's missing ourselves. */
4607 gcc_assert (purpose);
4608
4609 /* Skip zero-sized fields, unless value has side-effects. This can
4610 happen with calls to functions returning a zero-sized type, which
4611 we shouldn't discard. As a number of downstream passes don't
4612 expect sets of zero-sized fields, we rely on the gimplification of
4613 the MODIFY_EXPR we make below to drop the assignment statement. */
4614 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4615 continue;
4616
4617 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4618 whole range. */
4619 if (TREE_CODE (purpose) == RANGE_EXPR)
4620 {
4621 tree lower = TREE_OPERAND (purpose, 0);
4622 tree upper = TREE_OPERAND (purpose, 1);
4623
4624 /* If the lower bound is equal to upper, just treat it as if
4625 upper was the index. */
4626 if (simple_cst_equal (lower, upper))
4627 purpose = upper;
4628 else
4629 {
4630 gimplify_init_ctor_eval_range (object, lower, upper, value,
4631 array_elt_type, pre_p, cleared);
4632 continue;
4633 }
4634 }
4635
4636 if (array_elt_type)
4637 {
4638 /* Do not use bitsizetype for ARRAY_REF indices. */
4639 if (TYPE_DOMAIN (TREE_TYPE (object)))
4640 purpose
4641 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4642 purpose);
4643 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4644 purpose, NULL_TREE, NULL_TREE);
4645 }
4646 else
4647 {
4648 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4649 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4650 unshare_expr (object), purpose, NULL_TREE);
4651 }
4652
4653 if (TREE_CODE (value) == CONSTRUCTOR
4654 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4655 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4656 pre_p, cleared);
4657 else
4658 {
4659 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4660 gimplify_and_add (init, pre_p);
4661 ggc_free (init);
4662 }
4663 }
4664 }
4665
4666 /* Return the appropriate RHS predicate for this LHS. */
4667
4668 gimple_predicate
4669 rhs_predicate_for (tree lhs)
4670 {
4671 if (is_gimple_reg (lhs))
4672 return is_gimple_reg_rhs_or_call;
4673 else
4674 return is_gimple_mem_rhs_or_call;
4675 }
4676
4677 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4678 before the LHS has been gimplified. */
4679
4680 static gimple_predicate
4681 initial_rhs_predicate_for (tree lhs)
4682 {
4683 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4684 return is_gimple_reg_rhs_or_call;
4685 else
4686 return is_gimple_mem_rhs_or_call;
4687 }
4688
4689 /* Gimplify a C99 compound literal expression. This just means adding
4690 the DECL_EXPR before the current statement and using its anonymous
4691 decl instead. */
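/* E.g. (illustrative; the anonymous decl name is invented):

     p = &(struct S) { 1, 2 };

   gimplifies as if the user had written

     struct S D.1234 = { 1, 2 };
     p = &D.1234;  */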
4692
4693 static enum gimplify_status
4694 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4695 bool (*gimple_test_f) (tree),
4696 fallback_t fallback)
4697 {
4698 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4699 tree decl = DECL_EXPR_DECL (decl_s);
4700 tree init = DECL_INITIAL (decl);
4701 /* Mark the decl as addressable if the compound literal
4702 expression is addressable now, otherwise it is marked too late
4703 after we gimplify the initialization expression. */
4704 if (TREE_ADDRESSABLE (*expr_p))
4705 TREE_ADDRESSABLE (decl) = 1;
4706 /* Otherwise, if we don't need an lvalue and have a literal, directly
4707 substitute it. Check if it matches the gimple predicate, as
4708 otherwise we'd generate a new temporary, and we can as well just
4709 use the decl we already have. */
4710 else if (!TREE_ADDRESSABLE (decl)
4711 && !TREE_THIS_VOLATILE (decl)
4712 && init
4713 && (fallback & fb_lvalue) == 0
4714 && gimple_test_f (init))
4715 {
4716 *expr_p = init;
4717 return GS_OK;
4718 }
4719
4720 /* Preliminarily mark non-addressed complex variables as eligible
4721 for promotion to gimple registers. We'll transform their uses
4722 as we find them. */
4723 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4724 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4725 && !TREE_THIS_VOLATILE (decl)
4726 && !needs_to_live_in_memory (decl))
4727 DECL_GIMPLE_REG_P (decl) = 1;
4728
4729 /* If the decl is not addressable, then it is being used in some
4730 expression or on the right hand side of a statement, and it can
4731 be put into a readonly data section. */
4732 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4733 TREE_READONLY (decl) = 1;
4734
4735 /* This decl isn't mentioned in the enclosing block, so add it to the
4736 list of temps. FIXME it seems a bit of a kludge to say that
4737 anonymous artificial vars aren't pushed, but everything else is. */
4738 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4739 gimple_add_tmp_var (decl);
4740
4741 gimplify_and_add (decl_s, pre_p);
4742 *expr_p = decl;
4743 return GS_OK;
4744 }
4745
4746 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4747 return a new CONSTRUCTOR if something changed. */
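/* E.g. (illustrative): the initializer

     { .a = (struct T) { 1, 2 } }

   can be flattened to the plain nested constructor

     { .a = { 1, 2 } }

   when neither the literal nor its decl is addressable, which lets
   the containing aggregate be treated as a single constant.  */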
4748
4749 static tree
4750 optimize_compound_literals_in_ctor (tree orig_ctor)
4751 {
4752 tree ctor = orig_ctor;
4753 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4754 unsigned int idx, num = vec_safe_length (elts);
4755
4756 for (idx = 0; idx < num; idx++)
4757 {
4758 tree value = (*elts)[idx].value;
4759 tree newval = value;
4760 if (TREE_CODE (value) == CONSTRUCTOR)
4761 newval = optimize_compound_literals_in_ctor (value);
4762 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4763 {
4764 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4765 tree decl = DECL_EXPR_DECL (decl_s);
4766 tree init = DECL_INITIAL (decl);
4767
4768 if (!TREE_ADDRESSABLE (value)
4769 && !TREE_ADDRESSABLE (decl)
4770 && init
4771 && TREE_CODE (init) == CONSTRUCTOR)
4772 newval = optimize_compound_literals_in_ctor (init);
4773 }
4774 if (newval == value)
4775 continue;
4776
4777 if (ctor == orig_ctor)
4778 {
4779 ctor = copy_node (orig_ctor);
4780 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4781 elts = CONSTRUCTOR_ELTS (ctor);
4782 }
4783 (*elts)[idx].value = newval;
4784 }
4785 return ctor;
4786 }
4787
4788 /* A subroutine of gimplify_modify_expr. Break out elements of a
4789 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4790
4791 Note that we still need to clear any elements that don't have explicit
4792 initializers, so if not all elements are initialized we keep the
4793 original MODIFY_EXPR, we just remove all of the constructor elements.
4794
4795 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4796 GS_ERROR if we would have to create a temporary when gimplifying
4797 this constructor. Otherwise, return GS_OK.
4798
4799 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
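/* For example (a sketch of the heuristics below):

     int a[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };

   is a valid constant initializer and may be promoted to static
   storage or block-copied from the constant pool, while

     int b[8] = { [7] = v };

   is mostly zero and is better handled by clearing 'b' and then
   storing the single nonzero element.  */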
4800
4801 static enum gimplify_status
4802 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4803 bool want_value, bool notify_temp_creation)
4804 {
4805 tree object, ctor, type;
4806 enum gimplify_status ret;
4807 vec<constructor_elt, va_gc> *elts;
4808
4809 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4810
4811 if (!notify_temp_creation)
4812 {
4813 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4814 is_gimple_lvalue, fb_lvalue);
4815 if (ret == GS_ERROR)
4816 return ret;
4817 }
4818
4819 object = TREE_OPERAND (*expr_p, 0);
4820 ctor = TREE_OPERAND (*expr_p, 1)
4821 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4822 type = TREE_TYPE (ctor);
4823 elts = CONSTRUCTOR_ELTS (ctor);
4824 ret = GS_ALL_DONE;
4825
4826 switch (TREE_CODE (type))
4827 {
4828 case RECORD_TYPE:
4829 case UNION_TYPE:
4830 case QUAL_UNION_TYPE:
4831 case ARRAY_TYPE:
4832 {
4833 struct gimplify_init_ctor_preeval_data preeval_data;
4834 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4835 HOST_WIDE_INT num_unique_nonzero_elements;
4836 bool cleared, complete_p, valid_const_initializer;
4837 /* Use readonly data for initializers of this or smaller size
4838 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4839 ratio. */
4840 const HOST_WIDE_INT min_unique_size = 64;
4841 /* If the num_nonzero_elements / num_unique_nonzero_elements ratio
4842 is smaller than this, use readonly data. */
4843 const int unique_nonzero_ratio = 8;
4844
4845 /* Aggregate types must lower constructors to initialization of
4846 individual elements. The exception is that a CONSTRUCTOR node
4847 with no elements indicates zero-initialization of the whole. */
4848 if (vec_safe_is_empty (elts))
4849 {
4850 if (notify_temp_creation)
4851 return GS_OK;
4852 break;
4853 }
4854
4855 /* Fetch information about the constructor to direct later processing.
4856 We might want to make static versions of it in various cases, and
4857 can only do so if it is known to be a valid constant initializer. */
4858 valid_const_initializer
4859 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4860 &num_unique_nonzero_elements,
4861 &num_ctor_elements, &complete_p);
4862
4863 /* If a const aggregate variable is being initialized, then it
4864 should never be a loss to promote the variable to be static. */
4865 if (valid_const_initializer
4866 && num_nonzero_elements > 1
4867 && TREE_READONLY (object)
4868 && VAR_P (object)
4869 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4870 /* For ctors that have many repeated nonzero elements
4871 represented through RANGE_EXPRs, prefer initializing
4872 those through runtime loops over copies of large amounts
4873 of data from readonly data section. */
4874 && (num_unique_nonzero_elements
4875 > num_nonzero_elements / unique_nonzero_ratio
4876 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4877 <= (unsigned HOST_WIDE_INT) min_unique_size)))
4878 {
4879 if (notify_temp_creation)
4880 return GS_ERROR;
4881 DECL_INITIAL (object) = ctor;
4882 TREE_STATIC (object) = 1;
4883 if (!DECL_NAME (object))
4884 DECL_NAME (object) = create_tmp_var_name ("C");
4885 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4886
4887 /* ??? C++ doesn't automatically append a .<number> to the
4888 assembler name, and even when it does, it looks at FE private
4889 data structures to figure out what that number should be,
4890 which are not set for this variable. I suppose this is
4891 important for local statics for inline functions, which aren't
4892 "local" in the object file sense. So in order to get a unique
4893 TU-local symbol, we must invoke the lhd version now. */
4894 lhd_set_decl_assembler_name (object);
4895
4896 *expr_p = NULL_TREE;
4897 break;
4898 }
4899
4900 /* If there are "lots" of initialized elements, even discounting
4901 those that are not address constants (and thus *must* be
4902 computed at runtime), then partition the constructor into
4903 constant and non-constant parts. Block copy the constant
4904 parts in, then generate code for the non-constant parts. */
4905 /* TODO. There's code in cp/typeck.c to do this. */
4906
4907 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4908 /* store_constructor will ignore the clearing of variable-sized
4909 objects. Initializers for such objects must explicitly set
4910 every field that needs to be set. */
4911 cleared = false;
4912 else if (!complete_p)
4913 /* If the constructor isn't complete, clear the whole object
4914 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4915
4916 ??? This ought not to be needed. For any element not present
4917 in the initializer, we should simply set it to zero. Except
4918 we'd need to *find* the elements that are not present, and that
4919 requires trickery to avoid quadratic compile-time behavior in
4920 large cases or excessive memory use in small cases. */
4921 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
4922 else if (num_ctor_elements - num_nonzero_elements
4923 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4924 && num_nonzero_elements < num_ctor_elements / 4)
4925 /* If there are "lots" of zeros, it's more efficient to clear
4926 the memory and then set the nonzero elements. */
4927 cleared = true;
4928 else
4929 cleared = false;
4930
4931 /* If there are "lots" of initialized elements, and all of them
4932 are valid address constants, then the entire initializer can
4933 be dropped to memory, and then memcpy'd out. Don't do this
4934 for sparse arrays, though, as it's more efficient to follow
4935 the standard CONSTRUCTOR behavior of memset followed by
4936 individual element initialization. Also don't do this for small
4937 all-zero initializers (which aren't big enough to merit
4938 clearing), and don't try to make bitwise copies of
4939 TREE_ADDRESSABLE types. */
4940
4941 if (valid_const_initializer
4942 && !(cleared || num_nonzero_elements == 0)
4943 && !TREE_ADDRESSABLE (type))
4944 {
4945 HOST_WIDE_INT size = int_size_in_bytes (type);
4946 unsigned int align;
4947
4948 /* ??? We can still get unbounded array types, at least
4949 from the C++ front end. This seems wrong, but attempt
4950 to work around it for now. */
4951 if (size < 0)
4952 {
4953 size = int_size_in_bytes (TREE_TYPE (object));
4954 if (size >= 0)
4955 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4956 }
4957
4958 /* Find the maximum alignment we can assume for the object. */
4959 /* ??? Make use of DECL_OFFSET_ALIGN. */
4960 if (DECL_P (object))
4961 align = DECL_ALIGN (object);
4962 else
4963 align = TYPE_ALIGN (type);
4964
4965 /* Do a block move either if the size is so small as to make
4966 each individual move a sub-unit move on average, or if it
4967 is so large as to make individual moves inefficient. */
4968 if (size > 0
4969 && num_nonzero_elements > 1
4970 /* For ctors that have many repeated nonzero elements
4971 represented through RANGE_EXPRs, prefer initializing
4972 those through runtime loops over copies of large amounts
4973 of data from readonly data section. */
4974 && (num_unique_nonzero_elements
4975 > num_nonzero_elements / unique_nonzero_ratio
4976 || size <= min_unique_size)
4977 && (size < num_nonzero_elements
4978 || !can_move_by_pieces (size, align)))
4979 {
4980 if (notify_temp_creation)
4981 return GS_ERROR;
4982
4983 walk_tree (&ctor, force_labels_r, NULL, NULL);
4984 ctor = tree_output_constant_def (ctor);
4985 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4986 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4987 TREE_OPERAND (*expr_p, 1) = ctor;
4988
4989 /* This is no longer an assignment of a CONSTRUCTOR, but
4990 we still may have processing to do on the LHS. So
4991 pretend we didn't do anything here to let that happen. */
4992 return GS_UNHANDLED;
4993 }
4994 }
4995
4996 /* If the target is volatile, there are nonzero elements, and more than
4997 one field is to be assigned, initialize the target from a temporary. */
4998 if (TREE_THIS_VOLATILE (object)
4999 && !TREE_ADDRESSABLE (type)
5000 && num_nonzero_elements > 0
5001 && vec_safe_length (elts) > 1)
5002 {
5003 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5004 TREE_OPERAND (*expr_p, 0) = temp;
5005 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5006 *expr_p,
5007 build2 (MODIFY_EXPR, void_type_node,
5008 object, temp));
5009 return GS_OK;
5010 }
5011
5012 if (notify_temp_creation)
5013 return GS_OK;
5014
5015 /* If there are nonzero elements and if needed, pre-evaluate to capture
5016 elements overlapping with the lhs into temporaries. We must do this
5017 before clearing to fetch the values before they are zeroed-out. */
5018 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5019 {
5020 preeval_data.lhs_base_decl = get_base_address (object);
5021 if (!DECL_P (preeval_data.lhs_base_decl))
5022 preeval_data.lhs_base_decl = NULL;
5023 preeval_data.lhs_alias_set = get_alias_set (object);
5024
5025 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5026 pre_p, post_p, &preeval_data);
5027 }
5028
5029 bool ctor_has_side_effects_p
5030 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5031
5032 if (cleared)
5033 {
5034 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5035 Note that we still have to gimplify, in order to handle the
5036 case of variable sized types. Avoid shared tree structures. */
5037 CONSTRUCTOR_ELTS (ctor) = NULL;
5038 TREE_SIDE_EFFECTS (ctor) = 0;
5039 object = unshare_expr (object);
5040 gimplify_stmt (expr_p, pre_p);
5041 }
5042
5043 /* If we have not block cleared the object, or if there are nonzero
5044 elements in the constructor, or if the constructor has side effects,
5045 add assignments to the individual scalar fields of the object. */
5046 if (!cleared
5047 || num_nonzero_elements > 0
5048 || ctor_has_side_effects_p)
5049 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5050
5051 *expr_p = NULL_TREE;
5052 }
5053 break;
5054
5055 case COMPLEX_TYPE:
5056 {
5057 tree r, i;
5058
5059 if (notify_temp_creation)
5060 return GS_OK;
5061
5062 /* Extract the real and imaginary parts out of the ctor. */
5063 gcc_assert (elts->length () == 2);
5064 r = (*elts)[0].value;
5065 i = (*elts)[1].value;
5066 if (r == NULL || i == NULL)
5067 {
5068 tree zero = build_zero_cst (TREE_TYPE (type));
5069 if (r == NULL)
5070 r = zero;
5071 if (i == NULL)
5072 i = zero;
5073 }
5074
5075 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5076 represent creation of a complex value. */
5077 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5078 {
5079 ctor = build_complex (type, r, i);
5080 TREE_OPERAND (*expr_p, 1) = ctor;
5081 }
5082 else
5083 {
5084 ctor = build2 (COMPLEX_EXPR, type, r, i);
5085 TREE_OPERAND (*expr_p, 1) = ctor;
5086 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5087 pre_p,
5088 post_p,
5089 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5090 fb_rvalue);
5091 }
5092 }
5093 break;
5094
5095 case VECTOR_TYPE:
5096 {
5097 unsigned HOST_WIDE_INT ix;
5098 constructor_elt *ce;
5099
5100 if (notify_temp_creation)
5101 return GS_OK;
5102
5103 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5104 if (TREE_CONSTANT (ctor))
5105 {
5106 bool constant_p = true;
5107 tree value;
5108
5109 /* Even when ctor is constant, it might contain non-*_CST
5110 elements, such as addresses or trapping values like
5111 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5112 in VECTOR_CST nodes. */
5113 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5114 if (!CONSTANT_CLASS_P (value))
5115 {
5116 constant_p = false;
5117 break;
5118 }
5119
5120 if (constant_p)
5121 {
5122 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5123 break;
5124 }
5125
5126 TREE_CONSTANT (ctor) = 0;
5127 }
5128
5129 /* Vector types use CONSTRUCTOR all the way through gimple
5130 compilation as a general initializer. */
5131 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5132 {
5133 enum gimplify_status tret;
5134 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5135 fb_rvalue);
5136 if (tret == GS_ERROR)
5137 ret = GS_ERROR;
5138 else if (TREE_STATIC (ctor)
5139 && !initializer_constant_valid_p (ce->value,
5140 TREE_TYPE (ce->value)))
5141 TREE_STATIC (ctor) = 0;
5142 }
5143 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5144 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5145 }
5146 break;
5147
5148 default:
5149 /* So how did we get a CONSTRUCTOR for a scalar type? */
5150 gcc_unreachable ();
5151 }
5152
5153 if (ret == GS_ERROR)
5154 return GS_ERROR;
5155 /* If we have gimplified both sides of the initializer but have
5156 not emitted an assignment, do so now. */
5157 if (*expr_p)
5158 {
5159 tree lhs = TREE_OPERAND (*expr_p, 0);
5160 tree rhs = TREE_OPERAND (*expr_p, 1);
5161 if (want_value && object == lhs)
5162 lhs = unshare_expr (lhs);
5163 gassign *init = gimple_build_assign (lhs, rhs);
5164 gimplify_seq_add_stmt (pre_p, init);
5165 }
5166 if (want_value)
5167 {
5168 *expr_p = object;
5169 return GS_OK;
5170 }
5171 else
5172 {
5173 *expr_p = NULL;
5174 return GS_ALL_DONE;
5175 }
5176 }
5177
5178 /* Given a pointer value OP0, return a simplified version of an
5179 indirection through OP0, or NULL_TREE if no simplification is
5180 possible. This may only be applied to a rhs of an expression.
5181 Note that the resulting type may be different from the type pointed
5182 to in the sense that it is still compatible from the langhooks
5183 point of view. */
5184
5185 static tree
5186 gimple_fold_indirect_ref_rhs (tree t)
5187 {
5188 return gimple_fold_indirect_ref (t);
5189 }
5190
5191 /* Subroutine of gimplify_modify_expr to do simplifications of
5192 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5193 something changes. */
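/* Two illustrative simplifications performed below: for

     x = *(const A *) (A *) &y;

   the INDIRECT_REF case folds the RHS back to plain 'y', and for

     x = f ();        (f returning in memory)

   the CALL_EXPR case may let the call write straight into 'x' via
   the return slot instead of through a temporary.  */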
5194
5195 static enum gimplify_status
5196 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5197 gimple_seq *pre_p, gimple_seq *post_p,
5198 bool want_value)
5199 {
5200 enum gimplify_status ret = GS_UNHANDLED;
5201 bool changed;
5202
5203 do
5204 {
5205 changed = false;
5206 switch (TREE_CODE (*from_p))
5207 {
5208 case VAR_DECL:
5209 /* If we're assigning from a read-only variable initialized with
5210 a constructor, do the direct assignment from the constructor,
5211 but only if neither source nor target is volatile, since this
5212 latter assignment might end up being done on a per-field basis. */
5213 if (DECL_INITIAL (*from_p)
5214 && TREE_READONLY (*from_p)
5215 && !TREE_THIS_VOLATILE (*from_p)
5216 && !TREE_THIS_VOLATILE (*to_p)
5217 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5218 {
5219 tree old_from = *from_p;
5220 enum gimplify_status subret;
5221
5222 /* Move the constructor into the RHS. */
5223 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5224
5225 /* Let's see if gimplify_init_constructor will need to put
5226 it in memory. */
5227 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5228 false, true);
5229 if (subret == GS_ERROR)
5230 {
5231 /* If so, revert the change. */
5232 *from_p = old_from;
5233 }
5234 else
5235 {
5236 ret = GS_OK;
5237 changed = true;
5238 }
5239 }
5240 break;
5241 case INDIRECT_REF:
5242 {
5243 /* If we have code like
5244
5245 *(const A*)(A*)&x
5246
5247 where the type of "x" is a (possibly cv-qualified variant
5248 of "A"), treat the entire expression as identical to "x".
5249 This kind of code arises in C++ when an object is bound
5250 to a const reference, and if "x" is a TARGET_EXPR we want
5251 to take advantage of the optimization below. */
5252 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5253 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5254 if (t)
5255 {
5256 if (TREE_THIS_VOLATILE (t) != volatile_p)
5257 {
5258 if (DECL_P (t))
5259 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5260 build_fold_addr_expr (t));
5261 if (REFERENCE_CLASS_P (t))
5262 TREE_THIS_VOLATILE (t) = volatile_p;
5263 }
5264 *from_p = t;
5265 ret = GS_OK;
5266 changed = true;
5267 }
5268 break;
5269 }
5270
5271 case TARGET_EXPR:
5272 {
5273 /* If we are initializing something from a TARGET_EXPR, strip the
5274 TARGET_EXPR and initialize it directly, if possible. This can't
5275 be done if the initializer is void, since that implies that the
5276 temporary is set in some non-trivial way.
5277
5278 ??? What about code that pulls out the temp and uses it
5279 elsewhere? I think that such code never uses the TARGET_EXPR as
5280 an initializer. If I'm wrong, we'll die because the temp won't
5281 have any RTL. In that case, I guess we'll need to replace
5282 references somehow. */
5283 tree init = TARGET_EXPR_INITIAL (*from_p);
5284
5285 if (init
5286 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5287 || !TARGET_EXPR_NO_ELIDE (*from_p))
5288 && !VOID_TYPE_P (TREE_TYPE (init)))
5289 {
5290 *from_p = init;
5291 ret = GS_OK;
5292 changed = true;
5293 }
5294 }
5295 break;
5296
5297 case COMPOUND_EXPR:
5298 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5299 caught. */
5300 gimplify_compound_expr (from_p, pre_p, true);
5301 ret = GS_OK;
5302 changed = true;
5303 break;
5304
5305 case CONSTRUCTOR:
5306 /* If we already made some changes, let the front end have a
5307 crack at this before we break it down. */
5308 if (ret != GS_UNHANDLED)
5309 break;
5310 /* If we're initializing from a CONSTRUCTOR, break this into
5311 individual MODIFY_EXPRs. */
5312 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5313 false);
5314
5315 case COND_EXPR:
5316 /* If we're assigning to a non-register type, push the assignment
5317 down into the branches. This is mandatory for ADDRESSABLE types,
5318 since we cannot generate temporaries for such, but it saves a
5319 copy in other cases as well. */
5320 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5321 {
5322 /* This code should mirror the code in gimplify_cond_expr. */
5323 enum tree_code code = TREE_CODE (*expr_p);
5324 tree cond = *from_p;
5325 tree result = *to_p;
5326
5327 ret = gimplify_expr (&result, pre_p, post_p,
5328 is_gimple_lvalue, fb_lvalue);
5329 if (ret != GS_ERROR)
5330 ret = GS_OK;
5331
5332 /* If we are going to write RESULT more than once, clear the
5333 TREE_READONLY flag; otherwise we might incorrectly promote
5334 the variable to static const and initialize it at compile
5335 time in one of the branches. */
5336 if (VAR_P (result)
5337 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5338 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5339 TREE_READONLY (result) = 0;
5340 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5341 TREE_OPERAND (cond, 1)
5342 = build2 (code, void_type_node, result,
5343 TREE_OPERAND (cond, 1));
5344 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5345 TREE_OPERAND (cond, 2)
5346 = build2 (code, void_type_node, unshare_expr (result),
5347 TREE_OPERAND (cond, 2));
5348
5349 TREE_TYPE (cond) = void_type_node;
5350 recalculate_side_effects (cond);
5351
5352 if (want_value)
5353 {
5354 gimplify_and_add (cond, pre_p);
5355 *expr_p = unshare_expr (result);
5356 }
5357 else
5358 *expr_p = cond;
5359 return ret;
5360 }
5361 break;
5362
5363 case CALL_EXPR:
5364 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5365 return slot so that we don't generate a temporary. */
5366 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5367 && aggregate_value_p (*from_p, *from_p))
5368 {
5369 bool use_target;
5370
5371 if (!(rhs_predicate_for (*to_p))(*from_p))
5372 /* If we need a temporary, *to_p isn't accurate. */
5373 use_target = false;
5374 /* It's OK to use the return slot directly unless it's an NRV. */
5375 else if (TREE_CODE (*to_p) == RESULT_DECL
5376 && DECL_NAME (*to_p) == NULL_TREE
5377 && needs_to_live_in_memory (*to_p))
5378 use_target = true;
5379 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5380 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5381 /* Don't force regs into memory. */
5382 use_target = false;
5383 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5384 /* It's OK to use the target directly if it's being
5385 initialized. */
5386 use_target = true;
5387 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5388 != INTEGER_CST)
5389 /* Always use the target, and thus the return slot optimization (RSO),
5390 for variable-sized types. GIMPLE cannot deal with a variable-sized
5391 assignment embedded in a call statement. */
5392 use_target = true;
5393 else if (TREE_CODE (*to_p) != SSA_NAME
5394 && (!is_gimple_variable (*to_p)
5395 || needs_to_live_in_memory (*to_p)))
5396 /* Don't use the original target if it's already addressable;
5397 if its address escapes, and the called function uses the
5398 NRV optimization, a conforming program could see *to_p
5399 change before the called function returns; see c++/19317.
5400 When optimizing, the return_slot pass marks more functions
5401 as safe after we have escape info. */
5402 use_target = false;
5403 else
5404 use_target = true;
5405
5406 if (use_target)
5407 {
5408 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5409 mark_addressable (*to_p);
5410 }
5411 }
5412 break;
5413
5414 case WITH_SIZE_EXPR:
5415 /* Likewise for calls that return an aggregate of non-constant size,
5416 since we would not be able to generate a temporary at all. */
5417 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5418 {
5419 *from_p = TREE_OPERAND (*from_p, 0);
5420 /* We don't change ret in this case because the
5421 WITH_SIZE_EXPR might have been added in
5422 gimplify_modify_expr, so returning GS_OK would lead to an
5423 infinite loop. */
5424 changed = true;
5425 }
5426 break;
5427
5428 /* If we're initializing from a container, push the initialization
5429 inside it. */
5430 case CLEANUP_POINT_EXPR:
5431 case BIND_EXPR:
5432 case STATEMENT_LIST:
5433 {
5434 tree wrap = *from_p;
5435 tree t;
5436
5437 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5438 fb_lvalue);
5439 if (ret != GS_ERROR)
5440 ret = GS_OK;
5441
5442 t = voidify_wrapper_expr (wrap, *expr_p);
5443 gcc_assert (t == *expr_p);
5444
5445 if (want_value)
5446 {
5447 gimplify_and_add (wrap, pre_p);
5448 *expr_p = unshare_expr (*to_p);
5449 }
5450 else
5451 *expr_p = wrap;
5452 return GS_OK;
5453 }
5454
5455 case COMPOUND_LITERAL_EXPR:
5456 {
5457 tree complit = TREE_OPERAND (*expr_p, 1);
5458 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5459 tree decl = DECL_EXPR_DECL (decl_s);
5460 tree init = DECL_INITIAL (decl);
5461
5462 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5463 into struct T x = { 0, 1, 2 } if the address of the
5464 compound literal has never been taken. */
5465 if (!TREE_ADDRESSABLE (complit)
5466 && !TREE_ADDRESSABLE (decl)
5467 && init)
5468 {
5469 *expr_p = copy_node (*expr_p);
5470 TREE_OPERAND (*expr_p, 1) = init;
5471 return GS_OK;
5472 }
5473 }
5474
5475 default:
5476 break;
5477 }
5478 }
5479 while (changed);
5480
5481 return ret;
5482 }
5483
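/* An illustrative sketch (the example source is assumed, not taken from
   this file): in C++, an initialization such as

     A a = make_a ();   // RHS is a TARGET_EXPR wrapping the call

   hits the TARGET_EXPR case above, which strips the wrapper so that "a"
   is initialized directly from the call instead of through a separate
   temporary and copy.  */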
5484
5485 /* Return true if T looks like a valid GIMPLE statement. */
5486
5487 static bool
5488 is_gimple_stmt (tree t)
5489 {
5490 const enum tree_code code = TREE_CODE (t);
5491
5492 switch (code)
5493 {
5494 case NOP_EXPR:
5495 /* The only valid NOP_EXPR is the empty statement. */
5496 return IS_EMPTY_STMT (t);
5497
5498 case BIND_EXPR:
5499 case COND_EXPR:
5500 /* These are only valid if they're void. */
5501 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5502
5503 case SWITCH_EXPR:
5504 case GOTO_EXPR:
5505 case RETURN_EXPR:
5506 case LABEL_EXPR:
5507 case CASE_LABEL_EXPR:
5508 case TRY_CATCH_EXPR:
5509 case TRY_FINALLY_EXPR:
5510 case EH_FILTER_EXPR:
5511 case CATCH_EXPR:
5512 case ASM_EXPR:
5513 case STATEMENT_LIST:
5514 case OACC_PARALLEL:
5515 case OACC_KERNELS:
5516 case OACC_DATA:
5517 case OACC_HOST_DATA:
5518 case OACC_DECLARE:
5519 case OACC_UPDATE:
5520 case OACC_ENTER_DATA:
5521 case OACC_EXIT_DATA:
5522 case OACC_CACHE:
5523 case OMP_PARALLEL:
5524 case OMP_FOR:
5525 case OMP_SIMD:
5526 case OMP_DISTRIBUTE:
5527 case OACC_LOOP:
5528 case OMP_SCAN:
5529 case OMP_SECTIONS:
5530 case OMP_SECTION:
5531 case OMP_SINGLE:
5532 case OMP_MASTER:
5533 case OMP_TASKGROUP:
5534 case OMP_ORDERED:
5535 case OMP_CRITICAL:
5536 case OMP_TASK:
5537 case OMP_TARGET:
5538 case OMP_TARGET_DATA:
5539 case OMP_TARGET_UPDATE:
5540 case OMP_TARGET_ENTER_DATA:
5541 case OMP_TARGET_EXIT_DATA:
5542 case OMP_TASKLOOP:
5543 case OMP_TEAMS:
5544 /* These are always void. */
5545 return true;
5546
5547 case CALL_EXPR:
5548 case MODIFY_EXPR:
5549 case PREDICT_EXPR:
5550 /* These are valid regardless of their type. */
5551 return true;
5552
5553 default:
5554 return false;
5555 }
5556 }
5557
5558
5559 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5560 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5561 DECL_GIMPLE_REG_P set.
5562
5563 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5564 other, unmodified part of the complex object just before the total store.
5565 As a consequence, if the object is still uninitialized, an undefined value
5566 will be loaded into a register, which may result in a spurious exception
5567 if the register is floating-point and the value happens to be a signaling
5568 NaN for example. Then the fully-fledged complex operations lowering pass
5569 followed by a DCE pass are necessary in order to fix things up. */
5570
5571 static enum gimplify_status
5572 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5573 bool want_value)
5574 {
5575 enum tree_code code, ocode;
5576 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5577
5578 lhs = TREE_OPERAND (*expr_p, 0);
5579 rhs = TREE_OPERAND (*expr_p, 1);
5580 code = TREE_CODE (lhs);
5581 lhs = TREE_OPERAND (lhs, 0);
5582
5583 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5584 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5585 TREE_NO_WARNING (other) = 1;
5586 other = get_formal_tmp_var (other, pre_p);
5587
5588 realpart = code == REALPART_EXPR ? rhs : other;
5589 imagpart = code == REALPART_EXPR ? other : rhs;
5590
5591 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5592 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5593 else
5594 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5595
5596 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5597 *expr_p = (want_value) ? rhs : NULL_TREE;
5598
5599 return GS_ALL_DONE;
5600 }
5601
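/* A minimal sketch of this promotion, using an assumed C example (not
   from this file):

     _Complex double c;
     __real__ c = x;   // partial store to REALPART_EXPR <c>

   becomes, approximately,

     tmp = __imag__ c;            // load the unmodified part
     c = COMPLEX_EXPR <x, tmp>;   // one total store

   which is also why an uninitialized "c" can load an undefined
   imaginary part, as the note above the function warns.  */
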
5602 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5603
5604 modify_expr
5605 : varname '=' rhs
5606 | '*' ID '=' rhs
5607
5608 PRE_P points to the list where side effects that must happen before
5609 *EXPR_P should be stored.
5610
5611 POST_P points to the list where side effects that must happen after
5612 *EXPR_P should be stored.
5613
5614 WANT_VALUE is nonzero iff we want to use the value of this expression
5615 in another expression. */
5616
5617 static enum gimplify_status
5618 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5619 bool want_value)
5620 {
5621 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5622 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5623 enum gimplify_status ret = GS_UNHANDLED;
5624 gimple *assign;
5625 location_t loc = EXPR_LOCATION (*expr_p);
5626 gimple_stmt_iterator gsi;
5627
5628 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5629 || TREE_CODE (*expr_p) == INIT_EXPR);
5630
5631 /* Trying to simplify a clobber using normal logic doesn't work,
5632 so handle it here. */
5633 if (TREE_CLOBBER_P (*from_p))
5634 {
5635 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5636 if (ret == GS_ERROR)
5637 return ret;
5638 gcc_assert (!want_value);
5639 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5640 {
5641 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5642 pre_p, post_p);
5643 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5644 }
5645 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5646 *expr_p = NULL;
5647 return GS_ALL_DONE;
5648 }
5649
5650 /* Insert pointer conversions required by the middle-end that are not
5651 required by the frontend. This fixes middle-end type checking for,
5652 for example, gcc.dg/redecl-6.c. */
5653 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5654 {
5655 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5656 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5657 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5658 }
5659
5660 /* See if any simplifications can be done based on what the RHS is. */
5661 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5662 want_value);
5663 if (ret != GS_UNHANDLED)
5664 return ret;
5665
5666 /* For zero-sized types, only gimplify the left-hand side and right-hand
5667 side as statements and throw away the assignment. Do this after
5668 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5669 types properly. */
5670 if (zero_sized_type (TREE_TYPE (*from_p))
5671 && !want_value
5672 /* Don't do this for calls that return addressable types, expand_call
5673 relies on those having a lhs. */
5674 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5675 && TREE_CODE (*from_p) == CALL_EXPR))
5676 {
5677 gimplify_stmt (from_p, pre_p);
5678 gimplify_stmt (to_p, pre_p);
5679 *expr_p = NULL_TREE;
5680 return GS_ALL_DONE;
5681 }
5682
5683 /* If the value being copied is of variable width, compute the length
5684 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5685 before gimplifying any of the operands so that we can resolve any
5686 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5687 the size of the expression to be copied, not of the destination, so
5688 that is what we must do here. */
5689 maybe_with_size_expr (from_p);
5690
5691 /* As a special case, we have to temporarily allow for assignments
5692 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5693 a toplevel statement, when gimplifying the GENERIC expression
5694 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5695 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5696
5697 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5698 prevent gimplify_expr from trying to create a new temporary for
5699 foo's LHS, we tell it that it should only gimplify until it
5700 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5701 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5702 and all we need to do here is set 'a' to be its LHS. */
5703
5704 /* Gimplify the RHS first for C++17 and bug 71104. */
5705 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5706 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5707 if (ret == GS_ERROR)
5708 return ret;
5709
5710 /* Then gimplify the LHS. */
5711 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5712 twice we have to make sure to gimplify into non-SSA as otherwise
5713 the abnormal edge added later will make those defs not dominate
5714 their uses.
5715 ??? Technically this applies only to the registers used in the
5716 resulting non-register *TO_P. */
5717 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5718 if (saved_into_ssa
5719 && TREE_CODE (*from_p) == CALL_EXPR
5720 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5721 gimplify_ctxp->into_ssa = false;
5722 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5723 gimplify_ctxp->into_ssa = saved_into_ssa;
5724 if (ret == GS_ERROR)
5725 return ret;
5726
5727 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5728 guess for the predicate was wrong. */
5729 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5730 if (final_pred != initial_pred)
5731 {
5732 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5733 if (ret == GS_ERROR)
5734 return ret;
5735 }
5736
5737 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
5738 size as argument to the call. */
5739 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5740 {
5741 tree call = TREE_OPERAND (*from_p, 0);
5742 tree vlasize = TREE_OPERAND (*from_p, 1);
5743
5744 if (TREE_CODE (call) == CALL_EXPR
5745 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5746 {
5747 int nargs = call_expr_nargs (call);
5748 tree type = TREE_TYPE (call);
5749 tree ap = CALL_EXPR_ARG (call, 0);
5750 tree tag = CALL_EXPR_ARG (call, 1);
5751 tree aptag = CALL_EXPR_ARG (call, 2);
5752 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5753 IFN_VA_ARG, type,
5754 nargs + 1, ap, tag,
5755 aptag, vlasize);
5756 TREE_OPERAND (*from_p, 0) = newcall;
5757 }
5758 }
5759
5760 /* Now see if the above changed *from_p to something we handle specially. */
5761 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5762 want_value);
5763 if (ret != GS_UNHANDLED)
5764 return ret;
5765
5766 /* If we've got a variable-sized assignment between two lvalues (i.e. one
5767 that does not involve a call), then we can make things more straightforward
5768 by converting the assignment to memcpy or memset. */
5769 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5770 {
5771 tree from = TREE_OPERAND (*from_p, 0);
5772 tree size = TREE_OPERAND (*from_p, 1);
5773
5774 if (TREE_CODE (from) == CONSTRUCTOR)
5775 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5776
5777 if (is_gimple_addressable (from))
5778 {
5779 *from_p = from;
5780 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5781 pre_p);
5782 }
5783 }
5784
5785 /* Transform partial stores to non-addressable complex variables into
5786 total stores. This allows us to use real instead of virtual operands
5787 for these variables, which improves optimization. */
5788 if ((TREE_CODE (*to_p) == REALPART_EXPR
5789 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5790 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5791 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5792
5793 /* Try to alleviate the effects of the gimplification creating artificial
5794 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5795 make sure not to create DECL_DEBUG_EXPR links across functions. */
5796 if (!gimplify_ctxp->into_ssa
5797 && VAR_P (*from_p)
5798 && DECL_IGNORED_P (*from_p)
5799 && DECL_P (*to_p)
5800 && !DECL_IGNORED_P (*to_p)
5801 && decl_function_context (*to_p) == current_function_decl
5802 && decl_function_context (*from_p) == current_function_decl)
5803 {
5804 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5805 DECL_NAME (*from_p)
5806 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5807 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5808 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5809 }
5810
5811 if (want_value && TREE_THIS_VOLATILE (*to_p))
5812 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5813
5814 if (TREE_CODE (*from_p) == CALL_EXPR)
5815 {
5816 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5817 instead of a GIMPLE_ASSIGN. */
5818 gcall *call_stmt;
5819 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5820 {
5821 /* Gimplify internal functions created in the FEs. */
5822 int nargs = call_expr_nargs (*from_p), i;
5823 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5824 auto_vec<tree> vargs (nargs);
5825
5826 for (i = 0; i < nargs; i++)
5827 {
5828 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5829 EXPR_LOCATION (*from_p));
5830 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5831 }
5832 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5833 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5834 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5835 }
5836 else
5837 {
5838 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5839 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5840 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5841 tree fndecl = get_callee_fndecl (*from_p);
5842 if (fndecl
5843 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
5844 && call_expr_nargs (*from_p) == 3)
5845 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5846 CALL_EXPR_ARG (*from_p, 0),
5847 CALL_EXPR_ARG (*from_p, 1),
5848 CALL_EXPR_ARG (*from_p, 2));
5849 else
5850 {
5851 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5852 }
5853 }
5854 notice_special_calls (call_stmt);
5855 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5856 gimple_call_set_lhs (call_stmt, *to_p);
5857 else if (TREE_CODE (*to_p) == SSA_NAME)
5858 /* The above is somewhat premature; avoid ICEing later for an
5859 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5860 ??? This doesn't make it a default-def. */
5861 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5862
5863 assign = call_stmt;
5864 }
5865 else
5866 {
5867 assign = gimple_build_assign (*to_p, *from_p);
5868 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5869 if (COMPARISON_CLASS_P (*from_p))
5870 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5871 }
5872
5873 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5874 {
5875 /* We should have got an SSA name from the start. */
5876 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5877 || ! gimple_in_ssa_p (cfun));
5878 }
5879
5880 gimplify_seq_add_stmt (pre_p, assign);
5881 gsi = gsi_last (*pre_p);
5882 maybe_fold_stmt (&gsi);
5883
5884 if (want_value)
5885 {
5886 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5887 return GS_OK;
5888 }
5889 else
5890 *expr_p = NULL;
5891
5892 return GS_ALL_DONE;
5893 }
5894
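/* A sketch of the CALL_EXPR handling, with an assumed example (not from
   this file): for

     a = foo (b);

   the function above does not build GIMPLE_ASSIGN <a, GIMPLE_CALL>;
   instead the call is gimplified to a single GIMPLE_CALL tuple whose
   LHS is set to "a", since calls are toplevel statements in GIMPLE.  */
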
5895 /* Gimplify a comparison between two variable-sized objects. Do this
5896 with a call to BUILT_IN_MEMCMP. */
5897
5898 static enum gimplify_status
5899 gimplify_variable_sized_compare (tree *expr_p)
5900 {
5901 location_t loc = EXPR_LOCATION (*expr_p);
5902 tree op0 = TREE_OPERAND (*expr_p, 0);
5903 tree op1 = TREE_OPERAND (*expr_p, 1);
5904 tree t, arg, dest, src, expr;
5905
5906 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5907 arg = unshare_expr (arg);
5908 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5909 src = build_fold_addr_expr_loc (loc, op1);
5910 dest = build_fold_addr_expr_loc (loc, op0);
5911 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5912 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5913
5914 expr
5915 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5916 SET_EXPR_LOCATION (expr, loc);
5917 *expr_p = expr;
5918
5919 return GS_OK;
5920 }
5921
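/* A sketch of this lowering (the example is assumed; such comparisons
   typically come from front ends like Ada rather than plain C): an
   equality test

     a == b   -- A and B of a type whose size is not compile-time constant

   becomes roughly

     memcmp (&a, &b, <size of A's type>) == 0

   after any PLACEHOLDER_EXPRs in the size have been substituted using
   A itself.  */
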
5922 /* Gimplify a comparison between two aggregate objects of integral scalar
5923 mode as a comparison between the bitwise equivalent scalar values. */
5924
5925 static enum gimplify_status
5926 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5927 {
5928 location_t loc = EXPR_LOCATION (*expr_p);
5929 tree op0 = TREE_OPERAND (*expr_p, 0);
5930 tree op1 = TREE_OPERAND (*expr_p, 1);
5931
5932 tree type = TREE_TYPE (op0);
5933 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5934
5935 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5936 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5937
5938 *expr_p
5939 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5940
5941 return GS_OK;
5942 }
5943
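/* A sketch under assumed input (not from this file): on a target where

     struct P { short lo, hi; };

   has a single integral mode, a comparison of two P objects is
   rewritten as a comparison of the bitwise images, roughly

     VIEW_CONVERT_EXPR<int>(p1) == VIEW_CONVERT_EXPR<int>(p2)

   using the scalar type the type_for_mode langhook returns.  */
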
5944 /* Gimplify an expression sequence. This function gimplifies each
5945 expression and rewrites the original expression with the last
5946 expression of the sequence in GIMPLE form.
5947
5948 PRE_P points to the list where the side effects for all the
5949 expressions in the sequence will be emitted.
5950
5951 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5952
5953 static enum gimplify_status
5954 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5955 {
5956 tree t = *expr_p;
5957
5958 do
5959 {
5960 tree *sub_p = &TREE_OPERAND (t, 0);
5961
5962 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5963 gimplify_compound_expr (sub_p, pre_p, false);
5964 else
5965 gimplify_stmt (sub_p, pre_p);
5966
5967 t = TREE_OPERAND (t, 1);
5968 }
5969 while (TREE_CODE (t) == COMPOUND_EXPR);
5970
5971 *expr_p = t;
5972 if (want_value)
5973 return GS_OK;
5974 else
5975 {
5976 gimplify_stmt (expr_p, pre_p);
5977 return GS_ALL_DONE;
5978 }
5979 }
5980
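/* A sketch, assuming a C expression (not from this file):

     x = (a++, b++, c);

   Every operand of the COMPOUND_EXPR chain except the last is
   gimplified as a statement into PRE_P, roughly

     a = a + 1;
     b = b + 1;

   and *EXPR_P is rewritten to the final operand "c".  */
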
5981 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5982 gimplify. After gimplification, EXPR_P will point to a new temporary
5983 that holds the original value of the SAVE_EXPR node.
5984
5985 PRE_P points to the list where side effects that must happen before
5986 *EXPR_P should be stored. */
5987
5988 static enum gimplify_status
5989 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5990 {
5991 enum gimplify_status ret = GS_ALL_DONE;
5992 tree val;
5993
5994 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5995 val = TREE_OPERAND (*expr_p, 0);
5996
5997 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5998 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5999 {
6000 /* The operand may be a void-valued expression. It is
6001 being executed only for its side-effects. */
6002 if (TREE_TYPE (val) == void_type_node)
6003 {
6004 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6005 is_gimple_stmt, fb_none);
6006 val = NULL;
6007 }
6008 else
6009 /* The temporary may not be an SSA name as later abnormal and EH
6010 control flow may invalidate use/def domination. When in SSA
6011 form then assume there are no such issues and SAVE_EXPRs only
6012 appear via GENERIC foldings. */
6013 val = get_initialized_tmp_var (val, pre_p, post_p,
6014 gimple_in_ssa_p (cfun));
6015
6016 TREE_OPERAND (*expr_p, 0) = val;
6017 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6018 }
6019
6020 *expr_p = val;
6021
6022 return ret;
6023 }
6024
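/* A sketch of the resolution, with an assumed example (not from this
   file): for a SAVE_EXPR <n * 4> that occurs several times, the first
   gimplification emits

     tmp = n * 4;

   into PRE_P and marks the node resolved; every later occurrence then
   gimplifies to plain "tmp", so the operand is evaluated exactly
   once.  */
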
6025 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6026
6027 unary_expr
6028 : ...
6029 | '&' varname
6030 ...
6031
6032 PRE_P points to the list where side effects that must happen before
6033 *EXPR_P should be stored.
6034
6035 POST_P points to the list where side effects that must happen after
6036 *EXPR_P should be stored. */
6037
6038 static enum gimplify_status
6039 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6040 {
6041 tree expr = *expr_p;
6042 tree op0 = TREE_OPERAND (expr, 0);
6043 enum gimplify_status ret;
6044 location_t loc = EXPR_LOCATION (*expr_p);
6045
6046 switch (TREE_CODE (op0))
6047 {
6048 case INDIRECT_REF:
6049 do_indirect_ref:
6050 /* Check if we are dealing with an expression of the form '&*ptr'.
6051 While the front end folds away '&*ptr' into 'ptr', these
6052 expressions may be generated internally by the compiler (e.g.,
6053 builtins like __builtin_va_end). */
6054 /* Caution: the silent array decomposition semantics we allow for
6055 ADDR_EXPR mean we can't always discard the pair. */
6056 /* Gimplification of the ADDR_EXPR operand may drop
6057 cv-qualification conversions, so make sure we add them if
6058 needed. */
6059 {
6060 tree op00 = TREE_OPERAND (op0, 0);
6061 tree t_expr = TREE_TYPE (expr);
6062 tree t_op00 = TREE_TYPE (op00);
6063
6064 if (!useless_type_conversion_p (t_expr, t_op00))
6065 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6066 *expr_p = op00;
6067 ret = GS_OK;
6068 }
6069 break;
6070
6071 case VIEW_CONVERT_EXPR:
6072 /* Take the address of our operand and then convert it to the type of
6073 this ADDR_EXPR.
6074
6075 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6076 all clear. The impact of this transformation is even less clear. */
6077
6078 /* If the operand is a useless conversion, look through it. Doing so
6079 guarantees that the ADDR_EXPR and its operand will remain of the
6080 same type. */
6081 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6082 op0 = TREE_OPERAND (op0, 0);
6083
6084 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6085 build_fold_addr_expr_loc (loc,
6086 TREE_OPERAND (op0, 0)));
6087 ret = GS_OK;
6088 break;
6089
6090 case MEM_REF:
6091 if (integer_zerop (TREE_OPERAND (op0, 1)))
6092 goto do_indirect_ref;
6093
6094 /* fall through */
6095
6096 default:
6097 /* If we see a call to a declared builtin or see its address
6098 being taken (we can unify those cases here) then we can mark
6099 the builtin for implicit generation by GCC. */
6100 if (TREE_CODE (op0) == FUNCTION_DECL
6101 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6102 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6103 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6104
6105 /* We use fb_either here because the C frontend sometimes takes
6106 the address of a call that returns a struct; see
6107 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6108 the implied temporary explicit. */
6109
6110 /* Make the operand addressable. */
6111 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6112 is_gimple_addressable, fb_either);
6113 if (ret == GS_ERROR)
6114 break;
6115
6116 /* Then mark it. Beware that it may not be possible to do so directly
6117 if a temporary has been created by the gimplification. */
6118 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6119
6120 op0 = TREE_OPERAND (expr, 0);
6121
6122 /* For various reasons, the gimplification of the expression
6123 may have made a new INDIRECT_REF. */
6124 if (TREE_CODE (op0) == INDIRECT_REF)
6125 goto do_indirect_ref;
6126
6127 mark_addressable (TREE_OPERAND (expr, 0));
6128
6129 /* The FEs may end up building ADDR_EXPRs early on a decl with
6130 an incomplete type. Re-build ADDR_EXPRs in canonical form
6131 here. */
6132 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6133 *expr_p = build_fold_addr_expr (op0);
6134
6135 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6136 recompute_tree_invariant_for_addr_expr (*expr_p);
6137
6138 /* If we re-built the ADDR_EXPR add a conversion to the original type
6139 if required. */
6140 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6141 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6142
6143 break;
6144 }
6145
6146 return ret;
6147 }
6148
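/* A sketch, assuming internally generated C-like input (not from this
   file): an expression of the form

     q = &*p;

   takes the INDIRECT_REF case above and gimplifies simply to

     q = p;

   with a conversion inserted only if the pointer types differ beyond a
   useless conversion.  */
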
6149 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6150 value; output operands should be a gimple lvalue. */
6151
6152 static enum gimplify_status
6153 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6154 {
6155 tree expr;
6156 int noutputs;
6157 const char **oconstraints;
6158 int i;
6159 tree link;
6160 const char *constraint;
6161 bool allows_mem, allows_reg, is_inout;
6162 enum gimplify_status ret, tret;
6163 gasm *stmt;
6164 vec<tree, va_gc> *inputs;
6165 vec<tree, va_gc> *outputs;
6166 vec<tree, va_gc> *clobbers;
6167 vec<tree, va_gc> *labels;
6168 tree link_next;
6169
6170 expr = *expr_p;
6171 noutputs = list_length (ASM_OUTPUTS (expr));
6172 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6173
6174 inputs = NULL;
6175 outputs = NULL;
6176 clobbers = NULL;
6177 labels = NULL;
6178
6179 ret = GS_ALL_DONE;
6180 link_next = NULL_TREE;
6181 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6182 {
6183 bool ok;
6184 size_t constraint_len;
6185
6186 link_next = TREE_CHAIN (link);
6187
6188 oconstraints[i]
6189 = constraint
6190 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6191 constraint_len = strlen (constraint);
6192 if (constraint_len == 0)
6193 continue;
6194
6195 ok = parse_output_constraint (&constraint, i, 0, 0,
6196 &allows_mem, &allows_reg, &is_inout);
6197 if (!ok)
6198 {
6199 ret = GS_ERROR;
6200 is_inout = false;
6201 }
6202
6203 /* If we can't make copies, we can only accept memory. */
6204 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6205 {
6206 if (allows_mem)
6207 allows_reg = 0;
6208 else
6209 {
6210 error ("impossible constraint in %<asm%>");
6211 error ("non-memory output %d must stay in memory", i);
6212 return GS_ERROR;
6213 }
6214 }
6215
6216 if (!allows_reg && allows_mem)
6217 mark_addressable (TREE_VALUE (link));
6218
6219 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6220 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6221 fb_lvalue | fb_mayfail);
6222 if (tret == GS_ERROR)
6223 {
6224 error ("invalid lvalue in %<asm%> output %d", i);
6225 ret = tret;
6226 }
6227
6228 /* If the constraint does not allow memory, make sure we gimplify
6229 the operand to a register if it is not one already but its base
6230 is. This happens for complex and vector components. */
6231 if (!allows_mem)
6232 {
6233 tree op = TREE_VALUE (link);
6234 if (! is_gimple_val (op)
6235 && is_gimple_reg_type (TREE_TYPE (op))
6236 && is_gimple_reg (get_base_address (op)))
6237 {
6238 tree tem = create_tmp_reg (TREE_TYPE (op));
6239 tree ass;
6240 if (is_inout)
6241 {
6242 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6243 tem, unshare_expr (op));
6244 gimplify_and_add (ass, pre_p);
6245 }
6246 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6247 gimplify_and_add (ass, post_p);
6248
6249 TREE_VALUE (link) = tem;
6250 tret = GS_OK;
6251 }
6252 }
6253
6254 vec_safe_push (outputs, link);
6255 TREE_CHAIN (link) = NULL_TREE;
6256
6257 if (is_inout)
6258 {
6259 /* An input/output operand. To give the optimizers more
6260 flexibility, split it into separate input and output
6261 operands. */
6262 tree input;
6263 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6264 char buf[11];
6265
6266 /* Turn the in/out constraint into an output constraint. */
6267 char *p = xstrdup (constraint);
6268 p[0] = '=';
6269 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6270
6271 /* And add a matching input constraint. */
6272 if (allows_reg)
6273 {
6274 sprintf (buf, "%u", i);
6275
6276 /* If there are multiple alternatives in the constraint,
6277 handle each of them individually. Those that allow register
6278 will be replaced with operand number, the others will stay
6279 unchanged. */
6280 if (strchr (p, ',') != NULL)
6281 {
6282 size_t len = 0, buflen = strlen (buf);
6283 char *beg, *end, *str, *dst;
6284
6285 for (beg = p + 1;;)
6286 {
6287 end = strchr (beg, ',');
6288 if (end == NULL)
6289 end = strchr (beg, '\0');
6290 if ((size_t) (end - beg) < buflen)
6291 len += buflen + 1;
6292 else
6293 len += end - beg + 1;
6294 if (*end)
6295 beg = end + 1;
6296 else
6297 break;
6298 }
6299
6300 str = (char *) alloca (len);
6301 for (beg = p + 1, dst = str;;)
6302 {
6303 const char *tem;
6304 bool mem_p, reg_p, inout_p;
6305
6306 end = strchr (beg, ',');
6307 if (end)
6308 *end = '\0';
6309 beg[-1] = '=';
6310 tem = beg - 1;
6311 parse_output_constraint (&tem, i, 0, 0,
6312 &mem_p, &reg_p, &inout_p);
6313 if (dst != str)
6314 *dst++ = ',';
6315 if (reg_p)
6316 {
6317 memcpy (dst, buf, buflen);
6318 dst += buflen;
6319 }
6320 else
6321 {
6322 if (end)
6323 len = end - beg;
6324 else
6325 len = strlen (beg);
6326 memcpy (dst, beg, len);
6327 dst += len;
6328 }
6329 if (end)
6330 beg = end + 1;
6331 else
6332 break;
6333 }
6334 *dst = '\0';
6335 input = build_string (dst - str, str);
6336 }
6337 else
6338 input = build_string (strlen (buf), buf);
6339 }
6340 else
6341 input = build_string (constraint_len - 1, constraint + 1);
6342
6343 free (p);
6344
6345 input = build_tree_list (build_tree_list (NULL_TREE, input),
6346 unshare_expr (TREE_VALUE (link)));
6347 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6348 }
6349 }
6350
6351 link_next = NULL_TREE;
6352 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6353 {
6354 link_next = TREE_CHAIN (link);
6355 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6356 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6357 oconstraints, &allows_mem, &allows_reg);
6358
6359 /* If we can't make copies, we can only accept memory. */
6360 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6361 {
6362 if (allows_mem)
6363 allows_reg = 0;
6364 else
6365 {
6366 error ("impossible constraint in %<asm%>");
6367 error ("non-memory input %d must stay in memory", i);
6368 return GS_ERROR;
6369 }
6370 }
6371
6372 /* If the operand is a memory input, it should be an lvalue. */
6373 if (!allows_reg && allows_mem)
6374 {
6375 tree inputv = TREE_VALUE (link);
6376 STRIP_NOPS (inputv);
6377 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6378 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6379 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6380 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6381 || TREE_CODE (inputv) == MODIFY_EXPR)
6382 TREE_VALUE (link) = error_mark_node;
6383 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6384 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6385 if (tret != GS_ERROR)
6386 {
6387 /* Unlike output operands, memory inputs are not guaranteed
6388 to be lvalues by the FE, and while the expressions are
6389 marked addressable there, if it is e.g. a statement
6390 expression, temporaries in it might not end up being
6391 addressable. They might already be used in the IL and thus
6392 it is too late to make them addressable now though. */
6393 tree x = TREE_VALUE (link);
6394 while (handled_component_p (x))
6395 x = TREE_OPERAND (x, 0);
6396 if (TREE_CODE (x) == MEM_REF
6397 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6398 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6399 if ((VAR_P (x)
6400 || TREE_CODE (x) == PARM_DECL
6401 || TREE_CODE (x) == RESULT_DECL)
6402 && !TREE_ADDRESSABLE (x)
6403 && is_gimple_reg (x))
6404 {
6405 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6406 input_location), 0,
6407 "memory input %d is not directly addressable",
6408 i);
6409 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6410 }
6411 }
6412 mark_addressable (TREE_VALUE (link));
6413 if (tret == GS_ERROR)
6414 {
6415 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6416 "memory input %d is not directly addressable", i);
6417 ret = tret;
6418 }
6419 }
6420 else
6421 {
6422 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6423 is_gimple_asm_val, fb_rvalue);
6424 if (tret == GS_ERROR)
6425 ret = tret;
6426 }
6427
6428 TREE_CHAIN (link) = NULL_TREE;
6429 vec_safe_push (inputs, link);
6430 }
6431
6432 link_next = NULL_TREE;
6433 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6434 {
6435 link_next = TREE_CHAIN (link);
6436 TREE_CHAIN (link) = NULL_TREE;
6437 vec_safe_push (clobbers, link);
6438 }
6439
6440 link_next = NULL_TREE;
6441 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6442 {
6443 link_next = TREE_CHAIN (link);
6444 TREE_CHAIN (link) = NULL_TREE;
6445 vec_safe_push (labels, link);
6446 }
6447
6448 /* Do not add ASMs with errors to the gimple IL stream. */
6449 if (ret != GS_ERROR)
6450 {
6451 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6452 inputs, outputs, clobbers, labels);
6453
6454 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6455 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6456 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6457
6458 gimplify_seq_add_stmt (pre_p, stmt);
6459 }
6460
6461 return ret;
6462 }
6463
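/* A sketch of the in/out operand splitting, using an assumed GNU C
   example (not from this file):

     asm ("incl %0" : "+r" (x));

   is rewritten as if it had been written

     asm ("incl %0" : "=r" (x) : "0" (x));

   i.e. a pure output plus a matching-numbered input, giving the
   optimizers independent operands to work with.  */
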
6464 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6465 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6466 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6467 return to this function.
6468
6469 FIXME should we complexify the prequeue handling instead? Or use flags
6470 for all the cleanups and let the optimizer tighten them up? The current
6471 code seems pretty fragile; it will break on a cleanup within any
6472 non-conditional nesting. But any such nesting would be broken, anyway;
6473 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6474 and continues out of it. We can do that at the RTL level, though, so
6475 having an optimizer to tighten up try/finally regions would be a Good
6476 Thing. */
6477
6478 static enum gimplify_status
6479 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6480 {
6481 gimple_stmt_iterator iter;
6482 gimple_seq body_sequence = NULL;
6483
6484 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6485
6486 /* We only care about the number of conditions between the innermost
6487 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6488 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6489 int old_conds = gimplify_ctxp->conditions;
6490 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6491 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6492 gimplify_ctxp->conditions = 0;
6493 gimplify_ctxp->conditional_cleanups = NULL;
6494 gimplify_ctxp->in_cleanup_point_expr = true;
6495
6496 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6497
6498 gimplify_ctxp->conditions = old_conds;
6499 gimplify_ctxp->conditional_cleanups = old_cleanups;
6500 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6501
6502 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6503 {
6504 gimple *wce = gsi_stmt (iter);
6505
6506 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6507 {
6508 if (gsi_one_before_end_p (iter))
6509 {
6510 /* Note that gsi_insert_seq_before and gsi_remove do not
6511 scan operands, unlike some other sequence mutators. */
6512 if (!gimple_wce_cleanup_eh_only (wce))
6513 gsi_insert_seq_before_without_update (&iter,
6514 gimple_wce_cleanup (wce),
6515 GSI_SAME_STMT);
6516 gsi_remove (&iter, true);
6517 break;
6518 }
6519 else
6520 {
6521 gtry *gtry;
6522 gimple_seq seq;
6523 enum gimple_try_flags kind;
6524
6525 if (gimple_wce_cleanup_eh_only (wce))
6526 kind = GIMPLE_TRY_CATCH;
6527 else
6528 kind = GIMPLE_TRY_FINALLY;
6529 seq = gsi_split_seq_after (iter);
6530
6531 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6532 /* Do not use gsi_replace here, as it may scan operands.
6533 We want to do a simple structural modification only. */
6534 gsi_set_stmt (&iter, gtry);
6535 iter = gsi_start (gtry->eval);
6536 }
6537 }
6538 else
6539 gsi_next (&iter);
6540 }
6541
6542 gimplify_seq_add_seq (pre_p, body_sequence);
6543 if (temp)
6544 {
6545 *expr_p = temp;
6546 return GS_OK;
6547 }
6548 else
6549 {
6550 *expr_p = NULL;
6551 return GS_ALL_DONE;
6552 }
6553 }
6554
6555 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6556 is the cleanup action required. EH_ONLY is true if the cleanup should
6557 only be executed if an exception is thrown, not on normal exit.
6558 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6559 only valid for clobbers. */
6560
6561 static void
6562 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6563 bool force_uncond = false)
6564 {
6565 gimple *wce;
6566 gimple_seq cleanup_stmts = NULL;
6567
6568 /* Errors can result in improperly nested cleanups, which in turn cause
6569 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6570 if (seen_error ())
6571 return;
6572
6573 if (gimple_conditional_context ())
6574 {
6575 /* If we're in a conditional context, this is more complex. We only
6576 want to run the cleanup if we actually ran the initialization that
6577 necessitates it, but we want to run it after the end of the
6578 conditional context. So we wrap the try/finally around the
6579 condition and use a flag to determine whether or not to actually
6580 run the destructor. Thus
6581
6582 test ? f(A()) : 0
6583
6584 becomes (approximately)
6585
6586 flag = 0;
6587 try {
6588 if (test) { A::A(temp); flag = 1; val = f(temp); }
6589 else { val = 0; }
6590 } finally {
6591 if (flag) A::~A(temp);
6592 }
6593 val
6594 */
6595 if (force_uncond)
6596 {
6597 gimplify_stmt (&cleanup, &cleanup_stmts);
6598 wce = gimple_build_wce (cleanup_stmts);
6599 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6600 }
6601 else
6602 {
6603 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6604 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6605 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6606
6607 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6608 gimplify_stmt (&cleanup, &cleanup_stmts);
6609 wce = gimple_build_wce (cleanup_stmts);
6610
6611 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6612 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6613 gimplify_seq_add_stmt (pre_p, ftrue);
6614
6615 /* Because of this manipulation, and the EH edges that jump
6616 threading cannot redirect, the temporary (VAR) will appear
6617 to be used uninitialized. Don't warn. */
6618 TREE_NO_WARNING (var) = 1;
6619 }
6620 }
6621 else
6622 {
6623 gimplify_stmt (&cleanup, &cleanup_stmts);
6624 wce = gimple_build_wce (cleanup_stmts);
6625 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6626 gimplify_seq_add_stmt (pre_p, wce);
6627 }
6628 }
6629
6630 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6631
6632 static enum gimplify_status
6633 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6634 {
6635 tree targ = *expr_p;
6636 tree temp = TARGET_EXPR_SLOT (targ);
6637 tree init = TARGET_EXPR_INITIAL (targ);
6638 enum gimplify_status ret;
6639
6640 bool unpoison_empty_seq = false;
6641 gimple_stmt_iterator unpoison_it;
6642
6643 if (init)
6644 {
6645 tree cleanup = NULL_TREE;
6646
6647 /* TARGET_EXPR temps aren't part of the enclosing block, so add the
6648 temporary to the temps list. Also handle variable-length TARGET_EXPRs. */
6649 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6650 {
6651 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6652 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6653 gimplify_vla_decl (temp, pre_p);
6654 }
6655 else
6656 {
6657 /* Save the location where we need to place unpoisoning. The variable
6658 may later turn out to satisfy needs_to_live_in_memory. */
6659 unpoison_it = gsi_last (*pre_p);
6660 unpoison_empty_seq = gsi_end_p (unpoison_it);
6661
6662 gimple_add_tmp_var (temp);
6663 }
6664
6665 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6666 expression is supposed to initialize the slot. */
6667 if (VOID_TYPE_P (TREE_TYPE (init)))
6668 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6669 else
6670 {
6671 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6672 init = init_expr;
6673 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6674 init = NULL;
6675 ggc_free (init_expr);
6676 }
6677 if (ret == GS_ERROR)
6678 {
6679 /* PR c++/28266 Make sure this is expanded only once. */
6680 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6681 return GS_ERROR;
6682 }
6683 if (init)
6684 gimplify_and_add (init, pre_p);
6685
6686 /* If needed, push the cleanup for the temp. */
6687 if (TARGET_EXPR_CLEANUP (targ))
6688 {
6689 if (CLEANUP_EH_ONLY (targ))
6690 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6691 CLEANUP_EH_ONLY (targ), pre_p);
6692 else
6693 cleanup = TARGET_EXPR_CLEANUP (targ);
6694 }
6695
6696 /* Add a clobber for the temporary going out of scope, like
6697 gimplify_bind_expr. */
6698 if (gimplify_ctxp->in_cleanup_point_expr
6699 && needs_to_live_in_memory (temp))
6700 {
6701 if (flag_stack_reuse == SR_ALL)
6702 {
6703 tree clobber = build_clobber (TREE_TYPE (temp));
6704 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6705 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6706 }
6707 if (asan_poisoned_variables
6708 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6709 && !TREE_STATIC (temp)
6710 && dbg_cnt (asan_use_after_scope)
6711 && !gimplify_omp_ctxp)
6712 {
6713 tree asan_cleanup = build_asan_poison_call_expr (temp);
6714 if (asan_cleanup)
6715 {
6716 if (unpoison_empty_seq)
6717 unpoison_it = gsi_start (*pre_p);
6718
6719 asan_poison_variable (temp, false, &unpoison_it,
6720 unpoison_empty_seq);
6721 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6722 }
6723 }
6724 }
6725 if (cleanup)
6726 gimple_push_cleanup (temp, cleanup, false, pre_p);
6727
6728 /* Only expand this once. */
6729 TREE_OPERAND (targ, 3) = init;
6730 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6731 }
6732 else
6733 /* We should have expanded this before. */
6734 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6735
6736 *expr_p = temp;
6737 return GS_OK;
6738 }
6739
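/* A sketch, assuming C++ input (not from this file): in a
   full-expression such as

     f (A ());

   the temporary's TARGET_EXPR is not the RHS of an INIT_EXPR, so
   gimplify_target_expr adds the slot as a local temporary, gimplifies
   the initializer into PRE_P, and pushes the destructor (and, when
   stack slots may be reused, a clobber) as a cleanup.  */
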
6740 /* Gimplification of expression trees. */
6741
6742 /* Gimplify an expression which appears at statement context. The
6743 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6744 NULL, a new sequence is allocated.
6745
6746 Return true if we actually added a statement to the queue. */
6747
6748 bool
6749 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6750 {
6751 gimple_seq_node last;
6752
6753 last = gimple_seq_last (*seq_p);
6754 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6755 return last != gimple_seq_last (*seq_p);
6756 }
6757
6758 /* Add FIRSTPRIVATE entries for DECL to CTX and to the surrounding OpenMP
6759 parallels. If entries already exist, force them to be some flavor of
6760 private. If there is no enclosing parallel, do nothing. */
6761
6762 void
6763 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6764 {
6765 splay_tree_node n;
6766
6767 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6768 return;
6769
6770 do
6771 {
6772 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6773 if (n != NULL)
6774 {
6775 if (n->value & GOVD_SHARED)
6776 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6777 else if (n->value & GOVD_MAP)
6778 n->value |= GOVD_MAP_TO_ONLY;
6779 else
6780 return;
6781 }
6782 else if ((ctx->region_type & ORT_TARGET) != 0)
6783 {
6784 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6785 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6786 else
6787 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6788 }
6789 else if (ctx->region_type != ORT_WORKSHARE
6790 && ctx->region_type != ORT_TASKGROUP
6791 && ctx->region_type != ORT_SIMD
6792 && ctx->region_type != ORT_ACC
6793 && !(ctx->region_type & ORT_TARGET_DATA))
6794 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6795
6796 ctx = ctx->outer_context;
6797 }
6798 while (ctx);
6799 }
6800
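/* A sketch with an assumed OpenMP example (not from this file):

     void f (int n)
     {
       int vla[n];
     #pragma omp parallel
       use (vla);
     }

   The gimplified size of "vla" must be available inside the region, so
   the walk above adds it as FIRSTPRIVATE to the enclosing parallel, or
   forces an existing SHARED entry over to FIRSTPRIVATE.  */
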
6801 /* Similarly for each of the type sizes of TYPE. */
6802
6803 static void
6804 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6805 {
6806 if (type == NULL || type == error_mark_node)
6807 return;
6808 type = TYPE_MAIN_VARIANT (type);
6809
6810 if (ctx->privatized_types->add (type))
6811 return;
6812
6813 switch (TREE_CODE (type))
6814 {
6815 case INTEGER_TYPE:
6816 case ENUMERAL_TYPE:
6817 case BOOLEAN_TYPE:
6818 case REAL_TYPE:
6819 case FIXED_POINT_TYPE:
6820 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6821 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6822 break;
6823
6824 case ARRAY_TYPE:
6825 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6826 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6827 break;
6828
6829 case RECORD_TYPE:
6830 case UNION_TYPE:
6831 case QUAL_UNION_TYPE:
6832 {
6833 tree field;
6834 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6835 if (TREE_CODE (field) == FIELD_DECL)
6836 {
6837 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6838 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6839 }
6840 }
6841 break;
6842
6843 case POINTER_TYPE:
6844 case REFERENCE_TYPE:
6845 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6846 break;
6847
6848 default:
6849 break;
6850 }
6851
6852 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6853 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6854 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6855 }
6856
6857 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6858
6859 static void
6860 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6861 {
6862 splay_tree_node n;
6863 unsigned int nflags;
6864 tree t;
6865
6866 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6867 return;
6868
6869 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6870 there are constructors involved somewhere. The exception is a shared
6871 clause: there is nothing privatized in that case. */
6872 if ((flags & GOVD_SHARED) == 0
6873 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6874 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6875 flags |= GOVD_SEEN;
6876
6877 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6878 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6879 {
6880 /* We shouldn't be re-adding the decl with the same data
6881 sharing class. */
6882 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6883 nflags = n->value | flags;
6884 /* The only combination of data sharing classes we should see is
6885 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6886 reduction variables to be used in data sharing clauses. */
6887 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6888 || ((nflags & GOVD_DATA_SHARE_CLASS)
6889 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6890 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6891 n->value = nflags;
6892 return;
6893 }
6894
6895 /* When adding a variable-sized variable, we have to handle all sorts
6896 of additional bits of data: the pointer replacement variable, and
6897 the parameters of the type. */
6898 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6899 {
6900 /* Add the pointer replacement variable as PRIVATE if the variable
6901 replacement is private, else FIRSTPRIVATE since we'll need the
6902 address of the original variable either for SHARED, or for the
6903 copy into or out of the context. */
6904 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6905 {
6906 if (flags & GOVD_MAP)
6907 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6908 else if (flags & GOVD_PRIVATE)
6909 nflags = GOVD_PRIVATE;
6910 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6911 && (flags & GOVD_FIRSTPRIVATE))
6912 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6913 else
6914 nflags = GOVD_FIRSTPRIVATE;
6915 nflags |= flags & GOVD_SEEN;
6916 t = DECL_VALUE_EXPR (decl);
6917 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6918 t = TREE_OPERAND (t, 0);
6919 gcc_assert (DECL_P (t));
6920 omp_add_variable (ctx, t, nflags);
6921 }
6922
6923 /* Add all of the variable and type parameters (which should have
6924 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6925 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6926 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6927 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6928
6929 /* The variable-sized variable itself is never SHARED, only some form
6930 of PRIVATE. The sharing would take place via the pointer variable
6931 which we remapped above. */
6932 if (flags & GOVD_SHARED)
6933 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6934 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6935
6936 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6937 alloca statement we generate for the variable, so make sure it
6938 is available. This isn't automatically needed for the SHARED
6939 case, since we won't be allocating local storage then.
6940 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6941 in this case omp_notice_variable will be called later
6942 on when it is gimplified. */
6943 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6944 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6945 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6946 }
6947 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6948 && lang_hooks.decls.omp_privatize_by_reference (decl))
6949 {
6950 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6951
6952 /* Similar to the direct variable sized case above, we'll need the
6953 size of references being privatized. */
6954 if ((flags & GOVD_SHARED) == 0)
6955 {
6956 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6957 if (DECL_P (t))
6958 omp_notice_variable (ctx, t, true);
6959 }
6960 }
6961
6962 if (n != NULL)
6963 n->value |= flags;
6964 else
6965 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6966
6967 /* For reductions clauses in OpenACC loop directives, by default create a
6968 copy clause on the enclosing parallel construct for carrying back the
6969 results. */
6970 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6971 {
6972 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6973 while (outer_ctx)
6974 {
6975 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6976 if (n != NULL)
6977 {
6978 /* Ignore local variables and explicitly declared clauses. */
6979 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6980 break;
6981 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6982 {
6983 /* According to the OpenACC spec, such a reduction variable
6984 should already have a copy map on a kernels construct;
6985 verify that here. */
6986 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6987 && (n->value & GOVD_MAP));
6988 }
6989 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6990 {
6991 /* Remove firstprivate and make it a copy map. */
6992 n->value &= ~GOVD_FIRSTPRIVATE;
6993 n->value |= GOVD_MAP;
6994 }
6995 }
6996 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6997 {
6998 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6999 GOVD_MAP | GOVD_SEEN);
7000 break;
7001 }
7002 outer_ctx = outer_ctx->outer_context;
7003 }
7004 }
7005 }
7006
7007 /* Notice a threadprivate variable DECL used in OMP context CTX.
7008 This just prints out diagnostics about threadprivate variable uses
7009 in untied tasks. If DECL2 is non-NULL, prevent this warning
7010 on that variable. */
7011
7012 static bool
7013 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7014 tree decl2)
7015 {
7016 splay_tree_node n;
7017 struct gimplify_omp_ctx *octx;
7018
7019 for (octx = ctx; octx; octx = octx->outer_context)
7020 if ((octx->region_type & ORT_TARGET) != 0)
7021 {
7022 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7023 if (n == NULL)
7024 {
7025 error ("threadprivate variable %qE used in target region",
7026 DECL_NAME (decl));
7027 error_at (octx->location, "enclosing target region");
7028 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7029 }
7030 if (decl2)
7031 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7032 }
7033
7034 if (ctx->region_type != ORT_UNTIED_TASK)
7035 return false;
7036 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7037 if (n == NULL)
7038 {
7039 error ("threadprivate variable %qE used in untied task",
7040 DECL_NAME (decl));
7041 error_at (ctx->location, "enclosing task");
7042 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7043 }
7044 if (decl2)
7045 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7046 return false;
7047 }
7048
7049 /* Return true if global var DECL is device resident. */
7050
7051 static bool
7052 device_resident_p (tree decl)
7053 {
7054 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7055
7056 if (!attr)
7057 return false;
7058
7059 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7060 {
7061 tree c = TREE_VALUE (t);
7062 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7063 return true;
7064 }
7065
7066 return false;
7067 }
7068
7069 /* Return true if DECL has an "oacc declare target" attribute. */
7070
7071 static bool
7072 is_oacc_declared (tree decl)
7073 {
7074 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7075 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7076 return declared != NULL_TREE;
7077 }
7078
7079 /* Determine outer default flags for DECL mentioned in an OMP region
7080 but not declared in an enclosing clause.
7081
7082 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7083 remapped firstprivate instead of shared. To some extent this is
7084 addressed in omp_firstprivatize_type_sizes, but not
7085 effectively. */
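
/* For example (an illustrative sketch):

     int v = 0;
     #pragma omp parallel default (none)
       v++;    <-- error: 'v' not specified in enclosing 'parallel'
*/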
7086
7087 static unsigned
7088 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7089 bool in_code, unsigned flags)
7090 {
7091 enum omp_clause_default_kind default_kind = ctx->default_kind;
7092 enum omp_clause_default_kind kind;
7093
7094 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7095 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7096 default_kind = kind;
7097
7098 switch (default_kind)
7099 {
7100 case OMP_CLAUSE_DEFAULT_NONE:
7101 {
7102 const char *rtype;
7103
7104 if (ctx->region_type & ORT_PARALLEL)
7105 rtype = "parallel";
7106 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7107 rtype = "taskloop";
7108 else if (ctx->region_type & ORT_TASK)
7109 rtype = "task";
7110 else if (ctx->region_type & ORT_TEAMS)
7111 rtype = "teams";
7112 else
7113 gcc_unreachable ();
7114
7115 error ("%qE not specified in enclosing %qs",
7116 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7117 error_at (ctx->location, "enclosing %qs", rtype);
7118 }
7119 /* FALLTHRU */
7120 case OMP_CLAUSE_DEFAULT_SHARED:
7121 flags |= GOVD_SHARED;
7122 break;
7123 case OMP_CLAUSE_DEFAULT_PRIVATE:
7124 flags |= GOVD_PRIVATE;
7125 break;
7126 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7127 flags |= GOVD_FIRSTPRIVATE;
7128 break;
7129 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7130 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7131 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7132 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7133 {
7134 omp_notice_variable (octx, decl, in_code);
7135 for (; octx; octx = octx->outer_context)
7136 {
7137 splay_tree_node n2;
7138
7139 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7140 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7141 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7142 continue;
7143 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7144 {
7145 flags |= GOVD_FIRSTPRIVATE;
7146 goto found_outer;
7147 }
7148 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7149 {
7150 flags |= GOVD_SHARED;
7151 goto found_outer;
7152 }
7153 }
7154 }
7155
7156 if (TREE_CODE (decl) == PARM_DECL
7157 || (!is_global_var (decl)
7158 && DECL_CONTEXT (decl) == current_function_decl))
7159 flags |= GOVD_FIRSTPRIVATE;
7160 else
7161 flags |= GOVD_SHARED;
7162 found_outer:
7163 break;
7164
7165 default:
7166 gcc_unreachable ();
7167 }
7168
7169 return flags;
7170 }
7171
7172
7173 /* Determine outer default flags for DECL mentioned in an OACC region
7174 but not declared in an enclosing clause. */
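
/* Sketch of the resulting defaults (mirrors the switch below; 's' and
   'a' are made-up names):

     int s;  int a[100];
     #pragma acc kernels    s -> copy           a -> present_or_copy
     #pragma acc parallel   s -> firstprivate   a -> present_or_copy

   with 'default (present)' the aggregate cases become 'present'.  */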
7175
7176 static unsigned
7177 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7178 {
7179 const char *rkind;
7180 bool on_device = false;
7181 bool declared = is_oacc_declared (decl);
7182 tree type = TREE_TYPE (decl);
7183
7184 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7185 type = TREE_TYPE (type);
7186
7187 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7188 && is_global_var (decl)
7189 && device_resident_p (decl))
7190 {
7191 on_device = true;
7192 flags |= GOVD_MAP_TO_ONLY;
7193 }
7194
7195 switch (ctx->region_type)
7196 {
7197 case ORT_ACC_KERNELS:
7198 rkind = "kernels";
7199
7200 if (AGGREGATE_TYPE_P (type))
7201 {
7202 /* Aggregates default to 'present_or_copy', or 'present'. */
7203 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7204 flags |= GOVD_MAP;
7205 else
7206 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7207 }
7208 else
7209 /* Scalars default to 'copy'. */
7210 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7211
7212 break;
7213
7214 case ORT_ACC_PARALLEL:
7215 rkind = "parallel";
7216
7217 if (on_device || declared)
7218 flags |= GOVD_MAP;
7219 else if (AGGREGATE_TYPE_P (type))
7220 {
7221 /* Aggregates default to 'present_or_copy', or 'present'. */
7222 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7223 flags |= GOVD_MAP;
7224 else
7225 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7226 }
7227 else
7228 /* Scalars default to 'firstprivate'. */
7229 flags |= GOVD_FIRSTPRIVATE;
7230
7231 break;
7232
7233 default:
7234 gcc_unreachable ();
7235 }
7236
7237 if (DECL_ARTIFICIAL (decl))
7238 ; /* We can get compiler-generated decls, and should not complain
7239 about them. */
7240 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7241 {
7242 error ("%qE not specified in enclosing OpenACC %qs construct",
7243 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7244 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7245 }
7246 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7247 ; /* Handled above. */
7248 else
7249 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7250
7251 return flags;
7252 }
7253
7254 /* Record the fact that DECL was used within the OMP context CTX.
7255 IN_CODE is true when real code uses DECL, and false when we should
7256 merely emit default(none) errors. Return true if DECL is going to
7257 be remapped and thus DECL shouldn't be gimplified into its
7258 DECL_VALUE_EXPR (if any). */
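
/* Illustrative common path (a sketch, not covering the target and
   OpenACC branches below; 'use' is a made-up call):

     int x;
     #pragma omp parallel
       use (x);

   noticing 'x' inside the parallel finds no explicit clause, so
   omp_default_clause supplies GOVD_SHARED, the variable is recorded
   with GOVD_SEEN, and the use is propagated to outer contexts.  */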
7259
7260 static bool
7261 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7262 {
7263 splay_tree_node n;
7264 unsigned flags = in_code ? GOVD_SEEN : 0;
7265 bool ret = false, shared;
7266
7267 if (error_operand_p (decl))
7268 return false;
7269
7270 if (ctx->region_type == ORT_NONE)
7271 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7272
7273 if (is_global_var (decl))
7274 {
7275 /* Threadprivate variables are predetermined. */
7276 if (DECL_THREAD_LOCAL_P (decl))
7277 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7278
7279 if (DECL_HAS_VALUE_EXPR_P (decl))
7280 {
7281 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7282
7283 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7284 return omp_notice_threadprivate_variable (ctx, decl, value);
7285 }
7286
7287 if (gimplify_omp_ctxp->outer_context == NULL
7288 && VAR_P (decl)
7289 && oacc_get_fn_attrib (current_function_decl))
7290 {
7291 location_t loc = DECL_SOURCE_LOCATION (decl);
7292
7293 if (lookup_attribute ("omp declare target link",
7294 DECL_ATTRIBUTES (decl)))
7295 {
7296 error_at (loc,
7297 "%qE with %<link%> clause used in %<routine%> function",
7298 DECL_NAME (decl));
7299 return false;
7300 }
7301 else if (!lookup_attribute ("omp declare target",
7302 DECL_ATTRIBUTES (decl)))
7303 {
7304 error_at (loc,
7305 "%qE requires a %<declare%> directive for use "
7306 "in a %<routine%> function", DECL_NAME (decl));
7307 return false;
7308 }
7309 }
7310 }
7311
7312 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7313 if ((ctx->region_type & ORT_TARGET) != 0)
7314 {
7315 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7316 if (n == NULL)
7317 {
7318 unsigned nflags = flags;
7319 if ((ctx->region_type & ORT_ACC) == 0)
7320 {
7321 bool is_declare_target = false;
7322 if (is_global_var (decl)
7323 && varpool_node::get_create (decl)->offloadable)
7324 {
7325 struct gimplify_omp_ctx *octx;
7326 for (octx = ctx->outer_context;
7327 octx; octx = octx->outer_context)
7328 {
7329 n = splay_tree_lookup (octx->variables,
7330 (splay_tree_key)decl);
7331 if (n
7332 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7333 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7334 break;
7335 }
7336 is_declare_target = octx == NULL;
7337 }
7338 if (!is_declare_target)
7339 {
7340 int gdmk;
7341 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7342 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7343 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7344 == POINTER_TYPE)))
7345 gdmk = GDMK_POINTER;
7346 else if (lang_hooks.decls.omp_scalar_p (decl))
7347 gdmk = GDMK_SCALAR;
7348 else
7349 gdmk = GDMK_AGGREGATE;
7350 if (ctx->defaultmap[gdmk] == 0)
7351 {
7352 tree d = lang_hooks.decls.omp_report_decl (decl);
7353 error ("%qE not specified in enclosing %<target%>",
7354 DECL_NAME (d));
7355 error_at (ctx->location, "enclosing %<target%>");
7356 }
7357 else if (ctx->defaultmap[gdmk]
7358 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7359 nflags |= ctx->defaultmap[gdmk];
7360 else
7361 {
7362 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7363 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7364 }
7365 }
7366 }
7367
7368 struct gimplify_omp_ctx *octx = ctx->outer_context;
7369 if ((ctx->region_type & ORT_ACC) && octx)
7370 {
7371 /* Look in outer OpenACC contexts to see if there's a
7372 data clause for this variable. */
7373 omp_notice_variable (octx, decl, in_code);
7374
7375 for (; octx; octx = octx->outer_context)
7376 {
7377 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7378 break;
7379 splay_tree_node n2
7380 = splay_tree_lookup (octx->variables,
7381 (splay_tree_key) decl);
7382 if (n2)
7383 {
7384 if (octx->region_type == ORT_ACC_HOST_DATA)
7385 error ("variable %qE declared in enclosing "
7386 "%<host_data%> region", DECL_NAME (decl));
7387 nflags |= GOVD_MAP;
7388 if (octx->region_type == ORT_ACC_DATA
7389 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7390 nflags |= GOVD_MAP_0LEN_ARRAY;
7391 goto found_outer;
7392 }
7393 }
7394 }
7395
7396 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7397 | GOVD_MAP_ALLOC_ONLY)) == flags)
7398 {
7399 tree type = TREE_TYPE (decl);
7400
7401 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7402 && lang_hooks.decls.omp_privatize_by_reference (decl))
7403 type = TREE_TYPE (type);
7404 if (!lang_hooks.types.omp_mappable_type (type))
7405 {
7406 error ("%qD referenced in target region does not have "
7407 "a mappable type", decl);
7408 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7409 }
7410 else
7411 {
7412 if ((ctx->region_type & ORT_ACC) != 0)
7413 nflags = oacc_default_clause (ctx, decl, flags);
7414 else
7415 nflags |= GOVD_MAP;
7416 }
7417 }
7418 found_outer:
7419 omp_add_variable (ctx, decl, nflags);
7420 }
7421 else
7422 {
7423 /* If nothing changed, there's nothing left to do. */
7424 if ((n->value & flags) == flags)
7425 return ret;
7426 flags |= n->value;
7427 n->value = flags;
7428 }
7429 goto do_outer;
7430 }
7431
7432 if (n == NULL)
7433 {
7434 if (ctx->region_type == ORT_WORKSHARE
7435 || ctx->region_type == ORT_TASKGROUP
7436 || ctx->region_type == ORT_SIMD
7437 || ctx->region_type == ORT_ACC
7438 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7439 goto do_outer;
7440
7441 flags = omp_default_clause (ctx, decl, in_code, flags);
7442
7443 if ((flags & GOVD_PRIVATE)
7444 && lang_hooks.decls.omp_private_outer_ref (decl))
7445 flags |= GOVD_PRIVATE_OUTER_REF;
7446
7447 omp_add_variable (ctx, decl, flags);
7448
7449 shared = (flags & GOVD_SHARED) != 0;
7450 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7451 goto do_outer;
7452 }
7453
7454 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7455 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7456 && DECL_SIZE (decl))
7457 {
7458 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7459 {
7460 splay_tree_node n2;
7461 tree t = DECL_VALUE_EXPR (decl);
7462 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7463 t = TREE_OPERAND (t, 0);
7464 gcc_assert (DECL_P (t));
7465 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7466 n2->value |= GOVD_SEEN;
7467 }
7468 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7469 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7470 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7471 != INTEGER_CST))
7472 {
7473 splay_tree_node n2;
7474 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7475 gcc_assert (DECL_P (t));
7476 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7477 if (n2)
7478 omp_notice_variable (ctx, t, true);
7479 }
7480 }
7481
7482 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7483 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7484
7485 /* If nothing changed, there's nothing left to do. */
7486 if ((n->value & flags) == flags)
7487 return ret;
7488 flags |= n->value;
7489 n->value = flags;
7490
7491 do_outer:
7492 /* If the variable is private in the current context, then we don't
7493 need to propagate anything to an outer context. */
7494 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7495 return ret;
7496 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7497 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7498 return ret;
7499 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7500 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7501 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7502 return ret;
7503 if (ctx->outer_context
7504 && omp_notice_variable (ctx->outer_context, decl, in_code))
7505 return true;
7506 return ret;
7507 }
7508
7509 /* Verify that DECL is private within CTX. If there's specific information
7510 to the contrary in the innermost scope, generate an error. */
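
/* For instance (illustrative):

     #pragma omp for firstprivate (i)
     for (i = 0; i < n; i++)
       ...    <-- error: iteration variable 'i' should not be firstprivate
*/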
7511
7512 static bool
7513 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7514 {
7515 splay_tree_node n;
7516
7517 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7518 if (n != NULL)
7519 {
7520 if (n->value & GOVD_SHARED)
7521 {
7522 if (ctx == gimplify_omp_ctxp)
7523 {
7524 if (simd)
7525 error ("iteration variable %qE is predetermined linear",
7526 DECL_NAME (decl));
7527 else
7528 error ("iteration variable %qE should be private",
7529 DECL_NAME (decl));
7530 n->value = GOVD_PRIVATE;
7531 return true;
7532 }
7533 else
7534 return false;
7535 }
7536 else if ((n->value & GOVD_EXPLICIT) != 0
7537 && (ctx == gimplify_omp_ctxp
7538 || (ctx->region_type == ORT_COMBINED_PARALLEL
7539 && gimplify_omp_ctxp->outer_context == ctx)))
7540 {
7541 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7542 error ("iteration variable %qE should not be firstprivate",
7543 DECL_NAME (decl));
7544 else if ((n->value & GOVD_REDUCTION) != 0)
7545 error ("iteration variable %qE should not be reduction",
7546 DECL_NAME (decl));
7547 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7548 error ("iteration variable %qE should not be linear",
7549 DECL_NAME (decl));
7550 }
7551 return (ctx == gimplify_omp_ctxp
7552 || (ctx->region_type == ORT_COMBINED_PARALLEL
7553 && gimplify_omp_ctxp->outer_context == ctx));
7554 }
7555
7556 if (ctx->region_type != ORT_WORKSHARE
7557 && ctx->region_type != ORT_TASKGROUP
7558 && ctx->region_type != ORT_SIMD
7559 && ctx->region_type != ORT_ACC)
7560 return false;
7561 else if (ctx->outer_context)
7562 return omp_is_private (ctx->outer_context, decl, simd);
7563 return false;
7564 }
7565
7566 /* Return true if DECL is private within the parallel region that
7567 binds to the current construct's context, or appears in that
7568 region's REDUCTION clause. */
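
/* Illustrative use (names made up): for

     #pragma omp parallel private (x)
     #pragma omp single copyprivate (x)

   the walk below reaches the parallel context, finds 'x' recorded
   without GOVD_SHARED, and so reports it as private.  */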
7569
7570 static bool
7571 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7572 {
7573 splay_tree_node n;
7574
7575 do
7576 {
7577 ctx = ctx->outer_context;
7578 if (ctx == NULL)
7579 {
7580 if (is_global_var (decl))
7581 return false;
7582
7583 /* References might be private, but might be shared too;
7584 when checking for copyprivate, assume they might be
7585 private, otherwise assume they might be shared. */
7586 if (copyprivate)
7587 return true;
7588
7589 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7590 return false;
7591
7592 /* Treat C++ privatized non-static data members outside
7593 of the privatization the same. */
7594 if (omp_member_access_dummy_var (decl))
7595 return false;
7596
7597 return true;
7598 }
7599
7600 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7601
7602 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7603 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7604 continue;
7605
7606 if (n != NULL)
7607 {
7608 if ((n->value & GOVD_LOCAL) != 0
7609 && omp_member_access_dummy_var (decl))
7610 return false;
7611 return (n->value & GOVD_SHARED) == 0;
7612 }
7613 }
7614 while (ctx->region_type == ORT_WORKSHARE
7615 || ctx->region_type == ORT_TASKGROUP
7616 || ctx->region_type == ORT_SIMD
7617 || ctx->region_type == ORT_ACC);
7618 return false;
7619 }
7620
7621 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7622
7623 static tree
7624 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7625 {
7626 tree t = *tp;
7627
7628 /* Stop the walk once we find the DECL_EXPR for the DECL passed in DATA. */
7629 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7630 return t;
7631
7632 if (IS_TYPE_OR_DECL_P (t))
7633 *walk_subtrees = 0;
7634 return NULL_TREE;
7635 }
7636
7637 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7638 lower all the depend clauses by populating the corresponding depend
7639 array. Return 0 if there are no such depend clauses, 2 if all
7640 depend clauses should be removed, and 1 otherwise. */
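
/* A hypothetical clause this lowers ('a' and 'n' are made up):

     #pragma omp task depend (iterator (i = 0 : n), in : a[i])

   each iterator expands to a run-time loop storing the address of
   every a[i] into the depend array built below.  */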
7641
7642 static int
7643 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7644 {
7645 tree c;
7646 gimple *g;
7647 size_t n[4] = { 0, 0, 0, 0 };
7648 bool unused[4];
7649 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7650 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7651 size_t i, j;
7652 location_t first_loc = UNKNOWN_LOCATION;
7653
7654 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7655 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7656 {
7657 switch (OMP_CLAUSE_DEPEND_KIND (c))
7658 {
7659 case OMP_CLAUSE_DEPEND_IN:
7660 i = 2;
7661 break;
7662 case OMP_CLAUSE_DEPEND_OUT:
7663 case OMP_CLAUSE_DEPEND_INOUT:
7664 i = 0;
7665 break;
7666 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7667 i = 1;
7668 break;
7669 case OMP_CLAUSE_DEPEND_DEPOBJ:
7670 i = 3;
7671 break;
7672 case OMP_CLAUSE_DEPEND_SOURCE:
7673 case OMP_CLAUSE_DEPEND_SINK:
7674 continue;
7675 default:
7676 gcc_unreachable ();
7677 }
7678 tree t = OMP_CLAUSE_DECL (c);
7679 if (first_loc == UNKNOWN_LOCATION)
7680 first_loc = OMP_CLAUSE_LOCATION (c);
7681 if (TREE_CODE (t) == TREE_LIST
7682 && TREE_PURPOSE (t)
7683 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7684 {
7685 if (TREE_PURPOSE (t) != last_iter)
7686 {
7687 tree tcnt = size_one_node;
7688 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7689 {
7690 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7691 is_gimple_val, fb_rvalue) == GS_ERROR
7692 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7693 is_gimple_val, fb_rvalue) == GS_ERROR
7694 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7695 is_gimple_val, fb_rvalue) == GS_ERROR
7696 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7697 is_gimple_val, fb_rvalue)
7698 == GS_ERROR))
7699 return 2;
7700 tree var = TREE_VEC_ELT (it, 0);
7701 tree begin = TREE_VEC_ELT (it, 1);
7702 tree end = TREE_VEC_ELT (it, 2);
7703 tree step = TREE_VEC_ELT (it, 3);
7704 tree orig_step = TREE_VEC_ELT (it, 4);
7705 tree type = TREE_TYPE (var);
7706 tree stype = TREE_TYPE (step);
7707 location_t loc = DECL_SOURCE_LOCATION (var);
7708 tree endmbegin;
7709 /* Compute count for this iterator as
7710 orig_step > 0
7711 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7712 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7713 and compute product of those for the entire depend
7714 clause. */
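/* Worked example (illustrative): begin=0, end=10, step=3 with
   orig_step > 0 gives (10 - 0 + (3 - 1)) / 3 = 4 iterations
   (i = 0, 3, 6, 9).  */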
7715 if (POINTER_TYPE_P (type))
7716 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7717 stype, end, begin);
7718 else
7719 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7720 end, begin);
7721 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7722 step,
7723 build_int_cst (stype, 1));
7724 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7725 build_int_cst (stype, 1));
7726 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7727 unshare_expr (endmbegin),
7728 stepm1);
7729 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7730 pos, step);
7731 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7732 endmbegin, stepp1);
7733 if (TYPE_UNSIGNED (stype))
7734 {
7735 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7736 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7737 }
7738 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7739 neg, step);
7740 step = NULL_TREE;
7741 tree cond = fold_build2_loc (loc, LT_EXPR,
7742 boolean_type_node,
7743 begin, end);
7744 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7745 build_int_cst (stype, 0));
7746 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7747 end, begin);
7748 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7749 build_int_cst (stype, 0));
7750 tree osteptype = TREE_TYPE (orig_step);
7751 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7752 orig_step,
7753 build_int_cst (osteptype, 0));
7754 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7755 cond, pos, neg);
7756 cnt = fold_convert_loc (loc, sizetype, cnt);
7757 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7758 fb_rvalue) == GS_ERROR)
7759 return 2;
7760 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7761 }
7762 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7763 fb_rvalue) == GS_ERROR)
7764 return 2;
7765 last_iter = TREE_PURPOSE (t);
7766 last_count = tcnt;
7767 }
7768 if (counts[i] == NULL_TREE)
7769 counts[i] = last_count;
7770 else
7771 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7772 PLUS_EXPR, counts[i], last_count);
7773 }
7774 else
7775 n[i]++;
7776 }
7777 for (i = 0; i < 4; i++)
7778 if (counts[i])
7779 break;
7780 if (i == 4)
7781 return 0;
7782
7783 tree total = size_zero_node;
7784 for (i = 0; i < 4; i++)
7785 {
7786 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7787 if (counts[i] == NULL_TREE)
7788 counts[i] = size_zero_node;
7789 if (n[i])
7790 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7791 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7792 fb_rvalue) == GS_ERROR)
7793 return 2;
7794 total = size_binop (PLUS_EXPR, total, counts[i]);
7795 }
7796
7797 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7798 == GS_ERROR)
7799 return 2;
7800 bool is_old = unused[1] && unused[3];
7801 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7802 size_int (is_old ? 1 : 4));
7803 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7804 tree array = create_tmp_var_raw (type);
7805 TREE_ADDRESSABLE (array) = 1;
7806 if (TREE_CODE (totalpx) != INTEGER_CST)
7807 {
7808 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7809 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7810 if (gimplify_omp_ctxp)
7811 {
7812 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7813 while (ctx
7814 && (ctx->region_type == ORT_WORKSHARE
7815 || ctx->region_type == ORT_TASKGROUP
7816 || ctx->region_type == ORT_SIMD
7817 || ctx->region_type == ORT_ACC))
7818 ctx = ctx->outer_context;
7819 if (ctx)
7820 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7821 }
7822 gimplify_vla_decl (array, pre_p);
7823 }
7824 else
7825 gimple_add_tmp_var (array);
7826 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7827 NULL_TREE);
7828 tree tem;
7829 if (!is_old)
7830 {
7831 tem = build2 (MODIFY_EXPR, void_type_node, r,
7832 build_int_cst (ptr_type_node, 0));
7833 gimplify_and_add (tem, pre_p);
7834 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7835 NULL_TREE);
7836 }
7837 tem = build2 (MODIFY_EXPR, void_type_node, r,
7838 fold_convert (ptr_type_node, total));
7839 gimplify_and_add (tem, pre_p);
7840 for (i = 1; i < (is_old ? 2 : 4); i++)
7841 {
7842 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7843 NULL_TREE, NULL_TREE);
7844 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7845 gimplify_and_add (tem, pre_p);
7846 }
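
/* Summary of the header layout built above (derived from this code):
     old format: [0] total count, [1] number of out/inout entries,
       addresses from index 2 on;
     new format: [0] 0, [1] total count, [2] out/inout count,
       [3] mutexinoutset count, [4] in count, addresses from index 5 on.  */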
7847
7848 tree cnts[4];
7849 for (j = 4; j; j--)
7850 if (!unused[j - 1])
7851 break;
7852 for (i = 0; i < 4; i++)
7853 {
7854 if (i && (i >= j || unused[i - 1]))
7855 {
7856 cnts[i] = cnts[i - 1];
7857 continue;
7858 }
7859 cnts[i] = create_tmp_var (sizetype);
7860 if (i == 0)
7861 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7862 else
7863 {
7864 tree t;
7865 if (is_old)
7866 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7867 else
7868 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7869 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7870 == GS_ERROR)
7871 return 2;
7872 g = gimple_build_assign (cnts[i], t);
7873 }
7874 gimple_seq_add_stmt (pre_p, g);
7875 }
7876
7877 last_iter = NULL_TREE;
7878 tree last_bind = NULL_TREE;
7879 tree *last_body = NULL;
7880 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7881 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7882 {
7883 switch (OMP_CLAUSE_DEPEND_KIND (c))
7884 {
7885 case OMP_CLAUSE_DEPEND_IN:
7886 i = 2;
7887 break;
7888 case OMP_CLAUSE_DEPEND_OUT:
7889 case OMP_CLAUSE_DEPEND_INOUT:
7890 i = 0;
7891 break;
7892 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7893 i = 1;
7894 break;
7895 case OMP_CLAUSE_DEPEND_DEPOBJ:
7896 i = 3;
7897 break;
7898 case OMP_CLAUSE_DEPEND_SOURCE:
7899 case OMP_CLAUSE_DEPEND_SINK:
7900 continue;
7901 default:
7902 gcc_unreachable ();
7903 }
7904 tree t = OMP_CLAUSE_DECL (c);
7905 if (TREE_CODE (t) == TREE_LIST
7906 && TREE_PURPOSE (t)
7907 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7908 {
7909 if (TREE_PURPOSE (t) != last_iter)
7910 {
7911 if (last_bind)
7912 gimplify_and_add (last_bind, pre_p);
7913 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
7914 last_bind = build3 (BIND_EXPR, void_type_node,
7915 BLOCK_VARS (block), NULL, block);
7916 TREE_SIDE_EFFECTS (last_bind) = 1;
7917 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
7918 tree *p = &BIND_EXPR_BODY (last_bind);
7919 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7920 {
7921 tree var = TREE_VEC_ELT (it, 0);
7922 tree begin = TREE_VEC_ELT (it, 1);
7923 tree end = TREE_VEC_ELT (it, 2);
7924 tree step = TREE_VEC_ELT (it, 3);
7925 tree orig_step = TREE_VEC_ELT (it, 4);
7926 tree type = TREE_TYPE (var);
7927 location_t loc = DECL_SOURCE_LOCATION (var);
7928 /* Emit:
7929 var = begin;
7930 goto cond_label;
7931 beg_label:
7932 ...
7933 var = var + step;
7934 cond_label:
7935 if (orig_step > 0) {
7936 if (var < end) goto beg_label;
7937 } else {
7938 if (var > end) goto beg_label;
7939 }
7940 for each iterator, with inner iterators added to
7941 the ... above. */
7942 tree beg_label = create_artificial_label (loc);
7943 tree cond_label = NULL_TREE;
7944 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7945 var, begin);
7946 append_to_statement_list_force (tem, p);
7947 tem = build_and_jump (&cond_label);
7948 append_to_statement_list_force (tem, p);
7949 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
7950 append_to_statement_list (tem, p);
7951 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
7952 NULL_TREE, NULL_TREE);
7953 TREE_SIDE_EFFECTS (bind) = 1;
7954 SET_EXPR_LOCATION (bind, loc);
7955 append_to_statement_list_force (bind, p);
7956 if (POINTER_TYPE_P (type))
7957 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
7958 var, fold_convert_loc (loc, sizetype,
7959 step));
7960 else
7961 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
7962 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7963 var, tem);
7964 append_to_statement_list_force (tem, p);
7965 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
7966 append_to_statement_list (tem, p);
7967 tree cond = fold_build2_loc (loc, LT_EXPR,
7968 boolean_type_node,
7969 var, end);
7970 tree pos
7971 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7972 cond, build_and_jump (&beg_label),
7973 void_node);
7974 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7975 var, end);
7976 tree neg
7977 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7978 cond, build_and_jump (&beg_label),
7979 void_node);
7980 tree osteptype = TREE_TYPE (orig_step);
7981 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7982 orig_step,
7983 build_int_cst (osteptype, 0));
7984 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
7985 cond, pos, neg);
7986 append_to_statement_list_force (tem, p);
7987 p = &BIND_EXPR_BODY (bind);
7988 }
7989 last_body = p;
7990 }
7991 last_iter = TREE_PURPOSE (t);
7992 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
7993 {
7994 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
7995 0), last_body);
7996 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
7997 }
7998 if (error_operand_p (TREE_VALUE (t)))
7999 return 2;
8000 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8001 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8002 NULL_TREE, NULL_TREE);
8003 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8004 void_type_node, r, TREE_VALUE (t));
8005 append_to_statement_list_force (tem, last_body);
8006 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8007 void_type_node, cnts[i],
8008 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8009 append_to_statement_list_force (tem, last_body);
8010 TREE_VALUE (t) = null_pointer_node;
8011 }
8012 else
8013 {
8014 if (last_bind)
8015 {
8016 gimplify_and_add (last_bind, pre_p);
8017 last_bind = NULL_TREE;
8018 }
8019 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8020 {
8021 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8022 NULL, is_gimple_val, fb_rvalue);
8023 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8024 }
8025 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8026 return 2;
8027 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8028 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8029 is_gimple_val, fb_rvalue) == GS_ERROR)
8030 return 2;
8031 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8032 NULL_TREE, NULL_TREE);
8033 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8034 gimplify_and_add (tem, pre_p);
8035 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8036 size_int (1)));
8037 gimple_seq_add_stmt (pre_p, g);
8038 }
8039 }
8040 if (last_bind)
8041 gimplify_and_add (last_bind, pre_p);
8042 tree cond = boolean_false_node;
8043 if (is_old)
8044 {
8045 if (!unused[0])
8046 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8047 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8048 size_int (2)));
8049 if (!unused[2])
8050 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8051 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8052 cnts[2],
8053 size_binop_loc (first_loc, PLUS_EXPR,
8054 totalpx,
8055 size_int (1))));
8056 }
8057 else
8058 {
8059 tree prev = size_int (5);
8060 for (i = 0; i < 4; i++)
8061 {
8062 if (unused[i])
8063 continue;
8064 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8065 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8066 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8067 cnts[i], unshare_expr (prev)));
8068 }
8069 }
8070 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8071 build_call_expr_loc (first_loc,
8072 builtin_decl_explicit (BUILT_IN_TRAP),
8073 0), void_node);
8074 gimplify_and_add (tem, pre_p);
8075 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8076 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8077 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8078 OMP_CLAUSE_CHAIN (c) = *list_p;
8079 *list_p = c;
8080 return 1;
8081 }
8082
8083 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8084 omp context and, where required, into enclosing ones. */
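
/* Illustrative effect of the OMP_TARGET defaults set up below for
   non-Fortran ('n' and 'p' are made-up names):

     int n;  int *p;
     #pragma omp target    n -> firstprivate, p -> zero-length array map
       ...
*/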
8085
8086 static void
8087 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8088 enum omp_region_type region_type,
8089 enum tree_code code)
8090 {
8091 struct gimplify_omp_ctx *ctx, *outer_ctx;
8092 tree c;
8093 hash_map<tree, tree> *struct_map_to_clause = NULL;
8094 tree *prev_list_p = NULL, *orig_list_p = list_p;
8095 int handled_depend_iterators = -1;
8096 int nowait = -1;
8097
8098 ctx = new_omp_context (region_type);
8099 outer_ctx = ctx->outer_context;
8100 if (code == OMP_TARGET)
8101 {
8102 if (!lang_GNU_Fortran ())
8103 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8104 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8105 }
8106 if (!lang_GNU_Fortran ())
8107 switch (code)
8108 {
8109 case OMP_TARGET:
8110 case OMP_TARGET_DATA:
8111 case OMP_TARGET_ENTER_DATA:
8112 case OMP_TARGET_EXIT_DATA:
8113 case OACC_DECLARE:
8114 case OACC_HOST_DATA:
8115 case OACC_PARALLEL:
8116 case OACC_KERNELS:
8117 ctx->target_firstprivatize_array_bases = true;
8118 default:
8119 break;
8120 }
8121
8122 while ((c = *list_p) != NULL)
8123 {
8124 bool remove = false;
8125 bool notice_outer = true;
8126 const char *check_non_private = NULL;
8127 unsigned int flags;
8128 tree decl;
8129
8130 switch (OMP_CLAUSE_CODE (c))
8131 {
8132 case OMP_CLAUSE_PRIVATE:
8133 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8134 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8135 {
8136 flags |= GOVD_PRIVATE_OUTER_REF;
8137 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8138 }
8139 else
8140 notice_outer = false;
8141 goto do_add;
8142 case OMP_CLAUSE_SHARED:
8143 flags = GOVD_SHARED | GOVD_EXPLICIT;
8144 goto do_add;
8145 case OMP_CLAUSE_FIRSTPRIVATE:
8146 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8147 check_non_private = "firstprivate";
8148 goto do_add;
8149 case OMP_CLAUSE_LASTPRIVATE:
8150 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8151 switch (code)
8152 {
8153 case OMP_DISTRIBUTE:
8154 error_at (OMP_CLAUSE_LOCATION (c),
8155 "conditional %<lastprivate%> clause on "
8156 "%qs construct", "distribute");
8157 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8158 break;
8159 case OMP_TASKLOOP:
8160 error_at (OMP_CLAUSE_LOCATION (c),
8161 "conditional %<lastprivate%> clause on "
8162 "%qs construct", "taskloop");
8163 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8164 break;
8165 default:
8166 break;
8167 }
8168 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8169 check_non_private = "lastprivate";
8170 decl = OMP_CLAUSE_DECL (c);
8171 if (error_operand_p (decl))
8172 goto do_add;
8173 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8174 && !lang_hooks.decls.omp_scalar_p (decl))
8175 {
8176 error_at (OMP_CLAUSE_LOCATION (c),
8177 "non-scalar variable %qD in conditional "
8178 "%<lastprivate%> clause", decl);
8179 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8180 }
8181 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8182 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
8183 if (outer_ctx
8184 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8185 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8186 == ORT_COMBINED_TEAMS))
8187 && splay_tree_lookup (outer_ctx->variables,
8188 (splay_tree_key) decl) == NULL)
8189 {
8190 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8191 if (outer_ctx->outer_context)
8192 omp_notice_variable (outer_ctx->outer_context, decl, true);
8193 }
8194 else if (outer_ctx
8195 && (outer_ctx->region_type & ORT_TASK) != 0
8196 && outer_ctx->combined_loop
8197 && splay_tree_lookup (outer_ctx->variables,
8198 (splay_tree_key) decl) == NULL)
8199 {
8200 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8201 if (outer_ctx->outer_context)
8202 omp_notice_variable (outer_ctx->outer_context, decl, true);
8203 }
8204 else if (outer_ctx
8205 && (outer_ctx->region_type == ORT_WORKSHARE
8206 || outer_ctx->region_type == ORT_ACC)
8207 && outer_ctx->combined_loop
8208 && splay_tree_lookup (outer_ctx->variables,
8209 (splay_tree_key) decl) == NULL
8210 && !omp_check_private (outer_ctx, decl, false))
8211 {
8212 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8213 if (outer_ctx->outer_context
8214 && (outer_ctx->outer_context->region_type
8215 == ORT_COMBINED_PARALLEL)
8216 && splay_tree_lookup (outer_ctx->outer_context->variables,
8217 (splay_tree_key) decl) == NULL)
8218 {
8219 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8220 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8221 if (octx->outer_context)
8222 {
8223 octx = octx->outer_context;
8224 if (octx->region_type == ORT_WORKSHARE
8225 && octx->combined_loop
8226 && splay_tree_lookup (octx->variables,
8227 (splay_tree_key) decl) == NULL
8228 && !omp_check_private (octx, decl, false))
8229 {
8230 omp_add_variable (octx, decl,
8231 GOVD_LASTPRIVATE | GOVD_SEEN);
8232 octx = octx->outer_context;
8233 if (octx
8234 && ((octx->region_type & ORT_COMBINED_TEAMS)
8235 == ORT_COMBINED_TEAMS)
8236 && (splay_tree_lookup (octx->variables,
8237 (splay_tree_key) decl)
8238 == NULL))
8239 {
8240 omp_add_variable (octx, decl,
8241 GOVD_SHARED | GOVD_SEEN);
8242 octx = octx->outer_context;
8243 }
8244 }
8245 if (octx)
8246 omp_notice_variable (octx, decl, true);
8247 }
8248 }
8249 else if (outer_ctx->outer_context)
8250 omp_notice_variable (outer_ctx->outer_context, decl, true);
8251 }
8252 goto do_add;
8253 case OMP_CLAUSE_REDUCTION:
8254 if (OMP_CLAUSE_REDUCTION_TASK (c))
8255 {
8256 if (region_type == ORT_WORKSHARE)
8257 {
8258 if (nowait == -1)
8259 nowait = omp_find_clause (*list_p,
8260 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8261 if (nowait
8262 && (outer_ctx == NULL
8263 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8264 {
8265 error_at (OMP_CLAUSE_LOCATION (c),
8266 "%<task%> reduction modifier on a construct "
8267 "with a %<nowait%> clause");
8268 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8269 }
8270 }
8271 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8272 {
8273 error_at (OMP_CLAUSE_LOCATION (c),
8274 "invalid %<task%> reduction modifier on construct "
8275 "other than %<parallel%>, %<for%> or %<sections%>");
8276 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8277 }
8278 }
8279 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8280 switch (code)
8281 {
8282 case OMP_SECTIONS:
8283 error_at (OMP_CLAUSE_LOCATION (c),
8284 "%<inscan%> %<reduction%> clause on "
8285 "%qs construct", "sections");
8286 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8287 break;
8288 case OMP_PARALLEL:
8289 error_at (OMP_CLAUSE_LOCATION (c),
8290 "%<inscan%> %<reduction%> clause on "
8291 "%qs construct", "parallel");
8292 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8293 break;
8294 case OMP_TEAMS:
8295 error_at (OMP_CLAUSE_LOCATION (c),
8296 "%<inscan%> %<reduction%> clause on "
8297 "%qs construct", "teams");
8298 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8299 break;
8300 case OMP_TASKLOOP:
8301 error_at (OMP_CLAUSE_LOCATION (c),
8302 "%<inscan%> %<reduction%> clause on "
8303 "%qs construct", "taskloop");
8304 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8305 break;
8306 default:
8307 break;
8308 }
8309 /* FALLTHRU */
8310 case OMP_CLAUSE_IN_REDUCTION:
8311 case OMP_CLAUSE_TASK_REDUCTION:
8312 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8313 /* OpenACC permits reductions on private variables. */
8314 if (!(region_type & ORT_ACC)
8315 /* taskgroup is actually not a worksharing region. */
8316 && code != OMP_TASKGROUP)
8317 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8318 decl = OMP_CLAUSE_DECL (c);
8319 if (TREE_CODE (decl) == MEM_REF)
8320 {
8321 tree type = TREE_TYPE (decl);
8322 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8323 NULL, is_gimple_val, fb_rvalue, false)
8324 == GS_ERROR)
8325 {
8326 remove = true;
8327 break;
8328 }
8329 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8330 if (DECL_P (v))
8331 {
8332 omp_firstprivatize_variable (ctx, v);
8333 omp_notice_variable (ctx, v, true);
8334 }
8335 decl = TREE_OPERAND (decl, 0);
8336 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8337 {
8338 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8339 NULL, is_gimple_val, fb_rvalue, false)
8340 == GS_ERROR)
8341 {
8342 remove = true;
8343 break;
8344 }
8345 v = TREE_OPERAND (decl, 1);
8346 if (DECL_P (v))
8347 {
8348 omp_firstprivatize_variable (ctx, v);
8349 omp_notice_variable (ctx, v, true);
8350 }
8351 decl = TREE_OPERAND (decl, 0);
8352 }
8353 if (TREE_CODE (decl) == ADDR_EXPR
8354 || TREE_CODE (decl) == INDIRECT_REF)
8355 decl = TREE_OPERAND (decl, 0);
8356 }
8357 goto do_add_decl;
8358 case OMP_CLAUSE_LINEAR:
8359 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8360 is_gimple_val, fb_rvalue) == GS_ERROR)
8361 {
8362 remove = true;
8363 break;
8364 }
8365 else
8366 {
8367 if (code == OMP_SIMD
8368 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8369 {
8370 struct gimplify_omp_ctx *octx = outer_ctx;
8371 if (octx
8372 && octx->region_type == ORT_WORKSHARE
8373 && octx->combined_loop
8374 && !octx->distribute)
8375 {
8376 if (octx->outer_context
8377 && (octx->outer_context->region_type
8378 == ORT_COMBINED_PARALLEL))
8379 octx = octx->outer_context->outer_context;
8380 else
8381 octx = octx->outer_context;
8382 }
8383 if (octx
8384 && octx->region_type == ORT_WORKSHARE
8385 && octx->combined_loop
8386 && octx->distribute)
8387 {
8388 error_at (OMP_CLAUSE_LOCATION (c),
8389 "%<linear%> clause for variable other than "
8390 "loop iterator specified on construct "
8391 "combined with %<distribute%>");
8392 remove = true;
8393 break;
8394 }
8395 }
8396 /* For combined #pragma omp parallel for simd, need to put
8397 lastprivate and perhaps firstprivate too on the
8398 parallel. Similarly for #pragma omp for simd. */
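/* Illustrative (a sketch): for

     #pragma omp parallel for simd linear (k : 2)

   the loop below also registers 'k' as lastprivate (and firstprivate,
   when copy-in is needed) on the enclosing parallel context.  */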
8399 struct gimplify_omp_ctx *octx = outer_ctx;
8400 decl = NULL_TREE;
8401 do
8402 {
8403 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8404 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8405 break;
8406 decl = OMP_CLAUSE_DECL (c);
8407 if (error_operand_p (decl))
8408 {
8409 decl = NULL_TREE;
8410 break;
8411 }
8412 flags = GOVD_SEEN;
8413 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8414 flags |= GOVD_FIRSTPRIVATE;
8415 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8416 flags |= GOVD_LASTPRIVATE;
8417 if (octx
8418 && octx->region_type == ORT_WORKSHARE
8419 && octx->combined_loop)
8420 {
8421 if (octx->outer_context
8422 && (octx->outer_context->region_type
8423 == ORT_COMBINED_PARALLEL))
8424 octx = octx->outer_context;
8425 else if (omp_check_private (octx, decl, false))
8426 break;
8427 }
8428 else if (octx
8429 && (octx->region_type & ORT_TASK) != 0
8430 && octx->combined_loop)
8431 ;
8432 else if (octx
8433 && octx->region_type == ORT_COMBINED_PARALLEL
8434 && ctx->region_type == ORT_WORKSHARE
8435 && octx == outer_ctx)
8436 flags = GOVD_SEEN | GOVD_SHARED;
8437 else if (octx
8438 && ((octx->region_type & ORT_COMBINED_TEAMS)
8439 == ORT_COMBINED_TEAMS))
8440 flags = GOVD_SEEN | GOVD_SHARED;
8441 else if (octx
8442 && octx->region_type == ORT_COMBINED_TARGET)
8443 {
8444 flags &= ~GOVD_LASTPRIVATE;
8445 if (flags == GOVD_SEEN)
8446 break;
8447 }
8448 else
8449 break;
8450 splay_tree_node on
8451 = splay_tree_lookup (octx->variables,
8452 (splay_tree_key) decl);
8453 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8454 {
8455 octx = NULL;
8456 break;
8457 }
8458 omp_add_variable (octx, decl, flags);
8459 if (octx->outer_context == NULL)
8460 break;
8461 octx = octx->outer_context;
8462 }
8463 while (1);
8464 if (octx
8465 && decl
8466 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8467 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8468 omp_notice_variable (octx, decl, true);
8469 }
8470 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8471 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8472 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8473 {
8474 notice_outer = false;
8475 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8476 }
8477 goto do_add;
8478
8479 case OMP_CLAUSE_MAP:
8480 decl = OMP_CLAUSE_DECL (c);
8481 if (error_operand_p (decl))
8482 remove = true;
8483 switch (code)
8484 {
8485 case OMP_TARGET:
8486 break;
8487 case OACC_DATA:
8488 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8489 break;
8490 /* FALLTHRU */
8491 case OMP_TARGET_DATA:
8492 case OMP_TARGET_ENTER_DATA:
8493 case OMP_TARGET_EXIT_DATA:
8494 case OACC_ENTER_DATA:
8495 case OACC_EXIT_DATA:
8496 case OACC_HOST_DATA:
8497 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8498 || (OMP_CLAUSE_MAP_KIND (c)
8499 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8500 /* For target {,enter ,exit }data, only the array slice is
8501 mapped, not the pointer to it. */
8502 remove = true;
8503 break;
8504 default:
8505 break;
8506 }
8507 if (remove)
8508 break;
8509 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8510 {
8511 struct gimplify_omp_ctx *octx;
8512 for (octx = outer_ctx; octx; octx = octx->outer_context)
8513 {
8514 if (octx->region_type != ORT_ACC_HOST_DATA)
8515 break;
8516 splay_tree_node n2
8517 = splay_tree_lookup (octx->variables,
8518 (splay_tree_key) decl);
8519 if (n2)
8520 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8521 "declared in enclosing %<host_data%> region",
8522 DECL_NAME (decl));
8523 }
8524 }
8525 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8526 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8527 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8528 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8529 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8530 {
8531 remove = true;
8532 break;
8533 }
8534 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8535 || (OMP_CLAUSE_MAP_KIND (c)
8536 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8537 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8538 {
8539 OMP_CLAUSE_SIZE (c)
8540 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8541 false);
8542 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8543 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8544 }
8545 if (!DECL_P (decl))
8546 {
8547 tree d = decl, *pd;
8548 if (TREE_CODE (d) == ARRAY_REF)
8549 {
8550 while (TREE_CODE (d) == ARRAY_REF)
8551 d = TREE_OPERAND (d, 0);
8552 if (TREE_CODE (d) == COMPONENT_REF
8553 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8554 decl = d;
8555 }
8556 pd = &OMP_CLAUSE_DECL (c);
8557 if (d == decl
8558 && TREE_CODE (decl) == INDIRECT_REF
8559 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8560 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8561 == REFERENCE_TYPE))
8562 {
8563 pd = &TREE_OPERAND (decl, 0);
8564 decl = TREE_OPERAND (decl, 0);
8565 }
8566 if (TREE_CODE (decl) == COMPONENT_REF)
8567 {
8568 while (TREE_CODE (decl) == COMPONENT_REF)
8569 decl = TREE_OPERAND (decl, 0);
8570 if (TREE_CODE (decl) == INDIRECT_REF
8571 && DECL_P (TREE_OPERAND (decl, 0))
8572 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8573 == REFERENCE_TYPE))
8574 decl = TREE_OPERAND (decl, 0);
8575 }
8576 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
8577 == GS_ERROR)
8578 {
8579 remove = true;
8580 break;
8581 }
8582 if (DECL_P (decl))
8583 {
8584 if (error_operand_p (decl))
8585 {
8586 remove = true;
8587 break;
8588 }
8589
8590 tree stype = TREE_TYPE (decl);
8591 if (TREE_CODE (stype) == REFERENCE_TYPE)
8592 stype = TREE_TYPE (stype);
8593 if (TYPE_SIZE_UNIT (stype) == NULL
8594 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
8595 {
8596 error_at (OMP_CLAUSE_LOCATION (c),
8597 "mapping field %qE of variable length "
8598 "structure", OMP_CLAUSE_DECL (c));
8599 remove = true;
8600 break;
8601 }
8602
8603 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8604 {
8605 /* Error recovery. */
8606 if (prev_list_p == NULL)
8607 {
8608 remove = true;
8609 break;
8610 }
8611 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8612 {
8613 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8614 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8615 {
8616 remove = true;
8617 break;
8618 }
8619 }
8620 }
8621
8622 tree offset;
8623 poly_int64 bitsize, bitpos;
8624 machine_mode mode;
8625 int unsignedp, reversep, volatilep = 0;
8626 tree base = OMP_CLAUSE_DECL (c);
8627 while (TREE_CODE (base) == ARRAY_REF)
8628 base = TREE_OPERAND (base, 0);
8629 if (TREE_CODE (base) == INDIRECT_REF)
8630 base = TREE_OPERAND (base, 0);
8631 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8632 &mode, &unsignedp, &reversep,
8633 &volatilep);
8634 tree orig_base = base;
8635 if ((TREE_CODE (base) == INDIRECT_REF
8636 || (TREE_CODE (base) == MEM_REF
8637 && integer_zerop (TREE_OPERAND (base, 1))))
8638 && DECL_P (TREE_OPERAND (base, 0))
8639 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8640 == REFERENCE_TYPE))
8641 base = TREE_OPERAND (base, 0);
8642 gcc_assert (base == decl
8643 && (offset == NULL_TREE
8644 || poly_int_tree_p (offset)));
8645
8646 splay_tree_node n
8647 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8648 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
8649 == GOMP_MAP_ALWAYS_POINTER);
8650 if (n == NULL || (n->value & GOVD_MAP) == 0)
8651 {
8652 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8653 OMP_CLAUSE_MAP);
8654 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
8655 if (orig_base != base)
8656 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
8657 else
8658 OMP_CLAUSE_DECL (l) = decl;
8659 OMP_CLAUSE_SIZE (l) = size_int (1);
8660 if (struct_map_to_clause == NULL)
8661 struct_map_to_clause = new hash_map<tree, tree>;
8662 struct_map_to_clause->put (decl, l);
8663 if (ptr)
8664 {
8665 enum gomp_map_kind mkind
8666 = code == OMP_TARGET_EXIT_DATA
8667 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8668 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8669 OMP_CLAUSE_MAP);
8670 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8671 OMP_CLAUSE_DECL (c2)
8672 = unshare_expr (OMP_CLAUSE_DECL (c));
8673 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
8674 OMP_CLAUSE_SIZE (c2)
8675 = TYPE_SIZE_UNIT (ptr_type_node);
8676 OMP_CLAUSE_CHAIN (l) = c2;
8677 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8678 {
8679 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8680 tree c3
8681 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8682 OMP_CLAUSE_MAP);
8683 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8684 OMP_CLAUSE_DECL (c3)
8685 = unshare_expr (OMP_CLAUSE_DECL (c4));
8686 OMP_CLAUSE_SIZE (c3)
8687 = TYPE_SIZE_UNIT (ptr_type_node);
8688 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8689 OMP_CLAUSE_CHAIN (c2) = c3;
8690 }
8691 *prev_list_p = l;
8692 prev_list_p = NULL;
8693 }
8694 else
8695 {
8696 OMP_CLAUSE_CHAIN (l) = c;
8697 *list_p = l;
8698 list_p = &OMP_CLAUSE_CHAIN (l);
8699 }
8700 if (orig_base != base && code == OMP_TARGET)
8701 {
8702 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8703 OMP_CLAUSE_MAP);
8704 enum gomp_map_kind mkind
8705 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
8706 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8707 OMP_CLAUSE_DECL (c2) = decl;
8708 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8709 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
8710 OMP_CLAUSE_CHAIN (l) = c2;
8711 }
8712 flags = GOVD_MAP | GOVD_EXPLICIT;
8713 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8714 flags |= GOVD_SEEN;
8715 goto do_add_decl;
8716 }
8717 else
8718 {
8719 tree *osc = struct_map_to_clause->get (decl);
8720 tree *sc = NULL, *scp = NULL;
8721 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8722 n->value |= GOVD_SEEN;
8723 poly_offset_int o1, o2;
8724 if (offset)
8725 o1 = wi::to_poly_offset (offset);
8726 else
8727 o1 = 0;
8728 if (maybe_ne (bitpos, 0))
8729 o1 += bits_to_bytes_round_down (bitpos);
8730 sc = &OMP_CLAUSE_CHAIN (*osc);
8731 if (*sc != c
8732 && (OMP_CLAUSE_MAP_KIND (*sc)
8733 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8734 sc = &OMP_CLAUSE_CHAIN (*sc);
8735 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
8736 if (ptr && sc == prev_list_p)
8737 break;
8738 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8739 != COMPONENT_REF
8740 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8741 != INDIRECT_REF)
8742 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8743 != ARRAY_REF))
8744 break;
8745 else
8746 {
8747 tree offset2;
8748 poly_int64 bitsize2, bitpos2;
8749 base = OMP_CLAUSE_DECL (*sc);
8750 if (TREE_CODE (base) == ARRAY_REF)
8751 {
8752 while (TREE_CODE (base) == ARRAY_REF)
8753 base = TREE_OPERAND (base, 0);
8754 if (TREE_CODE (base) != COMPONENT_REF
8755 || (TREE_CODE (TREE_TYPE (base))
8756 != ARRAY_TYPE))
8757 break;
8758 }
8759 else if (TREE_CODE (base) == INDIRECT_REF
8760 && (TREE_CODE (TREE_OPERAND (base, 0))
8761 == COMPONENT_REF)
8762 && (TREE_CODE (TREE_TYPE
8763 (TREE_OPERAND (base, 0)))
8764 == REFERENCE_TYPE))
8765 base = TREE_OPERAND (base, 0);
8766 base = get_inner_reference (base, &bitsize2,
8767 &bitpos2, &offset2,
8768 &mode, &unsignedp,
8769 &reversep, &volatilep);
8770 if ((TREE_CODE (base) == INDIRECT_REF
8771 || (TREE_CODE (base) == MEM_REF
8772 && integer_zerop (TREE_OPERAND (base,
8773 1))))
8774 && DECL_P (TREE_OPERAND (base, 0))
8775 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8776 0)))
8777 == REFERENCE_TYPE))
8778 base = TREE_OPERAND (base, 0);
8779 if (base != decl)
8780 break;
8781 if (scp)
8782 continue;
8783 gcc_assert (offset == NULL_TREE
8784 || poly_int_tree_p (offset));
8785 tree d1 = OMP_CLAUSE_DECL (*sc);
8786 tree d2 = OMP_CLAUSE_DECL (c);
8787 while (TREE_CODE (d1) == ARRAY_REF)
8788 d1 = TREE_OPERAND (d1, 0);
8789 while (TREE_CODE (d2) == ARRAY_REF)
8790 d2 = TREE_OPERAND (d2, 0);
8791 if (TREE_CODE (d1) == INDIRECT_REF)
8792 d1 = TREE_OPERAND (d1, 0);
8793 if (TREE_CODE (d2) == INDIRECT_REF)
8794 d2 = TREE_OPERAND (d2, 0);
8795 while (TREE_CODE (d1) == COMPONENT_REF)
8796 if (TREE_CODE (d2) == COMPONENT_REF
8797 && TREE_OPERAND (d1, 1)
8798 == TREE_OPERAND (d2, 1))
8799 {
8800 d1 = TREE_OPERAND (d1, 0);
8801 d2 = TREE_OPERAND (d2, 0);
8802 }
8803 else
8804 break;
8805 if (d1 == d2)
8806 {
8807 error_at (OMP_CLAUSE_LOCATION (c),
8808 "%qE appears more than once in map "
8809 "clauses", OMP_CLAUSE_DECL (c));
8810 remove = true;
8811 break;
8812 }
8813 if (offset2)
8814 o2 = wi::to_poly_offset (offset2);
8815 else
8816 o2 = 0;
8817 o2 += bits_to_bytes_round_down (bitpos2);
8818 if (maybe_lt (o1, o2)
8819 || (known_eq (o1, o2)
8820 && maybe_lt (bitpos, bitpos2)))
8821 {
8822 if (ptr)
8823 scp = sc;
8824 else
8825 break;
8826 }
8827 }
8828 if (remove)
8829 break;
8830 OMP_CLAUSE_SIZE (*osc)
8831 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8832 size_one_node);
8833 if (ptr)
8834 {
8835 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8836 OMP_CLAUSE_MAP);
8837 tree cl = NULL_TREE;
8838 enum gomp_map_kind mkind
8839 = code == OMP_TARGET_EXIT_DATA
8840 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8841 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8842 OMP_CLAUSE_DECL (c2)
8843 = unshare_expr (OMP_CLAUSE_DECL (c));
8844 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8845 OMP_CLAUSE_SIZE (c2)
8846 = TYPE_SIZE_UNIT (ptr_type_node);
8847 cl = scp ? *prev_list_p : c2;
8848 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8849 {
8850 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8851 tree c3
8852 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8853 OMP_CLAUSE_MAP);
8854 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8855 OMP_CLAUSE_DECL (c3)
8856 = unshare_expr (OMP_CLAUSE_DECL (c4));
8857 OMP_CLAUSE_SIZE (c3)
8858 = TYPE_SIZE_UNIT (ptr_type_node);
8859 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8860 if (!scp)
8861 OMP_CLAUSE_CHAIN (c2) = c3;
8862 else
8863 cl = c3;
8864 }
8865 if (scp)
8866 *scp = c2;
8867 if (sc == prev_list_p)
8868 {
8869 *sc = cl;
8870 prev_list_p = NULL;
8871 }
8872 else
8873 {
8874 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8875 list_p = prev_list_p;
8876 prev_list_p = NULL;
8877 OMP_CLAUSE_CHAIN (c) = *sc;
8878 *sc = cl;
8879 continue;
8880 }
8881 }
8882 else if (*sc != c)
8883 {
8884 *list_p = OMP_CLAUSE_CHAIN (c);
8885 OMP_CLAUSE_CHAIN (c) = *sc;
8886 *sc = c;
8887 continue;
8888 }
8889 }
8890 }
8891 if (!remove
8892 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8893 && OMP_CLAUSE_CHAIN (c)
8894 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8895 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8896 == GOMP_MAP_ALWAYS_POINTER))
8897 prev_list_p = list_p;
8898 break;
8899 }
8900 flags = GOVD_MAP | GOVD_EXPLICIT;
8901 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8902 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8903 flags |= GOVD_MAP_ALWAYS_TO;
8904 goto do_add;
8905
8906 case OMP_CLAUSE_DEPEND:
8907 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8908 {
8909 tree deps = OMP_CLAUSE_DECL (c);
8910 while (deps && TREE_CODE (deps) == TREE_LIST)
8911 {
8912 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8913 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8914 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8915 pre_p, NULL, is_gimple_val, fb_rvalue);
8916 deps = TREE_CHAIN (deps);
8917 }
8918 break;
8919 }
8920 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8921 break;
8922 if (handled_depend_iterators == -1)
8923 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
8924 if (handled_depend_iterators)
8925 {
8926 if (handled_depend_iterators == 2)
8927 remove = true;
8928 break;
8929 }
8930 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8931 {
8932 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8933 NULL, is_gimple_val, fb_rvalue);
8934 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8935 }
8936 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8937 {
8938 remove = true;
8939 break;
8940 }
8941 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8942 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8943 is_gimple_val, fb_rvalue) == GS_ERROR)
8944 {
8945 remove = true;
8946 break;
8947 }
8948 break;
8949
8950 case OMP_CLAUSE_TO:
8951 case OMP_CLAUSE_FROM:
8952 case OMP_CLAUSE__CACHE_:
8953 decl = OMP_CLAUSE_DECL (c);
8954 if (error_operand_p (decl))
8955 {
8956 remove = true;
8957 break;
8958 }
8959 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8960 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8961 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8962 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8963 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8964 {
8965 remove = true;
8966 break;
8967 }
8968 if (!DECL_P (decl))
8969 {
8970 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8971 NULL, is_gimple_lvalue, fb_lvalue)
8972 == GS_ERROR)
8973 {
8974 remove = true;
8975 break;
8976 }
8977 break;
8978 }
8979 goto do_notice;
8980
8981 case OMP_CLAUSE_USE_DEVICE_PTR:
8982 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8983 goto do_add;
8984 case OMP_CLAUSE_IS_DEVICE_PTR:
8985 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8986 goto do_add;
8987
8988 do_add:
8989 decl = OMP_CLAUSE_DECL (c);
8990 do_add_decl:
8991 if (error_operand_p (decl))
8992 {
8993 remove = true;
8994 break;
8995 }
8996 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8997 {
8998 tree t = omp_member_access_dummy_var (decl);
8999 if (t)
9000 {
9001 tree v = DECL_VALUE_EXPR (decl);
9002 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
9003 if (outer_ctx)
9004 omp_notice_variable (outer_ctx, t, true);
9005 }
9006 }
9007 if (code == OACC_DATA
9008 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9009 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9010 flags |= GOVD_MAP_0LEN_ARRAY;
9011 omp_add_variable (ctx, decl, flags);
9012 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9013 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9014 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9015 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9016 {
9017 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
9018 GOVD_LOCAL | GOVD_SEEN);
9019 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
9020 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
9021 find_decl_expr,
9022 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9023 NULL) == NULL_TREE)
9024 omp_add_variable (ctx,
9025 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9026 GOVD_LOCAL | GOVD_SEEN);
9027 gimplify_omp_ctxp = ctx;
9028 push_gimplify_context ();
9029
9030 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9031 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9032
9033 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
9034 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
9035 pop_gimplify_context
9036 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
9037 push_gimplify_context ();
9038 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
9039 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9040 pop_gimplify_context
9041 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
9042 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
9043 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
9044
9045 gimplify_omp_ctxp = outer_ctx;
9046 }
9047 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9048 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
9049 {
9050 gimplify_omp_ctxp = ctx;
9051 push_gimplify_context ();
9052 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
9053 {
9054 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9055 NULL, NULL);
9056 TREE_SIDE_EFFECTS (bind) = 1;
9057 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
9058 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
9059 }
9060 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
9061 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
9062 pop_gimplify_context
9063 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
9064 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
9065
9066 gimplify_omp_ctxp = outer_ctx;
9067 }
9068 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9069 && OMP_CLAUSE_LINEAR_STMT (c))
9070 {
9071 gimplify_omp_ctxp = ctx;
9072 push_gimplify_context ();
9073 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9074 {
9075 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9076 NULL, NULL);
9077 TREE_SIDE_EFFECTS (bind) = 1;
9078 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9079 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9080 }
9081 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9082 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9083 pop_gimplify_context
9084 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9085 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9086
9087 gimplify_omp_ctxp = outer_ctx;
9088 }
9089 if (notice_outer)
9090 goto do_notice;
9091 break;
9092
9093 case OMP_CLAUSE_COPYIN:
9094 case OMP_CLAUSE_COPYPRIVATE:
9095 decl = OMP_CLAUSE_DECL (c);
9096 if (error_operand_p (decl))
9097 {
9098 remove = true;
9099 break;
9100 }
9101 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9102 && !remove
9103 && !omp_check_private (ctx, decl, true))
9104 {
9105 remove = true;
9106 if (is_global_var (decl))
9107 {
9108 if (DECL_THREAD_LOCAL_P (decl))
9109 remove = false;
9110 else if (DECL_HAS_VALUE_EXPR_P (decl))
9111 {
9112 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9113
9114 if (value
9115 && DECL_P (value)
9116 && DECL_THREAD_LOCAL_P (value))
9117 remove = false;
9118 }
9119 }
9120 if (remove)
9121 error_at (OMP_CLAUSE_LOCATION (c),
9122 "copyprivate variable %qE is not threadprivate"
9123 " or private in outer context", DECL_NAME (decl));
9124 }
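/* Editorial sketch: copyprivate requires the variable to be private in
   the enclosing context or threadprivate, which the checks above
   encode; e.g. (compute is a placeholder)

     static int t;
     #pragma omp threadprivate (t)
     ...
     #pragma omp single copyprivate (t)
     t = compute ();

   is accepted because T is threadprivate, whereas copyprivate of a
   plain shared global is diagnosed above. */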
9125 do_notice:
9126 if ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9127 && outer_ctx
9128 && outer_ctx->region_type == ORT_COMBINED_PARALLEL
9129 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9130 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9131 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE))
9132 {
9133 splay_tree_node on
9134 = splay_tree_lookup (outer_ctx->variables,
9135 (splay_tree_key)decl);
9136 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9137 {
9138 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9139 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9140 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9141 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9142 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9143 == POINTER_TYPE))))
9144 omp_firstprivatize_variable (outer_ctx, decl);
9145 else
9146 omp_add_variable (outer_ctx, decl,
9147 GOVD_SEEN | GOVD_SHARED);
9148 omp_notice_variable (outer_ctx, decl, true);
9149 }
9150 }
9151 if (outer_ctx)
9152 omp_notice_variable (outer_ctx, decl, true);
9153 if (check_non_private
9154 && region_type == ORT_WORKSHARE
9155 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9156 || decl == OMP_CLAUSE_DECL (c)
9157 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9158 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9159 == ADDR_EXPR
9160 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9161 == POINTER_PLUS_EXPR
9162 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9163 (OMP_CLAUSE_DECL (c), 0), 0))
9164 == ADDR_EXPR)))))
9165 && omp_check_private (ctx, decl, false))
9166 {
9167 error ("%s variable %qE is private in outer context",
9168 check_non_private, DECL_NAME (decl));
9169 remove = true;
9170 }
9171 break;
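/* Editorial sketch: CHECK_NON_PRIVATE carries the clause name for
   worksharing regions, so code along the lines of

     int x = 0;
     #pragma omp parallel private (x)
     #pragma omp for reduction (+: x)
     for (int i = 0; i < 16; i++)
     x++;

   hits the omp_check_private test above and emits "reduction variable
   'x' is private in outer context". */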
9172
9173 case OMP_CLAUSE_IF:
9174 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9175 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9176 {
9177 const char *p[2];
9178 for (int i = 0; i < 2; i++)
9179 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9180 {
9181 case VOID_CST: p[i] = "cancel"; break;
9182 case OMP_PARALLEL: p[i] = "parallel"; break;
9183 case OMP_SIMD: p[i] = "simd"; break;
9184 case OMP_TASK: p[i] = "task"; break;
9185 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9186 case OMP_TARGET_DATA: p[i] = "target data"; break;
9187 case OMP_TARGET: p[i] = "target"; break;
9188 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9189 case OMP_TARGET_ENTER_DATA:
9190 p[i] = "target enter data"; break;
9191 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9192 default: gcc_unreachable ();
9193 }
9194 error_at (OMP_CLAUSE_LOCATION (c),
9195 "expected %qs %<if%> clause modifier rather than %qs",
9196 p[0], p[1]);
9197 remove = true;
9198 }
9199 /* Fall through. */
9200
9201 case OMP_CLAUSE_FINAL:
9202 OMP_CLAUSE_OPERAND (c, 0)
9203 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9204 /* Fall through. */
9205
9206 case OMP_CLAUSE_SCHEDULE:
9207 case OMP_CLAUSE_NUM_THREADS:
9208 case OMP_CLAUSE_NUM_TEAMS:
9209 case OMP_CLAUSE_THREAD_LIMIT:
9210 case OMP_CLAUSE_DIST_SCHEDULE:
9211 case OMP_CLAUSE_DEVICE:
9212 case OMP_CLAUSE_PRIORITY:
9213 case OMP_CLAUSE_GRAINSIZE:
9214 case OMP_CLAUSE_NUM_TASKS:
9215 case OMP_CLAUSE_HINT:
9216 case OMP_CLAUSE_ASYNC:
9217 case OMP_CLAUSE_WAIT:
9218 case OMP_CLAUSE_NUM_GANGS:
9219 case OMP_CLAUSE_NUM_WORKERS:
9220 case OMP_CLAUSE_VECTOR_LENGTH:
9221 case OMP_CLAUSE_WORKER:
9222 case OMP_CLAUSE_VECTOR:
9223 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9224 is_gimple_val, fb_rvalue) == GS_ERROR)
9225 remove = true;
9226 break;
9227
9228 case OMP_CLAUSE_GANG:
9229 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9230 is_gimple_val, fb_rvalue) == GS_ERROR)
9231 remove = true;
9232 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9233 is_gimple_val, fb_rvalue) == GS_ERROR)
9234 remove = true;
9235 break;
9236
9237 case OMP_CLAUSE_NOWAIT:
9238 nowait = 1;
9239 break;
9240
9241 case OMP_CLAUSE_ORDERED:
9242 case OMP_CLAUSE_UNTIED:
9243 case OMP_CLAUSE_COLLAPSE:
9244 case OMP_CLAUSE_TILE:
9245 case OMP_CLAUSE_AUTO:
9246 case OMP_CLAUSE_SEQ:
9247 case OMP_CLAUSE_INDEPENDENT:
9248 case OMP_CLAUSE_MERGEABLE:
9249 case OMP_CLAUSE_PROC_BIND:
9250 case OMP_CLAUSE_SAFELEN:
9251 case OMP_CLAUSE_SIMDLEN:
9252 case OMP_CLAUSE_NOGROUP:
9253 case OMP_CLAUSE_THREADS:
9254 case OMP_CLAUSE_SIMD:
9255 case OMP_CLAUSE_IF_PRESENT:
9256 case OMP_CLAUSE_FINALIZE:
9257 break;
9258
9259 case OMP_CLAUSE_DEFAULTMAP:
9260 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9261 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9262 {
9263 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9264 gdmkmin = GDMK_SCALAR;
9265 gdmkmax = GDMK_POINTER;
9266 break;
9267 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9268 gdmkmin = gdmkmax = GDMK_SCALAR;
9269 break;
9270 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9271 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9272 break;
9273 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9274 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9275 break;
9276 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9277 gdmkmin = gdmkmax = GDMK_POINTER;
9278 break;
9279 default:
9280 gcc_unreachable ();
9281 }
9282 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9283 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9284 {
9285 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9286 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9287 break;
9288 case OMP_CLAUSE_DEFAULTMAP_TO:
9289 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9290 break;
9291 case OMP_CLAUSE_DEFAULTMAP_FROM:
9292 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9293 break;
9294 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9295 ctx->defaultmap[gdmk] = GOVD_MAP;
9296 break;
9297 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9298 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9299 break;
9300 case OMP_CLAUSE_DEFAULTMAP_NONE:
9301 ctx->defaultmap[gdmk] = 0;
9302 break;
9303 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9304 switch (gdmk)
9305 {
9306 case GDMK_SCALAR:
9307 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9308 break;
9309 case GDMK_AGGREGATE:
9310 case GDMK_ALLOCATABLE:
9311 ctx->defaultmap[gdmk] = GOVD_MAP;
9312 break;
9313 case GDMK_POINTER:
9314 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9315 break;
9316 default:
9317 gcc_unreachable ();
9318 }
9319 break;
9320 default:
9321 gcc_unreachable ();
9322 }
9323 break;
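/* Editorial sketch: the loop above lets one clause update a range of
   categories. For instance

     #pragma omp target defaultmap (firstprivate)

   has an unspecified category, so GDMK_SCALAR through GDMK_POINTER all
   become GOVD_FIRSTPRIVATE, while

     #pragma omp target defaultmap (none: scalar)

   clears only ctx->defaultmap[GDMK_SCALAR], forcing explicit
   data-sharing or map clauses for scalars referenced in the region. */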
9324
9325 case OMP_CLAUSE_ALIGNED:
9326 decl = OMP_CLAUSE_DECL (c);
9327 if (error_operand_p (decl))
9328 {
9329 remove = true;
9330 break;
9331 }
9332 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9333 is_gimple_val, fb_rvalue) == GS_ERROR)
9334 {
9335 remove = true;
9336 break;
9337 }
9338 if (!is_global_var (decl)
9339 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9340 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9341 break;
9342
9343 case OMP_CLAUSE_NONTEMPORAL:
9344 decl = OMP_CLAUSE_DECL (c);
9345 if (error_operand_p (decl))
9346 {
9347 remove = true;
9348 break;
9349 }
9350 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9351 break;
9352
9353 case OMP_CLAUSE_DEFAULT:
9354 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9355 break;
9356
9357 case OMP_CLAUSE_INCLUSIVE:
9358 case OMP_CLAUSE_EXCLUSIVE:
9359 decl = OMP_CLAUSE_DECL (c);
9360 {
9361 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
9362 (splay_tree_key) decl);
9363 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
9364 {
9365 error_at (OMP_CLAUSE_LOCATION (c),
9366 "%qD specified in %qs clause but not in %<inscan%> "
9367 "%<reduction%> clause on the containing construct",
9368 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
9369 remove = true;
9370 }
9371 else
9372 {
9373 n->value |= GOVD_REDUCTION_INSCAN;
9374 if (outer_ctx->region_type == ORT_SIMD
9375 && outer_ctx->outer_context
9376 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
9377 {
9378 n = splay_tree_lookup (outer_ctx->outer_context->variables,
9379 (splay_tree_key) decl);
9380 if (n && (n->value & GOVD_REDUCTION) != 0)
9381 n->value |= GOVD_REDUCTION_INSCAN;
9382 }
9383 }
9384 }
9385 break;
9386
9387 default:
9388 gcc_unreachable ();
9389 }
9390
9391 if (code == OACC_DATA
9392 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9393 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9394 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9395 remove = true;
9396 if (remove)
9397 *list_p = OMP_CLAUSE_CHAIN (c);
9398 else
9399 list_p = &OMP_CLAUSE_CHAIN (c);
9400 }
9401
9402 ctx->clauses = *orig_list_p;
9403 gimplify_omp_ctxp = ctx;
9404 if (struct_map_to_clause)
9405 delete struct_map_to_clause;
9406 }
9407
9408 /* Return true if DECL is a candidate for shared to firstprivate
9409 optimization. We only consider non-addressable scalars that are
9410 not too big and are not references. */
9411
9412 static bool
9413 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9414 {
9415 if (TREE_ADDRESSABLE (decl))
9416 return false;
9417 tree type = TREE_TYPE (decl);
9418 if (!is_gimple_reg_type (type)
9419 || TREE_CODE (type) == REFERENCE_TYPE
9420 || TREE_ADDRESSABLE (type))
9421 return false;
9422 /* Don't optimize overly large decls, as each thread/task will have
9423 its own copy. */
9424 HOST_WIDE_INT len = int_size_in_bytes (type);
9425 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9426 return false;
9427 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9428 return false;
9429 return true;
9430 }
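/* Editorial illustration of the predicate above (names hypothetical):

     int counter;          // small non-addressable scalar: a candidate
     int big[1024];        // not a gimple register type: rejected
     int *p = &counter;    // &counter makes it TREE_ADDRESSABLE,
                           // so COUNTER is rejected from then on

   The 4 * POINTER_SIZE / BITS_PER_UNIT bound keeps the per-thread/task
   copies cheap, e.g. at most 32 bytes with 64-bit pointers. */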
9431
9432 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9433 For a DECL that satisfies omp_shared_to_firstprivate_optimizable_decl_p,
9434 mark it as GOVD_WRITTEN in outer contexts. */
9435
9436 static void
9437 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9438 {
9439 for (; ctx; ctx = ctx->outer_context)
9440 {
9441 splay_tree_node n = splay_tree_lookup (ctx->variables,
9442 (splay_tree_key) decl);
9443 if (n == NULL)
9444 continue;
9445 else if (n->value & GOVD_SHARED)
9446 {
9447 n->value |= GOVD_WRITTEN;
9448 return;
9449 }
9450 else if (n->value & GOVD_DATA_SHARE_CLASS)
9451 return;
9452 }
9453 }
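/* Editorial sketch of the early returns above:

     int x = 0;
     #pragma omp parallel shared (x)
     #pragma omp task firstprivate (x)
     x = 1;

   The store inside the task hits the task's private copy, so the walk
   stops at the task context (GOVD_FIRSTPRIVATE is in
   GOVD_DATA_SHARE_CLASS) without marking the parallel's X as
   GOVD_WRITTEN. */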
9454
9455 /* Helper callback for walk_gimple_seq to discover possible stores
9456 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9457 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
9458 context. */
9459
9460 static tree
9461 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9462 {
9463 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9464
9465 *walk_subtrees = 0;
9466 if (!wi->is_lhs)
9467 return NULL_TREE;
9468
9469 tree op = *tp;
9470 do
9471 {
9472 if (handled_component_p (op))
9473 op = TREE_OPERAND (op, 0);
9474 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9475 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9476 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9477 else
9478 break;
9479 }
9480 while (1);
9481 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9482 return NULL_TREE;
9483
9484 omp_mark_stores (gimplify_omp_ctxp, op);
9485 return NULL_TREE;
9486 }
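/* Editorial sketch: for a store like S.A[I] = 0, handled_component_p
   peels the ARRAY_REF and COMPONENT_REF down to S; a MEM_REF or
   TARGET_MEM_REF whose address operand is &X peels down to X. Only
   when the remaining base is a decl passing
   omp_shared_to_firstprivate_optimizable_decl_p is it marked. */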
9487
9488 /* Helper callback for walk_gimple_seq to discover possible stores
9489 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9490 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
9491 context. */
9492
9493 static tree
9494 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9495 bool *handled_ops_p,
9496 struct walk_stmt_info *wi)
9497 {
9498 gimple *stmt = gsi_stmt (*gsi_p);
9499 switch (gimple_code (stmt))
9500 {
9501 /* Don't recurse on OpenMP constructs for which
9502 gimplify_adjust_omp_clauses already handled the bodies,
9503 except that gimple_omp_for_pre_body still needs handling. */
9504 case GIMPLE_OMP_FOR:
9505 *handled_ops_p = true;
9506 if (gimple_omp_for_pre_body (stmt))
9507 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9508 omp_find_stores_stmt, omp_find_stores_op, wi);
9509 break;
9510 case GIMPLE_OMP_PARALLEL:
9511 case GIMPLE_OMP_TASK:
9512 case GIMPLE_OMP_SECTIONS:
9513 case GIMPLE_OMP_SINGLE:
9514 case GIMPLE_OMP_TARGET:
9515 case GIMPLE_OMP_TEAMS:
9516 case GIMPLE_OMP_CRITICAL:
9517 *handled_ops_p = true;
9518 break;
9519 default:
9520 break;
9521 }
9522 return NULL_TREE;
9523 }
9524
9525 struct gimplify_adjust_omp_clauses_data
9526 {
9527 tree *list_p;
9528 gimple_seq *pre_p;
9529 };
9530
9531 /* Emit an implicit data-sharing or mapping clause for each variable that
9532 was implicitly determined and actually used within the context. */
9533
9534 static int
9535 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
9536 {
9537 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
9538 gimple_seq *pre_p
9539 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
9540 tree decl = (tree) n->key;
9541 unsigned flags = n->value;
9542 enum omp_clause_code code;
9543 tree clause;
9544 bool private_debug;
9545
9546 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9547 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
9548 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
9549 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
9550 return 0;
9551 if ((flags & GOVD_SEEN) == 0)
9552 return 0;
9553 if (flags & GOVD_DEBUG_PRIVATE)
9554 {
9555 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
9556 private_debug = true;
9557 }
9558 else if (flags & GOVD_MAP)
9559 private_debug = false;
9560 else
9561 private_debug
9562 = lang_hooks.decls.omp_private_debug_clause (decl,
9563 !!(flags & GOVD_SHARED));
9564 if (private_debug)
9565 code = OMP_CLAUSE_PRIVATE;
9566 else if (flags & GOVD_MAP)
9567 {
9568 code = OMP_CLAUSE_MAP;
9569 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9570 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9571 {
9572 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
9573 return 0;
9574 }
9575 }
9576 else if (flags & GOVD_SHARED)
9577 {
9578 if (is_global_var (decl))
9579 {
9580 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9581 while (ctx != NULL)
9582 {
9583 splay_tree_node on
9584 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9585 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
9586 | GOVD_PRIVATE | GOVD_REDUCTION
9587 | GOVD_LINEAR | GOVD_MAP)) != 0)
9588 break;
9589 ctx = ctx->outer_context;
9590 }
9591 if (ctx == NULL)
9592 return 0;
9593 }
9594 code = OMP_CLAUSE_SHARED;
9595 }
9596 else if (flags & GOVD_PRIVATE)
9597 code = OMP_CLAUSE_PRIVATE;
9598 else if (flags & GOVD_FIRSTPRIVATE)
9599 {
9600 code = OMP_CLAUSE_FIRSTPRIVATE;
9601 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
9602 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9603 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9604 {
9605 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9606 "%<target%> construct", decl);
9607 return 0;
9608 }
9609 }
9610 else if (flags & GOVD_LASTPRIVATE)
9611 code = OMP_CLAUSE_LASTPRIVATE;
9612 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
9613 return 0;
9614 else if (flags & GOVD_CONDTEMP)
9615 {
9616 code = OMP_CLAUSE__CONDTEMP_;
9617 gimple_add_tmp_var (decl);
9618 }
9619 else
9620 gcc_unreachable ();
9621
9622 if (((flags & GOVD_LASTPRIVATE)
9623 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
9624 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9625 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9626
9627 tree chain = *list_p;
9628 clause = build_omp_clause (input_location, code);
9629 OMP_CLAUSE_DECL (clause) = decl;
9630 OMP_CLAUSE_CHAIN (clause) = chain;
9631 if (private_debug)
9632 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
9633 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
9634 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
9635 else if (code == OMP_CLAUSE_SHARED
9636 && (flags & GOVD_WRITTEN) == 0
9637 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9638 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
9639 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
9640 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
9641 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
9642 {
9643 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
9644 OMP_CLAUSE_DECL (nc) = decl;
9645 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9646 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9647 OMP_CLAUSE_DECL (clause)
9648 = build_simple_mem_ref_loc (input_location, decl);
9649 OMP_CLAUSE_DECL (clause)
9650 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
9651 build_int_cst (build_pointer_type (char_type_node), 0));
9652 OMP_CLAUSE_SIZE (clause) = size_zero_node;
9653 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9654 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
9655 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
9656 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9657 OMP_CLAUSE_CHAIN (nc) = chain;
9658 OMP_CLAUSE_CHAIN (clause) = nc;
9659 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9660 gimplify_omp_ctxp = ctx->outer_context;
9661 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
9662 pre_p, NULL, is_gimple_val, fb_rvalue);
9663 gimplify_omp_ctxp = ctx;
9664 }
9665 else if (code == OMP_CLAUSE_MAP)
9666 {
9667 int kind;
9668 /* Not all combinations of these GOVD_MAP flags are actually valid. */
9669 switch (flags & (GOVD_MAP_TO_ONLY
9670 | GOVD_MAP_FORCE
9671 | GOVD_MAP_FORCE_PRESENT
9672 | GOVD_MAP_ALLOC_ONLY
9673 | GOVD_MAP_FROM_ONLY))
9674 {
9675 case 0:
9676 kind = GOMP_MAP_TOFROM;
9677 break;
9678 case GOVD_MAP_FORCE:
9679 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
9680 break;
9681 case GOVD_MAP_TO_ONLY:
9682 kind = GOMP_MAP_TO;
9683 break;
9684 case GOVD_MAP_FROM_ONLY:
9685 kind = GOMP_MAP_FROM;
9686 break;
9687 case GOVD_MAP_ALLOC_ONLY:
9688 kind = GOMP_MAP_ALLOC;
9689 break;
9690 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
9691 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
9692 break;
9693 case GOVD_MAP_FORCE_PRESENT:
9694 kind = GOMP_MAP_FORCE_PRESENT;
9695 break;
9696 default:
9697 gcc_unreachable ();
9698 }
9699 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
9700 if (DECL_SIZE (decl)
9701 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9702 {
9703 tree decl2 = DECL_VALUE_EXPR (decl);
9704 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9705 decl2 = TREE_OPERAND (decl2, 0);
9706 gcc_assert (DECL_P (decl2));
9707 tree mem = build_simple_mem_ref (decl2);
9708 OMP_CLAUSE_DECL (clause) = mem;
9709 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9710 if (gimplify_omp_ctxp->outer_context)
9711 {
9712 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9713 omp_notice_variable (ctx, decl2, true);
9714 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
9715 }
9716 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9717 OMP_CLAUSE_MAP);
9718 OMP_CLAUSE_DECL (nc) = decl;
9719 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9720 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
9721 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9722 else
9723 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9724 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9725 OMP_CLAUSE_CHAIN (clause) = nc;
9726 }
9727 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
9728 && lang_hooks.decls.omp_privatize_by_reference (decl))
9729 {
9730 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
9731 OMP_CLAUSE_SIZE (clause)
9732 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
9733 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9734 gimplify_omp_ctxp = ctx->outer_context;
9735 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
9736 pre_p, NULL, is_gimple_val, fb_rvalue);
9737 gimplify_omp_ctxp = ctx;
9738 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9739 OMP_CLAUSE_MAP);
9740 OMP_CLAUSE_DECL (nc) = decl;
9741 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9742 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
9743 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9744 OMP_CLAUSE_CHAIN (clause) = nc;
9745 }
9746 else
9747 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
9748 }
9749 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
9750 {
9751 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
9752 OMP_CLAUSE_DECL (nc) = decl;
9753 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
9754 OMP_CLAUSE_CHAIN (nc) = chain;
9755 OMP_CLAUSE_CHAIN (clause) = nc;
9756 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9757 gimplify_omp_ctxp = ctx->outer_context;
9758 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9759 gimplify_omp_ctxp = ctx;
9760 }
9761 *list_p = clause;
9762 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9763 gimplify_omp_ctxp = ctx->outer_context;
9764 lang_hooks.decls.omp_finish_clause (clause, pre_p);
9765 if (gimplify_omp_ctxp)
9766 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
9767 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
9768 && DECL_P (OMP_CLAUSE_SIZE (clause)))
9769 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
9770 true);
9771 gimplify_omp_ctxp = ctx;
9772 return 0;
9773 }
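/* Editorial sketch: given (foo is a placeholder)

     int x = 42;
     #pragma omp parallel
     foo (x);

   X is implicitly shared and only read, so its splay-tree entry is
   GOVD_SHARED | GOVD_SEEN without GOVD_WRITTEN, and the callback above
   emits an implicit OMP_CLAUSE_SHARED with OMP_CLAUSE_SHARED_READONLY
   set, which later phases may exploit by privatizing the value. */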
9774
9775 static void
9776 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
9777 enum tree_code code)
9778 {
9779 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9780 tree *orig_list_p = list_p;
9781 tree c, decl;
9782 bool has_inscan_reductions = false;
9783
9784 if (body)
9785 {
9786 struct gimplify_omp_ctx *octx;
9787 for (octx = ctx; octx; octx = octx->outer_context)
9788 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
9789 break;
9790 if (octx)
9791 {
9792 struct walk_stmt_info wi;
9793 memset (&wi, 0, sizeof (wi));
9794 walk_gimple_seq (body, omp_find_stores_stmt,
9795 omp_find_stores_op, &wi);
9796 }
9797 }
9798
9799 if (ctx->region_type == ORT_WORKSHARE
9800 && ctx->outer_context
9801 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
9802 {
9803 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
9804 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9805 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
9806 {
9807 decl = OMP_CLAUSE_DECL (c);
9808 splay_tree_node n
9809 = splay_tree_lookup (ctx->outer_context->variables,
9810 (splay_tree_key) decl);
9811 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
9812 (splay_tree_key) decl));
9813 omp_add_variable (ctx, decl, n->value);
9814 tree c2 = copy_node (c);
9815 OMP_CLAUSE_CHAIN (c2) = *list_p;
9816 *list_p = c2;
9817 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
9818 continue;
9819 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9820 OMP_CLAUSE_FIRSTPRIVATE);
9821 OMP_CLAUSE_DECL (c2) = decl;
9822 OMP_CLAUSE_CHAIN (c2) = *list_p;
9823 *list_p = c2;
9824 }
9825 }
9826 while ((c = *list_p) != NULL)
9827 {
9828 splay_tree_node n;
9829 bool remove = false;
9830
9831 switch (OMP_CLAUSE_CODE (c))
9832 {
9833 case OMP_CLAUSE_FIRSTPRIVATE:
9834 if ((ctx->region_type & ORT_TARGET)
9835 && (ctx->region_type & ORT_ACC) == 0
9836 && TYPE_ATOMIC (strip_array_types
9837 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
9838 {
9839 error_at (OMP_CLAUSE_LOCATION (c),
9840 "%<_Atomic%> %qD in %<firstprivate%> clause on "
9841 "%<target%> construct", OMP_CLAUSE_DECL (c));
9842 remove = true;
9843 break;
9844 }
9845 /* FALLTHRU */
9846 case OMP_CLAUSE_PRIVATE:
9847 case OMP_CLAUSE_SHARED:
9848 case OMP_CLAUSE_LINEAR:
9849 decl = OMP_CLAUSE_DECL (c);
9850 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9851 remove = !(n->value & GOVD_SEEN);
9852 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
9853 && code == OMP_PARALLEL
9854 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9855 remove = true;
9856 if (! remove)
9857 {
9858 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
9859 if ((n->value & GOVD_DEBUG_PRIVATE)
9860 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
9861 {
9862 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
9863 || ((n->value & GOVD_DATA_SHARE_CLASS)
9864 == GOVD_SHARED));
9865 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
9866 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
9867 }
9868 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9869 && (n->value & GOVD_WRITTEN) == 0
9870 && DECL_P (decl)
9871 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9872 OMP_CLAUSE_SHARED_READONLY (c) = 1;
9873 else if (DECL_P (decl)
9874 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9875 && (n->value & GOVD_WRITTEN) != 0)
9876 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9877 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
9878 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9879 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9880 }
9881 break;
9882
9883 case OMP_CLAUSE_LASTPRIVATE:
9884 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
9885 accurately reflect the presence of a FIRSTPRIVATE clause. */
9886 decl = OMP_CLAUSE_DECL (c);
9887 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9888 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
9889 = (n->value & GOVD_FIRSTPRIVATE) != 0;
9890 if (code == OMP_DISTRIBUTE
9891 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9892 {
9893 remove = true;
9894 error_at (OMP_CLAUSE_LOCATION (c),
9895 "same variable used in %<firstprivate%> and "
9896 "%<lastprivate%> clauses on %<distribute%> "
9897 "construct");
9898 }
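/* Editorial note: this rejects e.g.

     #pragma omp distribute firstprivate (x) lastprivate (x)

   a combination OpenMP permits on worksharing loops but not on the
   distribute construct. */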
9899 if (!remove
9900 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9901 && DECL_P (decl)
9902 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9903 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9904 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
9905 remove = true;
9906 break;
9907
9908 case OMP_CLAUSE_ALIGNED:
9909 decl = OMP_CLAUSE_DECL (c);
9910 if (!is_global_var (decl))
9911 {
9912 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9913 remove = n == NULL || !(n->value & GOVD_SEEN);
9914 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9915 {
9916 struct gimplify_omp_ctx *octx;
9917 if (n != NULL
9918 && (n->value & (GOVD_DATA_SHARE_CLASS
9919 & ~GOVD_FIRSTPRIVATE)))
9920 remove = true;
9921 else
9922 for (octx = ctx->outer_context; octx;
9923 octx = octx->outer_context)
9924 {
9925 n = splay_tree_lookup (octx->variables,
9926 (splay_tree_key) decl);
9927 if (n == NULL)
9928 continue;
9929 if (n->value & GOVD_LOCAL)
9930 break;
9931 /* We have to avoid assigning a shared variable
9932 to itself when trying to add
9933 __builtin_assume_aligned. */
9934 if (n->value & GOVD_SHARED)
9935 {
9936 remove = true;
9937 break;
9938 }
9939 }
9940 }
9941 }
9942 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
9943 {
9944 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9945 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9946 remove = true;
9947 }
9948 break;
9949
9950 case OMP_CLAUSE_NONTEMPORAL:
9951 decl = OMP_CLAUSE_DECL (c);
9952 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9953 remove = n == NULL || !(n->value & GOVD_SEEN);
9954 break;
9955
9956 case OMP_CLAUSE_MAP:
9957 if (code == OMP_TARGET_EXIT_DATA
9958 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
9959 {
9960 remove = true;
9961 break;
9962 }
9963 decl = OMP_CLAUSE_DECL (c);
9964 /* Data clauses associated with acc parallel reductions must be
9965 compatible with present_or_copy. Warn and adjust the clause
9966 if that is not the case. */
9967 if (ctx->region_type == ORT_ACC_PARALLEL)
9968 {
9969 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9970 n = NULL;
9971
9972 if (DECL_P (t))
9973 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9974
9975 if (n && (n->value & GOVD_REDUCTION))
9976 {
9977 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9978
9979 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9980 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9981 && kind != GOMP_MAP_FORCE_PRESENT
9982 && kind != GOMP_MAP_POINTER)
9983 {
9984 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9985 "incompatible data clause with reduction "
9986 "on %qE; promoting to %<present_or_copy%>",
9987 DECL_NAME (t));
9988 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9989 }
9990 }
9991 }
9992 if (!DECL_P (decl))
9993 {
9994 if ((ctx->region_type & ORT_TARGET) != 0
9995 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9996 {
9997 if (TREE_CODE (decl) == INDIRECT_REF
9998 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9999 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10000 == REFERENCE_TYPE))
10001 decl = TREE_OPERAND (decl, 0);
10002 if (TREE_CODE (decl) == COMPONENT_REF)
10003 {
10004 while (TREE_CODE (decl) == COMPONENT_REF)
10005 decl = TREE_OPERAND (decl, 0);
10006 if (DECL_P (decl))
10007 {
10008 n = splay_tree_lookup (ctx->variables,
10009 (splay_tree_key) decl);
10010 if (!(n->value & GOVD_SEEN))
10011 remove = true;
10012 }
10013 }
10014 }
10015 break;
10016 }
10017 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10018 if ((ctx->region_type & ORT_TARGET) != 0
10019 && !(n->value & GOVD_SEEN)
10020 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
10021 && (!is_global_var (decl)
10022 || !lookup_attribute ("omp declare target link",
10023 DECL_ATTRIBUTES (decl))))
10024 {
10025 remove = true;
10026 /* For struct element mappings, if the struct is never referenced
10027 in the target block and none of the mappings has the always
10028 modifier, remove all the struct element mappings, which
10029 immediately follow the GOMP_MAP_STRUCT map clause. */
10030 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
10031 {
10032 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
10033 while (cnt--)
10034 OMP_CLAUSE_CHAIN (c)
10035 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
10036 }
10037 }
10038 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
10039 && code == OMP_TARGET_EXIT_DATA)
10040 remove = true;
10041 else if (DECL_SIZE (decl)
10042 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
10043 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
10044 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
10045 && (OMP_CLAUSE_MAP_KIND (c)
10046 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10047 {
10048 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10049 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10050 INTEGER_CST. */
10051 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
10052
10053 tree decl2 = DECL_VALUE_EXPR (decl);
10054 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10055 decl2 = TREE_OPERAND (decl2, 0);
10056 gcc_assert (DECL_P (decl2));
10057 tree mem = build_simple_mem_ref (decl2);
10058 OMP_CLAUSE_DECL (c) = mem;
10059 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10060 if (ctx->outer_context)
10061 {
10062 omp_notice_variable (ctx->outer_context, decl2, true);
10063 omp_notice_variable (ctx->outer_context,
10064 OMP_CLAUSE_SIZE (c), true);
10065 }
10066 if (((ctx->region_type & ORT_TARGET) != 0
10067 || !ctx->target_firstprivatize_array_bases)
10068 && ((n->value & GOVD_SEEN) == 0
10069 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
10070 {
10071 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10072 OMP_CLAUSE_MAP);
10073 OMP_CLAUSE_DECL (nc) = decl;
10074 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10075 if (ctx->target_firstprivatize_array_bases)
10076 OMP_CLAUSE_SET_MAP_KIND (nc,
10077 GOMP_MAP_FIRSTPRIVATE_POINTER);
10078 else
10079 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10080 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
10081 OMP_CLAUSE_CHAIN (c) = nc;
10082 c = nc;
10083 }
10084 }
10085 else
10086 {
10087 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10088 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10089 gcc_assert ((n->value & GOVD_SEEN) == 0
10090 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10091 == 0));
10092 }
10093 break;
10094
10095 case OMP_CLAUSE_TO:
10096 case OMP_CLAUSE_FROM:
10097 case OMP_CLAUSE__CACHE_:
10098 decl = OMP_CLAUSE_DECL (c);
10099 if (!DECL_P (decl))
10100 break;
10101 if (DECL_SIZE (decl)
10102 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10103 {
10104 tree decl2 = DECL_VALUE_EXPR (decl);
10105 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10106 decl2 = TREE_OPERAND (decl2, 0);
10107 gcc_assert (DECL_P (decl2));
10108 tree mem = build_simple_mem_ref (decl2);
10109 OMP_CLAUSE_DECL (c) = mem;
10110 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10111 if (ctx->outer_context)
10112 {
10113 omp_notice_variable (ctx->outer_context, decl2, true);
10114 omp_notice_variable (ctx->outer_context,
10115 OMP_CLAUSE_SIZE (c), true);
10116 }
10117 }
10118 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10119 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10120 break;
10121
10122 case OMP_CLAUSE_REDUCTION:
10123 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10124 {
10125 decl = OMP_CLAUSE_DECL (c);
10126 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10127 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
10128 {
10129 remove = true;
10130 error_at (OMP_CLAUSE_LOCATION (c),
10131 "%qD specified in %<inscan%> %<reduction%> clause "
10132 "but not in %<scan%> directive clause", decl);
10133 break;
10134 }
10135 has_inscan_reductions = true;
10136 }
10137 /* FALLTHRU */
10138 case OMP_CLAUSE_IN_REDUCTION:
10139 case OMP_CLAUSE_TASK_REDUCTION:
10140 decl = OMP_CLAUSE_DECL (c);
10141 /* OpenACC reductions need a present_or_copy data clause.
10142 Add one if necessary. Emit an error when the reduction is private. */
10143 if (ctx->region_type == ORT_ACC_PARALLEL)
10144 {
10145 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10146 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10147 {
10148 remove = true;
10149 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
10150 "reduction on %qE", DECL_NAME (decl));
10151 }
10152 else if ((n->value & GOVD_MAP) == 0)
10153 {
10154 tree next = OMP_CLAUSE_CHAIN (c);
10155 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
10156 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
10157 OMP_CLAUSE_DECL (nc) = decl;
10158 OMP_CLAUSE_CHAIN (c) = nc;
10159 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10160 while (1)
10161 {
10162 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
10163 if (OMP_CLAUSE_CHAIN (nc) == NULL)
10164 break;
10165 nc = OMP_CLAUSE_CHAIN (nc);
10166 }
10167 OMP_CLAUSE_CHAIN (nc) = next;
10168 n->value |= GOVD_MAP;
10169 }
10170 }
10171 if (DECL_P (decl)
10172 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10173 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10174 break;
10175 case OMP_CLAUSE_COPYIN:
10176 case OMP_CLAUSE_COPYPRIVATE:
10177 case OMP_CLAUSE_IF:
10178 case OMP_CLAUSE_NUM_THREADS:
10179 case OMP_CLAUSE_NUM_TEAMS:
10180 case OMP_CLAUSE_THREAD_LIMIT:
10181 case OMP_CLAUSE_DIST_SCHEDULE:
10182 case OMP_CLAUSE_DEVICE:
10183 case OMP_CLAUSE_SCHEDULE:
10184 case OMP_CLAUSE_NOWAIT:
10185 case OMP_CLAUSE_ORDERED:
10186 case OMP_CLAUSE_DEFAULT:
10187 case OMP_CLAUSE_UNTIED:
10188 case OMP_CLAUSE_COLLAPSE:
10189 case OMP_CLAUSE_FINAL:
10190 case OMP_CLAUSE_MERGEABLE:
10191 case OMP_CLAUSE_PROC_BIND:
10192 case OMP_CLAUSE_SAFELEN:
10193 case OMP_CLAUSE_SIMDLEN:
10194 case OMP_CLAUSE_DEPEND:
10195 case OMP_CLAUSE_PRIORITY:
10196 case OMP_CLAUSE_GRAINSIZE:
10197 case OMP_CLAUSE_NUM_TASKS:
10198 case OMP_CLAUSE_NOGROUP:
10199 case OMP_CLAUSE_THREADS:
10200 case OMP_CLAUSE_SIMD:
10201 case OMP_CLAUSE_HINT:
10202 case OMP_CLAUSE_DEFAULTMAP:
10203 case OMP_CLAUSE_USE_DEVICE_PTR:
10204 case OMP_CLAUSE_IS_DEVICE_PTR:
10205 case OMP_CLAUSE_ASYNC:
10206 case OMP_CLAUSE_WAIT:
10207 case OMP_CLAUSE_INDEPENDENT:
10208 case OMP_CLAUSE_NUM_GANGS:
10209 case OMP_CLAUSE_NUM_WORKERS:
10210 case OMP_CLAUSE_VECTOR_LENGTH:
10211 case OMP_CLAUSE_GANG:
10212 case OMP_CLAUSE_WORKER:
10213 case OMP_CLAUSE_VECTOR:
10214 case OMP_CLAUSE_AUTO:
10215 case OMP_CLAUSE_SEQ:
10216 case OMP_CLAUSE_TILE:
10217 case OMP_CLAUSE_IF_PRESENT:
10218 case OMP_CLAUSE_FINALIZE:
10219 case OMP_CLAUSE_INCLUSIVE:
10220 case OMP_CLAUSE_EXCLUSIVE:
10221 break;
10222
10223 default:
10224 gcc_unreachable ();
10225 }
10226
10227 if (remove)
10228 *list_p = OMP_CLAUSE_CHAIN (c);
10229 else
10230 list_p = &OMP_CLAUSE_CHAIN (c);
10231 }
10232
10233 /* Add in any implicit data sharing. */
10234 struct gimplify_adjust_omp_clauses_data data;
10235 data.list_p = list_p;
10236 data.pre_p = pre_p;
10237 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
10238
10239 if (has_inscan_reductions)
10240 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
10241 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10242 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10243 {
10244 error_at (OMP_CLAUSE_LOCATION (c),
10245 "%<inscan%> %<reduction%> clause used together with "
10246 "%<linear%> clause for a variable other than loop "
10247 "iterator");
10248 break;
10249 }
10250
10251 gimplify_omp_ctxp = ctx->outer_context;
10252 delete_omp_context (ctx);
10253 }
10254
10255 /* Gimplify OACC_CACHE. */
10256
10257 static void
10258 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10259 {
10260 tree expr = *expr_p;
10261
10262 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
10263 OACC_CACHE);
10264 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10265 OACC_CACHE);
10266
10267 /* TODO: Do something sensible with this information. */
10268
10269 *expr_p = NULL_TREE;
10270 }
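/* Editorial illustration: a directive such as

     #pragma acc cache (a[0:n])

   arrives here as an OACC_CACHE node; its clauses are scanned and
   adjusted like other OpenACC clauses, but per the TODO above the
   construct itself is then dropped from the IL. */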
10271
10272 /* Helper function of gimplify_oacc_declare. Its purpose is to translate,
10273 if required, the 'kind' in CLAUSE into an 'entry' kind and an 'exit'
10274 kind. The entry kind replaces the one in CLAUSE, while the exit
10275 kind is used in a new omp_clause returned to the caller. */
10276
10277 static tree
10278 gimplify_oacc_declare_1 (tree clause)
10279 {
10280 HOST_WIDE_INT kind, new_op;
10281 bool ret = false;
10282 tree c = NULL;
10283
10284 kind = OMP_CLAUSE_MAP_KIND (clause);
10285
10286 switch (kind)
10287 {
10288 case GOMP_MAP_ALLOC:
10289 new_op = GOMP_MAP_RELEASE;
10290 ret = true;
10291 break;
10292
10293 case GOMP_MAP_FROM:
10294 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10295 new_op = GOMP_MAP_FROM;
10296 ret = true;
10297 break;
10298
10299 case GOMP_MAP_TOFROM:
10300 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10301 new_op = GOMP_MAP_FROM;
10302 ret = true;
10303 break;
10304
10305 case GOMP_MAP_DEVICE_RESIDENT:
10306 case GOMP_MAP_FORCE_DEVICEPTR:
10307 case GOMP_MAP_FORCE_PRESENT:
10308 case GOMP_MAP_LINK:
10309 case GOMP_MAP_POINTER:
10310 case GOMP_MAP_TO:
10311 break;
10312
10313 default:
10314 gcc_unreachable ();
10315 break;
10316 }
10317
10318 if (ret)
10319 {
10320 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10321 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10322 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10323 }
10324
10325 return c;
10326 }
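/* Editorial sketch of the translation above: for

     #pragma acc declare copy (x)

   the clause arrives as GOMP_MAP_TOFROM; the switch rewrites the entry
   kind to GOMP_MAP_TO and returns a new exit clause with GOMP_MAP_FROM,
   so X is copied in when the function is entered and copied back out on
   exit. Similarly, GOMP_MAP_ALLOC gains a GOMP_MAP_RELEASE exit
   clause. */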
10327
10328 /* Gimplify OACC_DECLARE. */
10329
10330 static void
10331 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10332 {
10333 tree expr = *expr_p;
10334 gomp_target *stmt;
10335 tree clauses, t, decl;
10336
10337 clauses = OACC_DECLARE_CLAUSES (expr);
10338
10339 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
10340 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
10341
10342 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10343 {
10344 decl = OMP_CLAUSE_DECL (t);
10345
10346 if (TREE_CODE (decl) == MEM_REF)
10347 decl = TREE_OPERAND (decl, 0);
10348
10349 if (VAR_P (decl) && !is_oacc_declared (decl))
10350 {
10351 tree attr = get_identifier ("oacc declare target");
10352 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10353 DECL_ATTRIBUTES (decl));
10354 }
10355
10356 if (VAR_P (decl)
10357 && !is_global_var (decl)
10358 && DECL_CONTEXT (decl) == current_function_decl)
10359 {
10360 tree c = gimplify_oacc_declare_1 (t);
10361 if (c)
10362 {
10363 if (oacc_declare_returns == NULL)
10364 oacc_declare_returns = new hash_map<tree, tree>;
10365
10366 oacc_declare_returns->put (decl, c);
10367 }
10368 }
10369
10370 if (gimplify_omp_ctxp)
10371 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
10372 }
10373
10374 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10375 clauses);
10376
10377 gimplify_seq_add_stmt (pre_p, stmt);
10378
10379 *expr_p = NULL_TREE;
10380 }
10381
10382 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10383 gimplification of the body, as well as scanning the body for used
10384 variables. We need to do this scan now, because variable-sized
10385 decls will be decomposed during gimplification. */
10386
10387 static void
10388 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
10389 {
10390 tree expr = *expr_p;
10391 gimple *g;
10392 gimple_seq body = NULL;
10393
10394 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10395 OMP_PARALLEL_COMBINED (expr)
10396 ? ORT_COMBINED_PARALLEL
10397 : ORT_PARALLEL, OMP_PARALLEL);
10398
10399 push_gimplify_context ();
10400
10401 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10402 if (gimple_code (g) == GIMPLE_BIND)
10403 pop_gimplify_context (g);
10404 else
10405 pop_gimplify_context (NULL);
10406
10407 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
10408 OMP_PARALLEL);
10409
10410 g = gimple_build_omp_parallel (body,
10411 OMP_PARALLEL_CLAUSES (expr),
10412 NULL_TREE, NULL_TREE);
10413 if (OMP_PARALLEL_COMBINED (expr))
10414 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10415 gimplify_seq_add_stmt (pre_p, g);
10416 *expr_p = NULL_TREE;
10417 }
10418
10419 /* Gimplify the contents of an OMP_TASK statement. This involves
10420 gimplification of the body, as well as scanning the body for used
10421 variables. We need to do this scan now, because variable-sized
10422 decls will be decomposed during gimplification. */
10423
10424 static void
10425 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
10426 {
10427 tree expr = *expr_p;
10428 gimple *g;
10429 gimple_seq body = NULL;
10430
10431 if (OMP_TASK_BODY (expr) == NULL_TREE)
10432 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10433 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10434 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10435 {
10436 error_at (OMP_CLAUSE_LOCATION (c),
10437 "%<mutexinoutset%> kind in %<depend%> clause on a "
10438 "%<taskwait%> construct");
10439 break;
10440 }
10441
10442 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
10443 omp_find_clause (OMP_TASK_CLAUSES (expr),
10444 OMP_CLAUSE_UNTIED)
10445 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
10446
10447 if (OMP_TASK_BODY (expr))
10448 {
10449 push_gimplify_context ();
10450
10451 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10452 if (gimple_code (g) == GIMPLE_BIND)
10453 pop_gimplify_context (g);
10454 else
10455 pop_gimplify_context (NULL);
10456 }
10457
10458 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10459 OMP_TASK);
10460
10461 g = gimple_build_omp_task (body,
10462 OMP_TASK_CLAUSES (expr),
10463 NULL_TREE, NULL_TREE,
10464 NULL_TREE, NULL_TREE, NULL_TREE);
10465 if (OMP_TASK_BODY (expr) == NULL_TREE)
10466 gimple_omp_task_set_taskwait_p (g, true);
10467 gimplify_seq_add_stmt (pre_p, g);
10468 *expr_p = NULL_TREE;
10469 }
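/* Editorial sketch: a body-less OMP_TASK represents a taskwait with
   dependences, e.g.

     #pragma omp taskwait depend (in: x)

   for which the code above builds a GIMPLE_OMP_TASK with taskwait_p set
   instead of a task body; a mutexinoutset depend kind on such a
   construct is diagnosed before the clauses are scanned. */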
10470
10471 /* Helper function of gimplify_omp_for: find the OMP_FOR or OMP_SIMD
10472 with non-NULL OMP_FOR_INIT. Also fill in the pdata array:
10473 pdata[0] is non-NULL if there is anything non-trivial in between,
10474 pdata[1] is the address of the OMP_PARALLEL in between if any,
10475 pdata[2] is the address of the OMP_FOR in between if any, and
10476 pdata[3] is the address of the inner OMP_FOR/OMP_SIMD. */
10477
10478 static tree
10479 find_combined_omp_for (tree *tp, int *walk_subtrees, void *data)
10480 {
10481 tree **pdata = (tree **) data;
10482 *walk_subtrees = 0;
10483 switch (TREE_CODE (*tp))
10484 {
10485 case OMP_FOR:
10486 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10487 {
10488 pdata[3] = tp;
10489 return *tp;
10490 }
10491 pdata[2] = tp;
10492 *walk_subtrees = 1;
10493 break;
10494 case OMP_SIMD:
10495 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10496 {
10497 pdata[3] = tp;
10498 return *tp;
10499 }
10500 break;
10501 case BIND_EXPR:
10502 if (BIND_EXPR_VARS (*tp)
10503 || (BIND_EXPR_BLOCK (*tp)
10504 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp))))
10505 pdata[0] = tp;
10506 *walk_subtrees = 1;
10507 break;
10508 case STATEMENT_LIST:
10509 if (!tsi_one_before_end_p (tsi_start (*tp)))
10510 pdata[0] = tp;
10511 *walk_subtrees = 1;
10512 break;
10513 case TRY_FINALLY_EXPR:
10514 pdata[0] = tp;
10515 *walk_subtrees = 1;
10516 break;
10517 case OMP_PARALLEL:
10518 pdata[1] = tp;
10519 *walk_subtrees = 1;
10520 break;
10521 default:
10522 break;
10523 }
10524 return NULL_TREE;
10525 }
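/* Editorial sketch: for a combined construct such as

     #pragma omp target teams distribute parallel for simd
     for (i = 0; i < n; i++)
     ;

   only the innermost loop directive carries a non-NULL OMP_FOR_INIT;
   the outer ones have it cleared. The walker above records the
   OMP_PARALLEL (pdata[1]) and the init-less OMP_FOR (pdata[2]) passed
   on the way down, any non-trivial BIND_EXPR/STATEMENT_LIST/
   TRY_FINALLY_EXPR in pdata[0], and returns the innermost loop through
   pdata[3]. */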
10526
10527 /* Gimplify the gross structure of an OMP_FOR statement. */
10528
10529 static enum gimplify_status
10530 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
10531 {
10532 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
10533 enum gimplify_status ret = GS_ALL_DONE;
10534 enum gimplify_status tret;
10535 gomp_for *gfor;
10536 gimple_seq for_body, for_pre_body;
10537 int i;
10538 bitmap has_decl_expr = NULL;
10539 enum omp_region_type ort = ORT_WORKSHARE;
10540
10541 orig_for_stmt = for_stmt = *expr_p;
10542
10543 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10544 {
10545 tree *data[4] = { NULL, NULL, NULL, NULL };
10546 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
10547 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
10548 find_combined_omp_for, data, NULL);
10549 if (inner_for_stmt == NULL_TREE)
10550 {
10551 gcc_assert (seen_error ());
10552 *expr_p = NULL_TREE;
10553 return GS_ERROR;
10554 }
10555 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
10556 {
10557 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
10558 &OMP_FOR_PRE_BODY (for_stmt));
10559 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
10560 }
10561 if (OMP_FOR_PRE_BODY (inner_for_stmt))
10562 {
10563 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
10564 &OMP_FOR_PRE_BODY (for_stmt));
10565 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
10566 }
10567
10568 if (data[0])
10569 {
10570 /* We have some statements or variable declarations in between
10571 the composite construct directives. Move them around the
10572 inner_for_stmt. */
10573 data[0] = expr_p;
10574 for (i = 0; i < 3; i++)
10575 if (data[i])
10576 {
10577 tree t = *data[i];
10578 if (i < 2 && data[i + 1] == &OMP_BODY (t))
10579 data[i + 1] = data[i];
10580 *data[i] = OMP_BODY (t);
10581 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
10582 NULL_TREE, make_node (BLOCK));
10583 OMP_BODY (t) = body;
10584 append_to_statement_list_force (inner_for_stmt,
10585 &BIND_EXPR_BODY (body));
10586 *data[3] = t;
10587 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
10588 gcc_assert (*data[3] == inner_for_stmt);
10589 }
10590 return GS_OK;
10591 }
10592
10593 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10594 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10595 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10596 i)) == TREE_LIST
10597 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10598 i)))
10599 {
10600 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10601 /* Class iterators aren't allowed on OMP_SIMD, so the only
10602 case we need to solve is distribute parallel for. */
10603 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
10604 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
10605 && data[1]);
10606 tree orig_decl = TREE_PURPOSE (orig);
10607 tree last = TREE_VALUE (orig);
10608 tree *pc;
10609 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
10610 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
10611 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
10612 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
10613 && OMP_CLAUSE_DECL (*pc) == orig_decl)
10614 break;
10615 if (*pc == NULL_TREE)
10616 ;
10617 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
10618 {
10619 /* A private clause will appear only on inner_for_stmt.
10620 Change it into firstprivate, and add a private clause
10621 on for_stmt. */
10622 tree c = copy_node (*pc);
10623 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10624 OMP_FOR_CLAUSES (for_stmt) = c;
10625 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
10626 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10627 }
10628 else
10629 {
10630 /* A lastprivate clause will appear on both inner_for_stmt
10631 and for_stmt. Add a firstprivate clause to
10632 inner_for_stmt. */
10633 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
10634 OMP_CLAUSE_FIRSTPRIVATE);
10635 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
10636 OMP_CLAUSE_CHAIN (c) = *pc;
10637 *pc = c;
10638 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10639 }
10640 tree c = build_omp_clause (UNKNOWN_LOCATION,
10641 OMP_CLAUSE_FIRSTPRIVATE);
10642 OMP_CLAUSE_DECL (c) = last;
10643 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10644 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10645 c = build_omp_clause (UNKNOWN_LOCATION,
10646 *pc ? OMP_CLAUSE_SHARED
10647 : OMP_CLAUSE_FIRSTPRIVATE);
10648 OMP_CLAUSE_DECL (c) = orig_decl;
10649 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10650 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10651 }
10652 /* Similarly, take care of C++ range-for temporaries; those should
10653 be firstprivate on the OMP_PARALLEL if there is one. */
10654 if (data[1])
10655 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10656 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10657 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10658 i)) == TREE_LIST
10659 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10660 i)))
10661 {
10662 tree orig
10663 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10664 tree v = TREE_CHAIN (orig);
10665 tree c = build_omp_clause (UNKNOWN_LOCATION,
10666 OMP_CLAUSE_FIRSTPRIVATE);
10667 /* First add firstprivate clause for the __for_end artificial
10668 decl. */
10669 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
10670 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10671 == REFERENCE_TYPE)
10672 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10673 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10674 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10675 if (TREE_VEC_ELT (v, 0))
10676 {
10677 /* And now the same for __for_range artificial decl if it
10678 exists. */
10679 c = build_omp_clause (UNKNOWN_LOCATION,
10680 OMP_CLAUSE_FIRSTPRIVATE);
10681 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
10682 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10683 == REFERENCE_TYPE)
10684 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10685 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10686 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10687 }
10688 }
10689 }
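/* Illustrative example (hypothetical source): for a range-based loop

       #pragma omp parallel for
       for (auto x : vec) ...

   the C++ front end materializes artificial __for_range and __for_end
   temporaries; the loop above adds firstprivate clauses for them on the
   enclosing OMP_PARALLEL.  */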
10690
10691 switch (TREE_CODE (for_stmt))
10692 {
10693 case OMP_FOR:
10694 case OMP_DISTRIBUTE:
10695 break;
10696 case OACC_LOOP:
10697 ort = ORT_ACC;
10698 break;
10699 case OMP_TASKLOOP:
10700 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
10701 ort = ORT_UNTIED_TASKLOOP;
10702 else
10703 ort = ORT_TASKLOOP;
10704 break;
10705 case OMP_SIMD:
10706 ort = ORT_SIMD;
10707 break;
10708 default:
10709 gcc_unreachable ();
10710 }
10711
10712 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
10713 clause for the IV. */
10714 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10715 {
10716 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
10717 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10718 decl = TREE_OPERAND (t, 0);
10719 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10720 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10721 && OMP_CLAUSE_DECL (c) == decl)
10722 {
10723 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10724 break;
10725 }
10726 }
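/* For instance (illustrative only), given

       #pragma omp simd linear (i)
       for (i = 0; i < n; i++) ...

   the explicit linear clause for the single IV gets
   OMP_CLAUSE_LINEAR_NO_COPYIN set: the IV is initialized by the loop
   itself, so its value need not be copied in from the enclosing
   context.  */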
10727
10728 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
10729 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
10730 TREE_CODE (for_stmt));
10731
10732 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
10733 gimplify_omp_ctxp->distribute = true;
10734
10735 /* Handle OMP_FOR_INIT. */
10736 for_pre_body = NULL;
10737 if ((ort == ORT_SIMD
10738 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
10739 && OMP_FOR_PRE_BODY (for_stmt))
10740 {
10741 has_decl_expr = BITMAP_ALLOC (NULL);
10742 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
10743 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
10744 == VAR_DECL)
10745 {
10746 t = OMP_FOR_PRE_BODY (for_stmt);
10747 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10748 }
10749 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
10750 {
10751 tree_stmt_iterator si;
10752 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
10753 tsi_next (&si))
10754 {
10755 t = tsi_stmt (si);
10756 if (TREE_CODE (t) == DECL_EXPR
10757 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
10758 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10759 }
10760 }
10761 }
10762 if (OMP_FOR_PRE_BODY (for_stmt))
10763 {
10764 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
10765 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10766 else
10767 {
10768 struct gimplify_omp_ctx ctx;
10769 memset (&ctx, 0, sizeof (ctx));
10770 ctx.region_type = ORT_NONE;
10771 gimplify_omp_ctxp = &ctx;
10772 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10773 gimplify_omp_ctxp = NULL;
10774 }
10775 }
10776 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
10777
10778 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10779 for_stmt = inner_for_stmt;
10780
10781 /* For taskloop, we need to gimplify the start, end and step before the
10782 taskloop, outside of the taskloop OMP context. */
10783 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10784 {
10785 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10786 {
10787 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10788 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10789 {
10790 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10791 TREE_OPERAND (t, 1)
10792 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10793 gimple_seq_empty_p (for_pre_body)
10794 ? pre_p : &for_pre_body, NULL,
10795 false);
10796 /* A reference-to-pointer conversion is considered useless,
10797 but is significant for the firstprivate clause. Force it
10798 here. */
10799 if (TREE_CODE (type) == POINTER_TYPE
10800 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10801 == REFERENCE_TYPE))
10802 {
10803 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10804 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10805 TREE_OPERAND (t, 1));
10806 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10807 ? pre_p : &for_pre_body);
10808 TREE_OPERAND (t, 1) = v;
10809 }
10810 tree c = build_omp_clause (input_location,
10811 OMP_CLAUSE_FIRSTPRIVATE);
10812 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10813 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10814 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10815 }
10816
10817 /* Handle OMP_FOR_COND. */
10818 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10819 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10820 {
10821 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10822 TREE_OPERAND (t, 1)
10823 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10824 gimple_seq_empty_p (for_pre_body)
10825 ? pre_p : &for_pre_body, NULL,
10826 false);
10827 /* A reference-to-pointer conversion is considered useless,
10828 but is significant for the firstprivate clause. Force it
10829 here. */
10830 if (TREE_CODE (type) == POINTER_TYPE
10831 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10832 == REFERENCE_TYPE))
10833 {
10834 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10835 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10836 TREE_OPERAND (t, 1));
10837 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10838 ? pre_p : &for_pre_body);
10839 TREE_OPERAND (t, 1) = v;
10840 }
10841 tree c = build_omp_clause (input_location,
10842 OMP_CLAUSE_FIRSTPRIVATE);
10843 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10844 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10845 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10846 }
10847
10848 /* Handle OMP_FOR_INCR. */
10849 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10850 if (TREE_CODE (t) == MODIFY_EXPR)
10851 {
10852 decl = TREE_OPERAND (t, 0);
10853 t = TREE_OPERAND (t, 1);
10854 tree *tp = &TREE_OPERAND (t, 1);
10855 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
10856 tp = &TREE_OPERAND (t, 0);
10857
10858 if (!is_gimple_constant (*tp))
10859 {
10860 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
10861 ? pre_p : &for_pre_body;
10862 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
10863 tree c = build_omp_clause (input_location,
10864 OMP_CLAUSE_FIRSTPRIVATE);
10865 OMP_CLAUSE_DECL (c) = *tp;
10866 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10867 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10868 }
10869 }
10870 }
10871
10872 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
10873 OMP_TASKLOOP);
10874 }
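/* E.g. (hypothetical input), given

       #pragma omp taskloop
       for (i = b; i < e; i += s)

   with non-constant B, E and S, the code above evaluates each operand
   into a temporary before the taskloop, outside of its OMP context, and
   adds a firstprivate clause for each temporary to the construct.  */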
10875
10876 if (orig_for_stmt != for_stmt)
10877 gimplify_omp_ctxp->combined_loop = true;
10878
10879 for_body = NULL;
10880 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10881 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
10882 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10883 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
10884
10885 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
10886 bool is_doacross = false;
10887 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
10888 {
10889 is_doacross = true;
10890 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
10891 (OMP_FOR_INIT (for_stmt))
10892 * 2);
10893 }
10894 int collapse = 1, tile = 0;
10895 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
10896 if (c)
10897 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
10898 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
10899 if (c)
10900 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
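/* For example (illustrative), given

       #pragma omp for ordered (2)
       for (i = 0; i < n; i++)
         for (j = 0; j < m; j++) ...

   the ordered clause has a parameter, so this is a doacross loop:
   loop_iter_var above is sized for one (orig decl, decl) pair per
   dimension, to be checked later against depend (sink: ...) clauses.  */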
10901 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10902 {
10903 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10904 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10905 decl = TREE_OPERAND (t, 0);
10906 gcc_assert (DECL_P (decl));
10907 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
10908 || POINTER_TYPE_P (TREE_TYPE (decl)));
10909 if (is_doacross)
10910 {
10911 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
10912 {
10913 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
10914 if (TREE_CODE (orig_decl) == TREE_LIST)
10915 {
10916 orig_decl = TREE_PURPOSE (orig_decl);
10917 if (!orig_decl)
10918 orig_decl = decl;
10919 }
10920 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
10921 }
10922 else
10923 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10924 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10925 }
10926
10927 /* Make sure the iteration variable is private. */
10928 tree c = NULL_TREE;
10929 tree c2 = NULL_TREE;
10930 if (orig_for_stmt != for_stmt)
10931 {
10932 /* Preserve this information until we gimplify the inner simd. */
10933 if (has_decl_expr
10934 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10935 TREE_PRIVATE (t) = 1;
10936 }
10937 else if (ort == ORT_SIMD)
10938 {
10939 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10940 (splay_tree_key) decl);
10941 omp_is_private (gimplify_omp_ctxp, decl,
10942 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10943 != 1));
10944 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10945 {
10946 omp_notice_variable (gimplify_omp_ctxp, decl, true);
10947 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
10948 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
10949 OMP_CLAUSE_LASTPRIVATE);
10950 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
10951 OMP_CLAUSE_LASTPRIVATE))
10952 if (OMP_CLAUSE_DECL (c3) == decl)
10953 {
10954 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
10955 "conditional %<lastprivate%> on loop "
10956 "iterator %qD ignored", decl);
10957 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
10958 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
10959 }
10960 }
10961 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10962 {
10963 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
10964 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10965 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
10966 if ((has_decl_expr
10967 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10968 || TREE_PRIVATE (t))
10969 {
10970 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10971 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10972 }
10973 struct gimplify_omp_ctx *outer
10974 = gimplify_omp_ctxp->outer_context;
10975 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
10976 {
10977 if (outer->region_type == ORT_WORKSHARE
10978 && outer->combined_loop)
10979 {
10980 n = splay_tree_lookup (outer->variables,
10981 (splay_tree_key)decl);
10982 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10983 {
10984 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10985 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10986 }
10987 else
10988 {
10989 struct gimplify_omp_ctx *octx = outer->outer_context;
10990 if (octx
10991 && octx->region_type == ORT_COMBINED_PARALLEL
10992 && octx->outer_context
10993 && (octx->outer_context->region_type
10994 == ORT_WORKSHARE)
10995 && octx->outer_context->combined_loop)
10996 {
10997 octx = octx->outer_context;
10998 n = splay_tree_lookup (octx->variables,
10999 (splay_tree_key)decl);
11000 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11001 {
11002 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11003 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11004 }
11005 }
11006 }
11007 }
11008 }
11009
11010 OMP_CLAUSE_DECL (c) = decl;
11011 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11012 OMP_FOR_CLAUSES (for_stmt) = c;
11013 omp_add_variable (gimplify_omp_ctxp, decl, flags);
11014 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11015 {
11016 if (outer->region_type == ORT_WORKSHARE
11017 && outer->combined_loop)
11018 {
11019 if (outer->outer_context
11020 && (outer->outer_context->region_type
11021 == ORT_COMBINED_PARALLEL))
11022 outer = outer->outer_context;
11023 else if (omp_check_private (outer, decl, false))
11024 outer = NULL;
11025 }
11026 else if (((outer->region_type & ORT_TASKLOOP)
11027 == ORT_TASKLOOP)
11028 && outer->combined_loop
11029 && !omp_check_private (gimplify_omp_ctxp,
11030 decl, false))
11031 ;
11032 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11033 {
11034 omp_notice_variable (outer, decl, true);
11035 outer = NULL;
11036 }
11037 if (outer)
11038 {
11039 n = splay_tree_lookup (outer->variables,
11040 (splay_tree_key)decl);
11041 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11042 {
11043 omp_add_variable (outer, decl,
11044 GOVD_LASTPRIVATE | GOVD_SEEN);
11045 if (outer->region_type == ORT_COMBINED_PARALLEL
11046 && outer->outer_context
11047 && (outer->outer_context->region_type
11048 == ORT_WORKSHARE)
11049 && outer->outer_context->combined_loop)
11050 {
11051 outer = outer->outer_context;
11052 n = splay_tree_lookup (outer->variables,
11053 (splay_tree_key)decl);
11054 if (omp_check_private (outer, decl, false))
11055 outer = NULL;
11056 else if (n == NULL
11057 || ((n->value & GOVD_DATA_SHARE_CLASS)
11058 == 0))
11059 omp_add_variable (outer, decl,
11060 GOVD_LASTPRIVATE
11061 | GOVD_SEEN);
11062 else
11063 outer = NULL;
11064 }
11065 if (outer && outer->outer_context
11066 && ((outer->outer_context->region_type
11067 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11068 || (((outer->region_type & ORT_TASKLOOP)
11069 == ORT_TASKLOOP)
11070 && (outer->outer_context->region_type
11071 == ORT_COMBINED_PARALLEL))))
11072 {
11073 outer = outer->outer_context;
11074 n = splay_tree_lookup (outer->variables,
11075 (splay_tree_key)decl);
11076 if (n == NULL
11077 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11078 omp_add_variable (outer, decl,
11079 GOVD_SHARED | GOVD_SEEN);
11080 else
11081 outer = NULL;
11082 }
11083 if (outer && outer->outer_context)
11084 omp_notice_variable (outer->outer_context, decl,
11085 true);
11086 }
11087 }
11088 }
11089 }
11090 else
11091 {
11092 bool lastprivate
11093 = (!has_decl_expr
11094 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
11095 if (TREE_PRIVATE (t))
11096 lastprivate = false;
11097 struct gimplify_omp_ctx *outer
11098 = gimplify_omp_ctxp->outer_context;
11099 if (outer && lastprivate)
11100 {
11101 if (outer->region_type == ORT_WORKSHARE
11102 && outer->combined_loop)
11103 {
11104 n = splay_tree_lookup (outer->variables,
11105 (splay_tree_key)decl);
11106 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11107 {
11108 lastprivate = false;
11109 outer = NULL;
11110 }
11111 else if (outer->outer_context
11112 && (outer->outer_context->region_type
11113 == ORT_COMBINED_PARALLEL))
11114 outer = outer->outer_context;
11115 else if (omp_check_private (outer, decl, false))
11116 outer = NULL;
11117 }
11118 else if (((outer->region_type & ORT_TASKLOOP)
11119 == ORT_TASKLOOP)
11120 && outer->combined_loop
11121 && !omp_check_private (gimplify_omp_ctxp,
11122 decl, false))
11123 ;
11124 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11125 {
11126 omp_notice_variable (outer, decl, true);
11127 outer = NULL;
11128 }
11129 if (outer)
11130 {
11131 n = splay_tree_lookup (outer->variables,
11132 (splay_tree_key)decl);
11133 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11134 {
11135 omp_add_variable (outer, decl,
11136 GOVD_LASTPRIVATE | GOVD_SEEN);
11137 if (outer->region_type == ORT_COMBINED_PARALLEL
11138 && outer->outer_context
11139 && (outer->outer_context->region_type
11140 == ORT_WORKSHARE)
11141 && outer->outer_context->combined_loop)
11142 {
11143 outer = outer->outer_context;
11144 n = splay_tree_lookup (outer->variables,
11145 (splay_tree_key)decl);
11146 if (omp_check_private (outer, decl, false))
11147 outer = NULL;
11148 else if (n == NULL
11149 || ((n->value & GOVD_DATA_SHARE_CLASS)
11150 == 0))
11151 omp_add_variable (outer, decl,
11152 GOVD_LASTPRIVATE
11153 | GOVD_SEEN);
11154 else
11155 outer = NULL;
11156 }
11157 if (outer && outer->outer_context
11158 && ((outer->outer_context->region_type
11159 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11160 || (((outer->region_type & ORT_TASKLOOP)
11161 == ORT_TASKLOOP)
11162 && (outer->outer_context->region_type
11163 == ORT_COMBINED_PARALLEL))))
11164 {
11165 outer = outer->outer_context;
11166 n = splay_tree_lookup (outer->variables,
11167 (splay_tree_key)decl);
11168 if (n == NULL
11169 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11170 omp_add_variable (outer, decl,
11171 GOVD_SHARED | GOVD_SEEN);
11172 else
11173 outer = NULL;
11174 }
11175 if (outer && outer->outer_context)
11176 omp_notice_variable (outer->outer_context, decl,
11177 true);
11178 }
11179 }
11180 }
11181
11182 c = build_omp_clause (input_location,
11183 lastprivate ? OMP_CLAUSE_LASTPRIVATE
11184 : OMP_CLAUSE_PRIVATE);
11185 OMP_CLAUSE_DECL (c) = decl;
11186 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11187 OMP_FOR_CLAUSES (for_stmt) = c;
11188 omp_add_variable (gimplify_omp_ctxp, decl,
11189 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
11190 | GOVD_EXPLICIT | GOVD_SEEN);
11191 c = NULL_TREE;
11192 }
11193 }
11194 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
11195 {
11196 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11197 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11198 (splay_tree_key) decl);
11199 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
11200 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11201 OMP_CLAUSE_LASTPRIVATE);
11202 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11203 OMP_CLAUSE_LASTPRIVATE))
11204 if (OMP_CLAUSE_DECL (c3) == decl)
11205 {
11206 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11207 "conditional %<lastprivate%> on loop "
11208 "iterator %qD ignored", decl);
11209 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11210 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11211 }
11212 }
11213 else
11214 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
11215
11216 /* If DECL is not a gimple register, create a temporary variable to act
11217 as an iteration counter. This is valid, since DECL cannot be
11218 modified in the body of the loop. Similarly for any iteration vars
11219 in simd with collapse > 1 where the iterator vars must be
11220 lastprivate. */
11221 if (orig_for_stmt != for_stmt)
11222 var = decl;
11223 else if (!is_gimple_reg (decl)
11224 || (ort == ORT_SIMD
11225 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
11226 {
11227 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11228 /* Make sure omp_add_variable is not called on it prematurely.
11229 We call it ourselves a few lines later. */
11230 gimplify_omp_ctxp = NULL;
11231 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11232 gimplify_omp_ctxp = ctx;
11233 TREE_OPERAND (t, 0) = var;
11234
11235 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
11236
11237 if (ort == ORT_SIMD
11238 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11239 {
11240 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11241 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
11242 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
11243 OMP_CLAUSE_DECL (c2) = var;
11244 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
11245 OMP_FOR_CLAUSES (for_stmt) = c2;
11246 omp_add_variable (gimplify_omp_ctxp, var,
11247 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
11248 if (c == NULL_TREE)
11249 {
11250 c = c2;
11251 c2 = NULL_TREE;
11252 }
11253 }
11254 else
11255 omp_add_variable (gimplify_omp_ctxp, var,
11256 GOVD_PRIVATE | GOVD_SEEN);
11257 }
11258 else
11259 var = decl;
11260
11261 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11262 is_gimple_val, fb_rvalue, false);
11263 ret = MIN (ret, tret);
11264 if (ret == GS_ERROR)
11265 return ret;
11266
11267 /* Handle OMP_FOR_COND. */
11268 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11269 gcc_assert (COMPARISON_CLASS_P (t));
11270 gcc_assert (TREE_OPERAND (t, 0) == decl);
11271
11272 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11273 is_gimple_val, fb_rvalue, false);
11274 ret = MIN (ret, tret);
11275
11276 /* Handle OMP_FOR_INCR. */
11277 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11278 switch (TREE_CODE (t))
11279 {
11280 case PREINCREMENT_EXPR:
11281 case POSTINCREMENT_EXPR:
11282 {
11283 tree decl = TREE_OPERAND (t, 0);
11284 /* c_omp_for_incr_canonicalize_ptr() should have been
11285 called to massage things appropriately. */
11286 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11287
11288 if (orig_for_stmt != for_stmt)
11289 break;
11290 t = build_int_cst (TREE_TYPE (decl), 1);
11291 if (c)
11292 OMP_CLAUSE_LINEAR_STEP (c) = t;
11293 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11294 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11295 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11296 break;
11297 }
11298
11299 case PREDECREMENT_EXPR:
11300 case POSTDECREMENT_EXPR:
11301 /* c_omp_for_incr_canonicalize_ptr() should have been
11302 called to massage things appropriately. */
11303 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11304 if (orig_for_stmt != for_stmt)
11305 break;
11306 t = build_int_cst (TREE_TYPE (decl), -1);
11307 if (c)
11308 OMP_CLAUSE_LINEAR_STEP (c) = t;
11309 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11310 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11311 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11312 break;
11313
11314 case MODIFY_EXPR:
11315 gcc_assert (TREE_OPERAND (t, 0) == decl);
11316 TREE_OPERAND (t, 0) = var;
11317
11318 t = TREE_OPERAND (t, 1);
11319 switch (TREE_CODE (t))
11320 {
11321 case PLUS_EXPR:
11322 if (TREE_OPERAND (t, 1) == decl)
11323 {
11324 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11325 TREE_OPERAND (t, 0) = var;
11326 break;
11327 }
11328
11329 /* Fallthru. */
11330 case MINUS_EXPR:
11331 case POINTER_PLUS_EXPR:
11332 gcc_assert (TREE_OPERAND (t, 0) == decl);
11333 TREE_OPERAND (t, 0) = var;
11334 break;
11335 default:
11336 gcc_unreachable ();
11337 }
11338
11339 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11340 is_gimple_val, fb_rvalue, false);
11341 ret = MIN (ret, tret);
11342 if (c)
11343 {
11344 tree step = TREE_OPERAND (t, 1);
11345 tree stept = TREE_TYPE (decl);
11346 if (POINTER_TYPE_P (stept))
11347 stept = sizetype;
11348 step = fold_convert (stept, step);
11349 if (TREE_CODE (t) == MINUS_EXPR)
11350 step = fold_build1 (NEGATE_EXPR, stept, step);
11351 OMP_CLAUSE_LINEAR_STEP (c) = step;
11352 if (step != TREE_OPERAND (t, 1))
11353 {
11354 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11355 &for_pre_body, NULL,
11356 is_gimple_val, fb_rvalue, false);
11357 ret = MIN (ret, tret);
11358 }
11359 }
11360 break;
11361
11362 default:
11363 gcc_unreachable ();
11364 }
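/* To illustrate the canonicalization above (hypothetical simd loop):
   for "i++" the increment becomes "i = i + 1" with a linear step of 1,
   while for "i -= s" under a linear clause the recorded
   OMP_CLAUSE_LINEAR_STEP is the negated (and, if needed, gimplified)
   step "-s".  */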
11365
11366 if (c2)
11367 {
11368 gcc_assert (c);
11369 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11370 }
11371
11372 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
11373 {
11374 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
11375 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11376 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11377 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11378 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11379 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11380 && OMP_CLAUSE_DECL (c) == decl)
11381 {
11382 if (is_doacross && (collapse == 1 || i >= collapse))
11383 t = var;
11384 else
11385 {
11386 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11387 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11388 gcc_assert (TREE_OPERAND (t, 0) == var);
11389 t = TREE_OPERAND (t, 1);
11390 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11391 || TREE_CODE (t) == MINUS_EXPR
11392 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11393 gcc_assert (TREE_OPERAND (t, 0) == var);
11394 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11395 is_doacross ? var : decl,
11396 TREE_OPERAND (t, 1));
11397 }
11398 gimple_seq *seq;
11399 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11400 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11401 else
11402 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
11403 push_gimplify_context ();
11404 gimplify_assign (decl, t, seq);
11405 gimple *bind = NULL;
11406 if (gimplify_ctxp->temps)
11407 {
11408 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
11409 *seq = NULL;
11410 gimplify_seq_add_stmt (seq, bind);
11411 }
11412 pop_gimplify_context (bind);
11413 }
11414 }
11415 }
11416
11417 BITMAP_FREE (has_decl_expr);
11418
11419 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11420 {
11421 push_gimplify_context ();
11422 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11423 {
11424 OMP_FOR_BODY (orig_for_stmt)
11425 = build3 (BIND_EXPR, void_type_node, NULL,
11426 OMP_FOR_BODY (orig_for_stmt), NULL);
11427 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11428 }
11429 }
11430
11431 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11432 &for_body);
11433
11434 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11435 {
11436 if (gimple_code (g) == GIMPLE_BIND)
11437 pop_gimplify_context (g);
11438 else
11439 pop_gimplify_context (NULL);
11440 }
11441
11442 if (orig_for_stmt != for_stmt)
11443 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11444 {
11445 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11446 decl = TREE_OPERAND (t, 0);
11447 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11448 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11449 gimplify_omp_ctxp = ctx->outer_context;
11450 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11451 gimplify_omp_ctxp = ctx;
11452 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11453 TREE_OPERAND (t, 0) = var;
11454 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11455 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
11456 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
11457 }
11458
11459 gimplify_adjust_omp_clauses (pre_p, for_body,
11460 &OMP_FOR_CLAUSES (orig_for_stmt),
11461 TREE_CODE (orig_for_stmt));
11462
11463 int kind;
11464 switch (TREE_CODE (orig_for_stmt))
11465 {
11466 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
11467 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
11468 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
11469 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
11470 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
11471 default:
11472 gcc_unreachable ();
11473 }
11474 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
11475 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
11476 for_pre_body);
11477 if (orig_for_stmt != for_stmt)
11478 gimple_omp_for_set_combined_p (gfor, true);
11479 if (gimplify_omp_ctxp
11480 && (gimplify_omp_ctxp->combined_loop
11481 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
11482 && gimplify_omp_ctxp->outer_context
11483 && gimplify_omp_ctxp->outer_context->combined_loop)))
11484 {
11485 gimple_omp_for_set_combined_into_p (gfor, true);
11486 if (gimplify_omp_ctxp->combined_loop)
11487 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
11488 else
11489 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
11490 }
11491
11492 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11493 {
11494 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11495 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
11496 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
11497 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11498 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
11499 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
11500 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11501 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
11502 }
11503
11504 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
11505 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
11506 The outer taskloop is responsible for computing the number of
11507 iterations and the counts for collapsed loops, and for holding the
11508 taskloop-specific clauses. The task construct represents the effect
11509 of data sharing on the explicit task it creates, and the inner
11510 taskloop represents the expansion of the static loop inside it. */
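/* Schematically, a simplified sketch of the result:

       GIMPLE_OMP_FOR (taskloop, outer: grainsize/num_tasks/collapse)
         GIMPLE_BIND
           GIMPLE_OMP_TASK (shared/firstprivate/... clauses)
             GIMPLE_BIND
               GIMPLE_OMP_FOR (taskloop, inner: lastprivate/collapse)
                 <loop body>  */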
11511 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11512 {
11513 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
11514 tree task_clauses = NULL_TREE;
11515 tree c = *gfor_clauses_ptr;
11516 tree *gtask_clauses_ptr = &task_clauses;
11517 tree outer_for_clauses = NULL_TREE;
11518 tree *gforo_clauses_ptr = &outer_for_clauses;
11519 for (; c; c = OMP_CLAUSE_CHAIN (c))
11520 switch (OMP_CLAUSE_CODE (c))
11521 {
11522 /* These clauses are allowed on task, move them there. */
11523 case OMP_CLAUSE_SHARED:
11524 case OMP_CLAUSE_FIRSTPRIVATE:
11525 case OMP_CLAUSE_DEFAULT:
11526 case OMP_CLAUSE_IF:
11527 case OMP_CLAUSE_UNTIED:
11528 case OMP_CLAUSE_FINAL:
11529 case OMP_CLAUSE_MERGEABLE:
11530 case OMP_CLAUSE_PRIORITY:
11531 case OMP_CLAUSE_REDUCTION:
11532 case OMP_CLAUSE_IN_REDUCTION:
11533 *gtask_clauses_ptr = c;
11534 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11535 break;
11536 case OMP_CLAUSE_PRIVATE:
11537 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
11538 {
11539 /* We want private on outer for and firstprivate
11540 on task. */
11541 *gtask_clauses_ptr
11542 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11543 OMP_CLAUSE_FIRSTPRIVATE);
11544 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11545 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11546 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11547 *gforo_clauses_ptr = c;
11548 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11549 }
11550 else
11551 {
11552 *gtask_clauses_ptr = c;
11553 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11554 }
11555 break;
11556 /* These clauses go into outer taskloop clauses. */
11557 case OMP_CLAUSE_GRAINSIZE:
11558 case OMP_CLAUSE_NUM_TASKS:
11559 case OMP_CLAUSE_NOGROUP:
11560 *gforo_clauses_ptr = c;
11561 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11562 break;
11563 /* Taskloop clause we duplicate on both taskloops. */
11564 case OMP_CLAUSE_COLLAPSE:
11565 *gfor_clauses_ptr = c;
11566 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11567 *gforo_clauses_ptr = copy_node (c);
11568 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11569 break;
11570 /* For lastprivate, keep the clause on the inner taskloop, and add
11571 a shared clause on the task. If the same decl is also firstprivate,
11572 also add a firstprivate clause on the inner taskloop. */
11573 case OMP_CLAUSE_LASTPRIVATE:
11574 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
11575 {
11576 /* For taskloop C++ lastprivate IVs, we want:
11577 1) private on outer taskloop
11578 2) firstprivate and shared on task
11579 3) lastprivate on inner taskloop */
11580 *gtask_clauses_ptr
11581 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11582 OMP_CLAUSE_FIRSTPRIVATE);
11583 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11584 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11585 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11586 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
11587 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11588 OMP_CLAUSE_PRIVATE);
11589 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
11590 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
11591 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
11592 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11593 }
11594 *gfor_clauses_ptr = c;
11595 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11596 *gtask_clauses_ptr
11597 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
11598 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11599 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
11600 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
11601 gtask_clauses_ptr
11602 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11603 break;
11604 default:
11605 gcc_unreachable ();
11606 }
11607 *gfor_clauses_ptr = NULL_TREE;
11608 *gtask_clauses_ptr = NULL_TREE;
11609 *gforo_clauses_ptr = NULL_TREE;
11610 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
11611 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
11612 NULL_TREE, NULL_TREE, NULL_TREE);
11613 gimple_omp_task_set_taskloop_p (g, true);
11614 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
11615 gomp_for *gforo
11616 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
11617 gimple_omp_for_collapse (gfor),
11618 gimple_omp_for_pre_body (gfor));
11619 gimple_omp_for_set_pre_body (gfor, NULL);
11620 gimple_omp_for_set_combined_p (gforo, true);
11621 gimple_omp_for_set_combined_into_p (gfor, true);
11622 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
11623 {
11624 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
11625 tree v = create_tmp_var (type);
11626 gimple_omp_for_set_index (gforo, i, v);
11627 t = unshare_expr (gimple_omp_for_initial (gfor, i));
11628 gimple_omp_for_set_initial (gforo, i, t);
11629 gimple_omp_for_set_cond (gforo, i,
11630 gimple_omp_for_cond (gfor, i));
11631 t = unshare_expr (gimple_omp_for_final (gfor, i));
11632 gimple_omp_for_set_final (gforo, i, t);
11633 t = unshare_expr (gimple_omp_for_incr (gfor, i));
11634 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
11635 TREE_OPERAND (t, 0) = v;
11636 gimple_omp_for_set_incr (gforo, i, t);
11637 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
11638 OMP_CLAUSE_DECL (t) = v;
11639 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
11640 gimple_omp_for_set_clauses (gforo, t);
11641 }
11642 gimplify_seq_add_stmt (pre_p, gforo);
11643 }
11644 else
11645 gimplify_seq_add_stmt (pre_p, gfor);
11646
11647 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
11648 {
11649 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11650 unsigned lastprivate_conditional = 0;
11651 while (ctx
11652 && (ctx->region_type == ORT_TARGET_DATA
11653 || ctx->region_type == ORT_TASKGROUP))
11654 ctx = ctx->outer_context;
11655 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
11656 for (tree c = gimple_omp_for_clauses (gfor);
11657 c; c = OMP_CLAUSE_CHAIN (c))
11658 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11659 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
11660 ++lastprivate_conditional;
11661 if (lastprivate_conditional)
11662 {
11663 struct omp_for_data fd;
11664 omp_extract_for_data (gfor, &fd, NULL);
11665 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
11666 lastprivate_conditional);
11667 tree var = create_tmp_var_raw (type);
11668 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
11669 OMP_CLAUSE_DECL (c) = var;
11670 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
11671 gimple_omp_for_set_clauses (gfor, c);
11672 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
11673 }
11674 }
11675 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
11676 {
11677 unsigned lastprivate_conditional = 0;
11678 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
11679 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11680 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
11681 ++lastprivate_conditional;
11682 if (lastprivate_conditional)
11683 {
11684 struct omp_for_data fd;
11685 omp_extract_for_data (gfor, &fd, NULL);
11686 tree type = unsigned_type_for (fd.iter_type);
11687 while (lastprivate_conditional--)
11688 {
11689 tree c = build_omp_clause (UNKNOWN_LOCATION,
11690 OMP_CLAUSE__CONDTEMP_);
11691 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
11692 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
11693 gimple_omp_for_set_clauses (gfor, c);
11694 }
11695 }
11696 }
11697
11698 if (ret != GS_ALL_DONE)
11699 return GS_ERROR;
11700 *expr_p = NULL_TREE;
11701 return GS_ALL_DONE;
11702 }
11703
11704 /* Helper function of optimize_target_teams: find OMP_TEAMS inside
11705 of OMP_TARGET's body. */
11706
11707 static tree
11708 find_omp_teams (tree *tp, int *walk_subtrees, void *)
11709 {
11710 *walk_subtrees = 0;
11711 switch (TREE_CODE (*tp))
11712 {
11713 case OMP_TEAMS:
11714 return *tp;
11715 case BIND_EXPR:
11716 case STATEMENT_LIST:
11717 *walk_subtrees = 1;
11718 break;
11719 default:
11720 break;
11721 }
11722 return NULL_TREE;
11723 }
11724
11725 /* Helper function of optimize_target_teams: determine if the expression
11726 can be computed safely before the target construct on the host. */
11727
11728 static tree
11729 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
11730 {
11731 splay_tree_node n;
11732
11733 if (TYPE_P (*tp))
11734 {
11735 *walk_subtrees = 0;
11736 return NULL_TREE;
11737 }
11738 switch (TREE_CODE (*tp))
11739 {
11740 case VAR_DECL:
11741 case PARM_DECL:
11742 case RESULT_DECL:
11743 *walk_subtrees = 0;
11744 if (error_operand_p (*tp)
11745 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
11746 || DECL_HAS_VALUE_EXPR_P (*tp)
11747 || DECL_THREAD_LOCAL_P (*tp)
11748 || TREE_SIDE_EFFECTS (*tp)
11749 || TREE_THIS_VOLATILE (*tp))
11750 return *tp;
11751 if (is_global_var (*tp)
11752 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
11753 || lookup_attribute ("omp declare target link",
11754 DECL_ATTRIBUTES (*tp))))
11755 return *tp;
11756 if (VAR_P (*tp)
11757 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
11758 && !is_global_var (*tp)
11759 && decl_function_context (*tp) == current_function_decl)
11760 return *tp;
11761 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11762 (splay_tree_key) *tp);
11763 if (n == NULL)
11764 {
11765 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
11766 return NULL_TREE;
11767 return *tp;
11768 }
11769 else if (n->value & GOVD_LOCAL)
11770 return *tp;
11771 else if (n->value & GOVD_FIRSTPRIVATE)
11772 return NULL_TREE;
11773 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
11774 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
11775 return NULL_TREE;
11776 return *tp;
11777 case INTEGER_CST:
11778 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
11779 return *tp;
11780 return NULL_TREE;
11781 case TARGET_EXPR:
11782 if (TARGET_EXPR_INITIAL (*tp)
11783 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
11784 return *tp;
11785 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
11786 walk_subtrees, NULL);
11787 /* Allow some reasonable subset of integral arithmetic. */
11788 case PLUS_EXPR:
11789 case MINUS_EXPR:
11790 case MULT_EXPR:
11791 case TRUNC_DIV_EXPR:
11792 case CEIL_DIV_EXPR:
11793 case FLOOR_DIV_EXPR:
11794 case ROUND_DIV_EXPR:
11795 case TRUNC_MOD_EXPR:
11796 case CEIL_MOD_EXPR:
11797 case FLOOR_MOD_EXPR:
11798 case ROUND_MOD_EXPR:
11799 case RDIV_EXPR:
11800 case EXACT_DIV_EXPR:
11801 case MIN_EXPR:
11802 case MAX_EXPR:
11803 case LSHIFT_EXPR:
11804 case RSHIFT_EXPR:
11805 case BIT_IOR_EXPR:
11806 case BIT_XOR_EXPR:
11807 case BIT_AND_EXPR:
11808 case NEGATE_EXPR:
11809 case ABS_EXPR:
11810 case BIT_NOT_EXPR:
11811 case NON_LVALUE_EXPR:
11812 CASE_CONVERT:
11813 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
11814 return *tp;
11815 return NULL_TREE;
11816 /* And disallow anything else, except for comparisons. */
11817 default:
11818 if (COMPARISON_CLASS_P (*tp))
11819 return NULL_TREE;
11820 return *tp;
11821 }
11822 }
11823
11824 /* Try to determine whether the num_teams and/or thread_limit
11825 expressions can have their values determined already before
11826 entering the target construct.
11827 INTEGER_CSTs trivially can; so can integral decls that are
11828 firstprivate (explicitly or implicitly) or explicitly
11829 map(always, to:) or map(always, tofrom:) on the target region,
11830 and expressions involving simple arithmetic on those. Function
11831 calls are not OK, nor is dereferencing something, etc.
11832 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
11833 TARGET based on what we find:
11834 0 stands for a clause not specified at all; use the
11835 implementation default. -1 stands for a value that can't be
11836 determined easily before entering the target construct.
11837 If no teams construct is present at all, use 1 for num_teams
11838 and 0 for thread_limit (only one team is involved, and the
11839 thread limit is implementation defined). */
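/* Example (illustrative, not from the sources): for

       #pragma omp target teams num_teams (4) thread_limit (n)

   num_teams becomes the INTEGER_CST 4, while thread_limit becomes N,
   evaluated on the host before entering the region, provided N is
   firstprivate or mapped always-to on the target; otherwise it is set
   to -1.  */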
11840
11841 static void
11842 optimize_target_teams (tree target, gimple_seq *pre_p)
11843 {
11844 tree body = OMP_BODY (target);
11845 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
11846 tree num_teams = integer_zero_node;
11847 tree thread_limit = integer_zero_node;
11848 location_t num_teams_loc = EXPR_LOCATION (target);
11849 location_t thread_limit_loc = EXPR_LOCATION (target);
11850 tree c, *p, expr;
11851 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
11852
11853 if (teams == NULL_TREE)
11854 num_teams = integer_one_node;
11855 else
11856 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
11857 {
11858 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
11859 {
11860 p = &num_teams;
11861 num_teams_loc = OMP_CLAUSE_LOCATION (c);
11862 }
11863 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
11864 {
11865 p = &thread_limit;
11866 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
11867 }
11868 else
11869 continue;
11870 expr = OMP_CLAUSE_OPERAND (c, 0);
11871 if (TREE_CODE (expr) == INTEGER_CST)
11872 {
11873 *p = expr;
11874 continue;
11875 }
11876 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
11877 {
11878 *p = integer_minus_one_node;
11879 continue;
11880 }
11881 *p = expr;
11882 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
11883 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
11884 == GS_ERROR)
11885 {
11886 gimplify_omp_ctxp = target_ctx;
11887 *p = integer_minus_one_node;
11888 continue;
11889 }
11890 gimplify_omp_ctxp = target_ctx;
11891 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
11892 OMP_CLAUSE_OPERAND (c, 0) = *p;
11893 }
11894 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
11895 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
11896 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
11897 OMP_TARGET_CLAUSES (target) = c;
11898 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
11899 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
11900 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
11901 OMP_TARGET_CLAUSES (target) = c;
11902 }
11903
11904 /* Gimplify the gross structure of several OMP constructs. */
11905
11906 static void
11907 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
11908 {
11909 tree expr = *expr_p;
11910 gimple *stmt;
11911 gimple_seq body = NULL;
11912 enum omp_region_type ort;
11913
11914 switch (TREE_CODE (expr))
11915 {
11916 case OMP_SECTIONS:
11917 case OMP_SINGLE:
11918 ort = ORT_WORKSHARE;
11919 break;
11920 case OMP_TARGET:
11921 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
11922 break;
11923 case OACC_KERNELS:
11924 ort = ORT_ACC_KERNELS;
11925 break;
11926 case OACC_PARALLEL:
11927 ort = ORT_ACC_PARALLEL;
11928 break;
11929 case OACC_DATA:
11930 ort = ORT_ACC_DATA;
11931 break;
11932 case OMP_TARGET_DATA:
11933 ort = ORT_TARGET_DATA;
11934 break;
11935 case OMP_TEAMS:
11936 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
11937 if (gimplify_omp_ctxp == NULL
11938 || (gimplify_omp_ctxp->region_type == ORT_TARGET
11939 && gimplify_omp_ctxp->outer_context == NULL
11940 && lookup_attribute ("omp declare target",
11941 DECL_ATTRIBUTES (current_function_decl))))
11942 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
11943 break;
11944 case OACC_HOST_DATA:
11945 ort = ORT_ACC_HOST_DATA;
11946 break;
11947 default:
11948 gcc_unreachable ();
11949 }
11950 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
11951 TREE_CODE (expr));
11952 if (TREE_CODE (expr) == OMP_TARGET)
11953 optimize_target_teams (expr, pre_p);
11954 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
11955 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
11956 {
11957 push_gimplify_context ();
11958 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
11959 if (gimple_code (g) == GIMPLE_BIND)
11960 pop_gimplify_context (g);
11961 else
11962 pop_gimplify_context (NULL);
11963 if ((ort & ORT_TARGET_DATA) != 0)
11964 {
11965 enum built_in_function end_ix;
11966 switch (TREE_CODE (expr))
11967 {
11968 case OACC_DATA:
11969 case OACC_HOST_DATA:
11970 end_ix = BUILT_IN_GOACC_DATA_END;
11971 break;
11972 case OMP_TARGET_DATA:
11973 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
11974 break;
11975 default:
11976 gcc_unreachable ();
11977 }
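/* Illustratively (a sketch), for #pragma omp target data this wraps the
   body so that the mapping is undone even on abnormal exit:

       try { <body> } finally { GOMP_target_end_data (); }  */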
11978 tree fn = builtin_decl_explicit (end_ix);
11979 g = gimple_build_call (fn, 0);
11980 gimple_seq cleanup = NULL;
11981 gimple_seq_add_stmt (&cleanup, g);
11982 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11983 body = NULL;
11984 gimple_seq_add_stmt (&body, g);
11985 }
11986 }
11987 else
11988 gimplify_and_add (OMP_BODY (expr), &body);
11989 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
11990 TREE_CODE (expr));
11991
11992 switch (TREE_CODE (expr))
11993 {
11994 case OACC_DATA:
11995 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
11996 OMP_CLAUSES (expr));
11997 break;
11998 case OACC_KERNELS:
11999 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
12000 OMP_CLAUSES (expr));
12001 break;
12002 case OACC_HOST_DATA:
12003 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
12004 OMP_CLAUSES (expr));
12005 break;
12006 case OACC_PARALLEL:
12007 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
12008 OMP_CLAUSES (expr));
12009 break;
12010 case OMP_SECTIONS:
12011 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
12012 break;
12013 case OMP_SINGLE:
12014 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
12015 break;
12016 case OMP_TARGET:
12017 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
12018 OMP_CLAUSES (expr));
12019 break;
12020 case OMP_TARGET_DATA:
12021 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
12022 OMP_CLAUSES (expr));
12023 break;
12024 case OMP_TEAMS:
12025 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
12026 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
12027 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
12028 break;
12029 default:
12030 gcc_unreachable ();
12031 }
12032
12033 gimplify_seq_add_stmt (pre_p, stmt);
12034 *expr_p = NULL_TREE;
12035 }
12036
12037 /* Gimplify the gross structure of OpenACC enter/exit data and update, and
12038 OpenMP target enter/exit data and update constructs. */
12039
12040 static void
12041 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
12042 {
12043 tree expr = *expr_p;
12044 int kind;
12045 gomp_target *stmt;
12046 enum omp_region_type ort = ORT_WORKSHARE;
12047
12048 switch (TREE_CODE (expr))
12049 {
12050 case OACC_ENTER_DATA:
12051 case OACC_EXIT_DATA:
12052 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
12053 ort = ORT_ACC;
12054 break;
12055 case OACC_UPDATE:
12056 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
12057 ort = ORT_ACC;
12058 break;
12059 case OMP_TARGET_UPDATE:
12060 kind = GF_OMP_TARGET_KIND_UPDATE;
12061 break;
12062 case OMP_TARGET_ENTER_DATA:
12063 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
12064 break;
12065 case OMP_TARGET_EXIT_DATA:
12066 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
12067 break;
12068 default:
12069 gcc_unreachable ();
12070 }
12071 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
12072 ort, TREE_CODE (expr));
12073 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
12074 TREE_CODE (expr));
12075 if (TREE_CODE (expr) == OACC_UPDATE
12076 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12077 OMP_CLAUSE_IF_PRESENT))
12078 {
12079 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
12080 clause. */
12081 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12082 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
12083 switch (OMP_CLAUSE_MAP_KIND (c))
12084 {
12085 case GOMP_MAP_FORCE_TO:
12086 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
12087 break;
12088 case GOMP_MAP_FORCE_FROM:
12089 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
12090 break;
12091 default:
12092 break;
12093 }
12094 }
12095 else if (TREE_CODE (expr) == OACC_EXIT_DATA
12096 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12097 OMP_CLAUSE_FINALIZE))
12098 {
12099 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote that "finalize"
12100 semantics apply to all mappings of this OpenACC directive. */
12101 bool finalize_marked = false;
12102 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12103 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
12104 switch (OMP_CLAUSE_MAP_KIND (c))
12105 {
12106 case GOMP_MAP_FROM:
12107 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
12108 finalize_marked = true;
12109 break;
12110 case GOMP_MAP_RELEASE:
12111 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
12112 finalize_marked = true;
12113 break;
12114 default:
12115 /* Check consistency: libgomp relies on the very first data
12116 mapping clause being marked, so make sure we did that before
12117 any other mapping clauses. */
12118 gcc_assert (finalize_marked);
12119 break;
12120 }
12121 }
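/* E.g. (illustrative), for

       #pragma acc exit data copyout (a) delete (b) finalize

   the GOMP_MAP_FROM for A becomes GOMP_MAP_FORCE_FROM and the
   GOMP_MAP_RELEASE for B becomes GOMP_MAP_DELETE, telling the runtime
   to apply finalize semantics.  */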
12122 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
12123
12124 gimplify_seq_add_stmt (pre_p, stmt);
12125 *expr_p = NULL_TREE;
12126 }
12127
12128 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
12129 stabilized the lhs of the atomic operation as *ADDR. Return true if
12130 EXPR is this stabilized form. */
12131
12132 static bool
12133 goa_lhs_expr_p (tree expr, tree addr)
12134 {
12135 /* Also include casts to other type variants. The C front end is fond
12136 of adding these for e.g. volatile variables. This is like
12137 STRIP_TYPE_NOPS but includes the main variant lookup. */
12138 STRIP_USELESS_TYPE_CONVERSION (expr);
12139
12140 if (TREE_CODE (expr) == INDIRECT_REF)
12141 {
12142 expr = TREE_OPERAND (expr, 0);
12143 while (expr != addr
12144 && (CONVERT_EXPR_P (expr)
12145 || TREE_CODE (expr) == NON_LVALUE_EXPR)
12146 && TREE_CODE (expr) == TREE_CODE (addr)
12147 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
12148 {
12149 expr = TREE_OPERAND (expr, 0);
12150 addr = TREE_OPERAND (addr, 0);
12151 }
12152 if (expr == addr)
12153 return true;
12154 return (TREE_CODE (addr) == ADDR_EXPR
12155 && TREE_CODE (expr) == ADDR_EXPR
12156 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
12157 }
12158 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
12159 return true;
12160 return false;
12161 }
12162
12163 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
12164 expression does not involve the lhs, evaluate it into a temporary.
12165 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
12166 or -1 if an error was encountered. */
12167
12168 static int
12169 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
12170 tree lhs_var)
12171 {
12172 tree expr = *expr_p;
12173 int saw_lhs;
12174
12175 if (goa_lhs_expr_p (expr, lhs_addr))
12176 {
12177 *expr_p = lhs_var;
12178 return 1;
12179 }
12180 if (is_gimple_val (expr))
12181 return 0;
12182
12183 saw_lhs = 0;
12184 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
12185 {
12186 case tcc_binary:
12187 case tcc_comparison:
12188 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
12189 lhs_var);
12190 /* FALLTHRU */
12191 case tcc_unary:
12192 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
12193 lhs_var);
12194 break;
12195 case tcc_expression:
12196 switch (TREE_CODE (expr))
12197 {
12198 case TRUTH_ANDIF_EXPR:
12199 case TRUTH_ORIF_EXPR:
12200 case TRUTH_AND_EXPR:
12201 case TRUTH_OR_EXPR:
12202 case TRUTH_XOR_EXPR:
12203 case BIT_INSERT_EXPR:
12204 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
12205 lhs_addr, lhs_var);
12206 /* FALLTHRU */
12207 case TRUTH_NOT_EXPR:
12208 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
12209 lhs_addr, lhs_var);
12210 break;
12211 case COMPOUND_EXPR:
12212 /* Break out any preevaluations from cp_build_modify_expr. */
12213 for (; TREE_CODE (expr) == COMPOUND_EXPR;
12214 expr = TREE_OPERAND (expr, 1))
12215 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
12216 *expr_p = expr;
12217 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
12218 default:
12219 break;
12220 }
12221 break;
12222 case tcc_reference:
12223 if (TREE_CODE (expr) == BIT_FIELD_REF)
12224 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
12225 lhs_addr, lhs_var);
12226 break;
12227 default:
12228 break;
12229 }
12230
12231 if (saw_lhs == 0)
12232 {
12233 enum gimplify_status gs;
12234 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
12235 if (gs != GS_ALL_DONE)
12236 saw_lhs = -1;
12237 }
12238
12239 return saw_lhs;
12240 }
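/* Illustration (hypothetical): for "#pragma omp atomic  x = x + foo ()",
   goa_stabilize_expr evaluates "foo ()" into a temporary in PRE_P before
   the atomic load/store pair is emitted, while the occurrence of X
   itself is replaced by the temporary that will hold the atomically
   loaded value.  */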
12241
12242 /* Gimplify an OMP_ATOMIC statement. */
12243
12244 static enum gimplify_status
12245 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
12246 {
12247 tree addr = TREE_OPERAND (*expr_p, 0);
12248 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
12249 ? NULL : TREE_OPERAND (*expr_p, 1);
12250 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
12251 tree tmp_load;
12252 gomp_atomic_load *loadstmt;
12253 gomp_atomic_store *storestmt;
12254
12255 tmp_load = create_tmp_reg (type);
12256 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
12257 return GS_ERROR;
12258
12259 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
12260 != GS_ALL_DONE)
12261 return GS_ERROR;
12262
12263 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
12264 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
12265 gimplify_seq_add_stmt (pre_p, loadstmt);
12266 if (rhs)
12267 {
12268 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
12269 representatives. Use BIT_FIELD_REF on the lhs instead. */
12270 if (TREE_CODE (rhs) == BIT_INSERT_EXPR
12271 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
12272 {
12273 tree bitpos = TREE_OPERAND (rhs, 2);
12274 tree op1 = TREE_OPERAND (rhs, 1);
12275 tree bitsize;
12276 tree tmp_store = tmp_load;
12277 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
12278 tmp_store = get_initialized_tmp_var (tmp_load, pre_p, NULL);
12279 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
12280 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
12281 else
12282 bitsize = TYPE_SIZE (TREE_TYPE (op1));
12283 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
12284 tree t = build2_loc (EXPR_LOCATION (rhs),
12285 MODIFY_EXPR, void_type_node,
12286 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
12287 TREE_TYPE (op1), tmp_store, bitsize,
12288 bitpos), op1);
12289 gimplify_and_add (t, pre_p);
12290 rhs = tmp_store;
12291 }
12292 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
12293 != GS_ALL_DONE)
12294 return GS_ERROR;
12295 }
12296
12297 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
12298 rhs = tmp_load;
12299 storestmt
12300 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
12301 gimplify_seq_add_stmt (pre_p, storestmt);
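/* Which statement supplies the extra value depends on the atomic kind:
   e.g. (illustrative) "v = x;" under OMP_ATOMIC_READ wants the loaded
   value, whereas "v = ++x;" under OMP_ATOMIC_CAPTURE_NEW wants the
   stored one.  */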
12302 switch (TREE_CODE (*expr_p))
12303 {
12304 case OMP_ATOMIC_READ:
12305 case OMP_ATOMIC_CAPTURE_OLD:
12306 *expr_p = tmp_load;
12307 gimple_omp_atomic_set_need_value (loadstmt);
12308 break;
12309 case OMP_ATOMIC_CAPTURE_NEW:
12310 *expr_p = rhs;
12311 gimple_omp_atomic_set_need_value (storestmt);
12312 break;
12313 default:
12314 *expr_p = NULL;
12315 break;
12316 }
12317
12318 return GS_ALL_DONE;
12319 }
12320
12321 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
12322 body, and adding some EH bits. */
12323
12324 static enum gimplify_status
12325 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
12326 {
12327 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
12328 gimple *body_stmt;
12329 gtransaction *trans_stmt;
12330 gimple_seq body = NULL;
12331 int subcode = 0;
12332
12333 /* Wrap the transaction body in a BIND_EXPR so we have a context
12334 in which to put decls for OMP. */
12335 if (TREE_CODE (tbody) != BIND_EXPR)
12336 {
12337 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
12338 TREE_SIDE_EFFECTS (bind) = 1;
12339 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
12340 TRANSACTION_EXPR_BODY (expr) = bind;
12341 }
12342
12343 push_gimplify_context ();
12344 temp = voidify_wrapper_expr (*expr_p, NULL);
12345
12346 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
12347 pop_gimplify_context (body_stmt);
12348
12349 trans_stmt = gimple_build_transaction (body);
12350 if (TRANSACTION_EXPR_OUTER (expr))
12351 subcode = GTMA_IS_OUTER;
12352 else if (TRANSACTION_EXPR_RELAXED (expr))
12353 subcode = GTMA_IS_RELAXED;
12354 gimple_transaction_set_subcode (trans_stmt, subcode);
12355
12356 gimplify_seq_add_stmt (pre_p, trans_stmt);
12357
12358 if (temp)
12359 {
12360 *expr_p = temp;
12361 return GS_OK;
12362 }
12363
12364 *expr_p = NULL_TREE;
12365 return GS_ALL_DONE;
12366 }
12367
12368 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
12369 is the OMP_BODY of the original EXPR (which has already been
12370 gimplified, so it is no longer present in EXPR).
12371
12372 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
12373
12374 static gimple *
12375 gimplify_omp_ordered (tree expr, gimple_seq body)
12376 {
12377 tree c, decls;
12378 int failures = 0;
12379 unsigned int i;
12380 tree source_c = NULL_TREE;
12381 tree sink_c = NULL_TREE;
12382
12383 if (gimplify_omp_ctxp)
12384 {
12385 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12386 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12387 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
12388 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
12389 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
12390 {
12391 error_at (OMP_CLAUSE_LOCATION (c),
12392 "%<ordered%> construct with %<depend%> clause must be "
12393 "closely nested inside a loop with %<ordered%> clause "
12394 "with a parameter");
12395 failures++;
12396 }
12397 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12398 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
12399 {
12400 bool fail = false;
12401 for (decls = OMP_CLAUSE_DECL (c), i = 0;
12402 decls && TREE_CODE (decls) == TREE_LIST;
12403 decls = TREE_CHAIN (decls), ++i)
12404 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
12405 continue;
12406 else if (TREE_VALUE (decls)
12407 != gimplify_omp_ctxp->loop_iter_var[2 * i])
12408 {
12409 error_at (OMP_CLAUSE_LOCATION (c),
12410 "variable %qE is not an iteration "
12411 "of outermost loop %d, expected %qE",
12412 TREE_VALUE (decls), i + 1,
12413 gimplify_omp_ctxp->loop_iter_var[2 * i]);
12414 fail = true;
12415 failures++;
12416 }
12417 else
12418 TREE_VALUE (decls)
12419 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
12420 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
12421 {
12422 error_at (OMP_CLAUSE_LOCATION (c),
12423 "number of variables in %<depend%> clause with "
12424 "%<sink%> modifier does not match number of "
12425 "iteration variables");
12426 failures++;
12427 }
12428 sink_c = c;
12429 }
12430 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12431 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
12432 {
12433 if (source_c)
12434 {
12435 error_at (OMP_CLAUSE_LOCATION (c),
12436 "more than one %<depend%> clause with %<source%> "
12437 "modifier on an %<ordered%> construct");
12438 failures++;
12439 }
12440 else
12441 source_c = c;
12442 }
12443 }
12444 if (source_c && sink_c)
12445 {
12446 error_at (OMP_CLAUSE_LOCATION (source_c),
12447 "%<depend%> clause with %<source%> modifier specified "
12448 "together with %<depend%> clauses with %<sink%> modifier "
12449 "on the same construct");
12450 failures++;
12451 }
12452
12453 if (failures)
12454 return gimple_build_nop ();
12455 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
12456 }
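/* For example, the checks above accept

     #pragma omp for ordered(2)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
         {
           #pragma omp ordered depend(sink: i-1, j)
           ...
           #pragma omp ordered depend(source)
         }

   where each 'sink' item names the iteration variable of the
   corresponding loop level; mismatched variables or a wrong number
   of 'sink' items are diagnosed, as is mixing 'source' and 'sink'
   on one construct.  */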
12457
12458 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
12459 expression produces a value to be used as an operand inside a GIMPLE
12460 statement, the value will be stored back in *EXPR_P. This value will
12461 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
12462 an SSA_NAME. The corresponding sequence of GIMPLE statements is
12463 emitted in PRE_P and POST_P.
12464
12465 Additionally, this process may overwrite parts of the input
12466 expression during gimplification. Ideally, it should be
12467 possible to do non-destructive gimplification.
12468
12469 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
12470 the expression needs to evaluate to a value to be used as
12471 an operand in a GIMPLE statement, this value will be stored in
12472 *EXPR_P on exit. This happens when the caller specifies one
12473 of fb_lvalue or fb_rvalue fallback flags.
12474
12475 PRE_P will contain the sequence of GIMPLE statements corresponding
12476 to the evaluation of EXPR and all the side-effects that must
12477 be executed before the main expression. On exit, the last
12478 statement of PRE_P is the core statement being gimplified. For
12479 instance, when gimplifying 'if (++a)' the last statement in
12480 PRE_P will be 'if (t.1)' where t.1 is the result of
12481 pre-incrementing 'a'.
12482
12483 POST_P will contain the sequence of GIMPLE statements corresponding
12484 to the evaluation of all the side-effects that must be executed
12485 after the main expression. If this is NULL, the post
12486 side-effects are stored at the end of PRE_P.
12487
12488 The reason why the output is split in two is to handle post
12489 side-effects explicitly. In some cases, an expression may have
12490 inner and outer post side-effects which need to be emitted in
12491 an order different from the one given by the recursive
12492 traversal. For instance, for the expression (*p--)++ the post
12493 side-effects of '--' must actually occur *after* the post
12494 side-effects of '++'. However, gimplification will first visit
12495 the inner expression, so if a separate POST sequence was not
12496 used, the resulting sequence would be:
12497
12498 1 t.1 = *p
12499 2 p = p - 1
12500 3 t.2 = t.1 + 1
12501 4 *p = t.2
12502
12503 However, the post-decrement operation in line #2 must not be
12504 evaluated until after the store to *p at line #4, so the
12505 correct sequence should be:
12506
12507 1 t.1 = *p
12508 2 t.2 = t.1 + 1
12509 3 *p = t.2
12510 4 p = p - 1
12511
12512 So, by specifying a separate post queue, it is possible
12513 to emit the post side-effects in the correct order.
12514 If POST_P is NULL, an internal queue will be used. Before
12515 returning to the caller, the sequence POST_P is appended to
12516 the main output sequence PRE_P.
12517
12518 GIMPLE_TEST_F points to a function that takes a tree T and
12519 returns nonzero if T is in the GIMPLE form requested by the
12520 caller. The GIMPLE predicates are in gimple.c.
12521
12522 FALLBACK tells the function what sort of a temporary we want if
12523 gimplification cannot produce an expression that complies with
12524 GIMPLE_TEST_F.
12525
12526 fb_none means that no temporary should be generated
12527 fb_rvalue means that an rvalue is OK to generate
12528 fb_lvalue means that an lvalue is OK to generate
12529 fb_either means that either is OK, but an lvalue is preferable.
12530 fb_mayfail means that gimplification may fail (in which case
12531 GS_ERROR will be returned)
12532
12533 The return value is either GS_ERROR or GS_ALL_DONE, since this
12534 function iterates until EXPR is completely gimplified or an error
12535 occurs. */
12536
12537 enum gimplify_status
12538 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12539 bool (*gimple_test_f) (tree), fallback_t fallback)
12540 {
12541 tree tmp;
12542 gimple_seq internal_pre = NULL;
12543 gimple_seq internal_post = NULL;
12544 tree save_expr;
12545 bool is_statement;
12546 location_t saved_location;
12547 enum gimplify_status ret;
12548 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
12549 tree label;
12550
12551 save_expr = *expr_p;
12552 if (save_expr == NULL_TREE)
12553 return GS_ALL_DONE;
12554
12555 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
12556 is_statement = gimple_test_f == is_gimple_stmt;
12557 if (is_statement)
12558 gcc_assert (pre_p);
12559
12560 /* Consistency checks. */
12561 if (gimple_test_f == is_gimple_reg)
12562 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
12563 else if (gimple_test_f == is_gimple_val
12564 || gimple_test_f == is_gimple_call_addr
12565 || gimple_test_f == is_gimple_condexpr
12566 || gimple_test_f == is_gimple_mem_rhs
12567 || gimple_test_f == is_gimple_mem_rhs_or_call
12568 || gimple_test_f == is_gimple_reg_rhs
12569 || gimple_test_f == is_gimple_reg_rhs_or_call
12570 || gimple_test_f == is_gimple_asm_val
12571 || gimple_test_f == is_gimple_mem_ref_addr)
12572 gcc_assert (fallback & fb_rvalue);
12573 else if (gimple_test_f == is_gimple_min_lval
12574 || gimple_test_f == is_gimple_lvalue)
12575 gcc_assert (fallback & fb_lvalue);
12576 else if (gimple_test_f == is_gimple_addressable)
12577 gcc_assert (fallback & fb_either);
12578 else if (gimple_test_f == is_gimple_stmt)
12579 gcc_assert (fallback == fb_none);
12580 else
12581 {
12582 /* We should have recognized the GIMPLE_TEST_F predicate to
12583 know what kind of fallback to use in case a temporary is
12584 needed to hold the value or address of *EXPR_P. */
12585 gcc_unreachable ();
12586 }
12587
12588 /* We used to check the predicate here and return immediately if it
12589 succeeds. This is wrong; the design is for gimplification to be
12590 idempotent, and for the predicates to only test for valid forms, not
12591 whether they are fully simplified. */
12592 if (pre_p == NULL)
12593 pre_p = &internal_pre;
12594
12595 if (post_p == NULL)
12596 post_p = &internal_post;
12597
12598 /* Remember the last statements added to PRE_P and POST_P. Every
12599 new statement added by the gimplification helpers needs to be
12600 annotated with location information. To centralize the
12601 responsibility, we remember the last statement that had been
12602 added to both queues before gimplifying *EXPR_P. If
12603 gimplification produces new statements in PRE_P and POST_P, those
12604 statements will be annotated with the same location information
12605 as *EXPR_P. */
12606 pre_last_gsi = gsi_last (*pre_p);
12607 post_last_gsi = gsi_last (*post_p);
12608
12609 saved_location = input_location;
12610 if (save_expr != error_mark_node
12611 && EXPR_HAS_LOCATION (*expr_p))
12612 input_location = EXPR_LOCATION (*expr_p);
12613
12614 /* Loop over the specific gimplifiers until the toplevel node
12615 remains the same. */
12616 do
12617 {
12618 /* Strip away as many useless type conversions as possible
12619 at the toplevel. */
12620 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
12621
12622 /* Remember the expr. */
12623 save_expr = *expr_p;
12624
12625 /* Die, die, die, my darling. */
12626 if (error_operand_p (save_expr))
12627 {
12628 ret = GS_ERROR;
12629 break;
12630 }
12631
12632 /* Do any language-specific gimplification. */
12633 ret = ((enum gimplify_status)
12634 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
12635 if (ret == GS_OK)
12636 {
12637 if (*expr_p == NULL_TREE)
12638 break;
12639 if (*expr_p != save_expr)
12640 continue;
12641 }
12642 else if (ret != GS_UNHANDLED)
12643 break;
12644
12645 /* Make sure that all the cases set 'ret' appropriately. */
12646 ret = GS_UNHANDLED;
12647 switch (TREE_CODE (*expr_p))
12648 {
12649 /* First deal with the special cases. */
12650
12651 case POSTINCREMENT_EXPR:
12652 case POSTDECREMENT_EXPR:
12653 case PREINCREMENT_EXPR:
12654 case PREDECREMENT_EXPR:
12655 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
12656 fallback != fb_none,
12657 TREE_TYPE (*expr_p));
12658 break;
12659
12660 case VIEW_CONVERT_EXPR:
12661 if ((fallback & fb_rvalue)
12662 && is_gimple_reg_type (TREE_TYPE (*expr_p))
12663 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
12664 {
12665 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12666 post_p, is_gimple_val, fb_rvalue);
12667 recalculate_side_effects (*expr_p);
12668 break;
12669 }
12670 /* Fallthru. */
12671
12672 case ARRAY_REF:
12673 case ARRAY_RANGE_REF:
12674 case REALPART_EXPR:
12675 case IMAGPART_EXPR:
12676 case COMPONENT_REF:
12677 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
12678 fallback ? fallback : fb_rvalue);
12679 break;
12680
12681 case COND_EXPR:
12682 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
12683
12684 /* C99 code may assign to an array in a structure value of a
12685 conditional expression, and this has undefined behavior
12686 only on execution, so create a temporary if an lvalue is
12687 required. */
12688 if (fallback == fb_lvalue)
12689 {
12690 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12691 mark_addressable (*expr_p);
12692 ret = GS_OK;
12693 }
12694 break;
12695
12696 case CALL_EXPR:
12697 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
12698
12699 /* C99 code may assign to an array in a structure returned
12700 from a function, and this has undefined behavior only on
12701 execution, so create a temporary if an lvalue is
12702 required. */
12703 if (fallback == fb_lvalue)
12704 {
12705 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12706 mark_addressable (*expr_p);
12707 ret = GS_OK;
12708 }
12709 break;
12710
12711 case TREE_LIST:
12712 gcc_unreachable ();
12713
12714 case COMPOUND_EXPR:
12715 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
12716 break;
12717
12718 case COMPOUND_LITERAL_EXPR:
12719 ret = gimplify_compound_literal_expr (expr_p, pre_p,
12720 gimple_test_f, fallback);
12721 break;
12722
12723 case MODIFY_EXPR:
12724 case INIT_EXPR:
12725 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
12726 fallback != fb_none);
12727 break;
12728
12729 case TRUTH_ANDIF_EXPR:
12730 case TRUTH_ORIF_EXPR:
12731 {
12732 /* Preserve the original type of the expression and the
12733 source location of the outer expression. */
12734 tree org_type = TREE_TYPE (*expr_p);
12735 *expr_p = gimple_boolify (*expr_p);
12736 *expr_p = build3_loc (input_location, COND_EXPR,
12737 org_type, *expr_p,
12738 fold_convert_loc
12739 (input_location,
12740 org_type, boolean_true_node),
12741 fold_convert_loc
12742 (input_location,
12743 org_type, boolean_false_node));
12744 ret = GS_OK;
12745 break;
12746 }
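/* E.g. 'x = a && b' is rewritten above into the equivalent
   COND_EXPR 'x = a && b ? 1 : 0'; gimplifying the COND_EXPR then
   expands the short circuit into explicit control flow, roughly:

     if (a) goto L1; else goto L3;
     L1: if (b) goto L2; else goto L3;
     L2: x = 1; goto L4;
     L3: x = 0;
     L4: ...
*/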
12747
12748 case TRUTH_NOT_EXPR:
12749 {
12750 tree type = TREE_TYPE (*expr_p);
12751 /* The parsers are careful to generate TRUTH_NOT_EXPR
12752 only with operands that are always zero or one.
12753 We do not fold here but handle the only interesting case
12754 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
12755 *expr_p = gimple_boolify (*expr_p);
12756 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
12757 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
12758 TREE_TYPE (*expr_p),
12759 TREE_OPERAND (*expr_p, 0));
12760 else
12761 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
12762 TREE_TYPE (*expr_p),
12763 TREE_OPERAND (*expr_p, 0),
12764 build_int_cst (TREE_TYPE (*expr_p), 1));
12765 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
12766 *expr_p = fold_convert_loc (input_location, type, *expr_p);
12767 ret = GS_OK;
12768 break;
12769 }
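/* E.g. '!b' with a 1-bit boolean operand becomes '~b' on that
   type, while '!i' with an 'int' operand known to be 0 or 1
   becomes 'i ^ 1'; a conversion back to the original type is
   appended when the boolified type differs.  */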
12770
12771 case ADDR_EXPR:
12772 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
12773 break;
12774
12775 case ANNOTATE_EXPR:
12776 {
12777 tree cond = TREE_OPERAND (*expr_p, 0);
12778 tree kind = TREE_OPERAND (*expr_p, 1);
12779 tree data = TREE_OPERAND (*expr_p, 2);
12780 tree type = TREE_TYPE (cond);
12781 if (!INTEGRAL_TYPE_P (type))
12782 {
12783 *expr_p = cond;
12784 ret = GS_OK;
12785 break;
12786 }
12787 tree tmp = create_tmp_var (type);
12788 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
12789 gcall *call
12790 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
12791 gimple_call_set_lhs (call, tmp);
12792 gimplify_seq_add_stmt (pre_p, call);
12793 *expr_p = tmp;
12794 ret = GS_ALL_DONE;
12795 break;
12796 }
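/* E.g. the controlling expression of a loop marked with
   '#pragma GCC ivdep' arrives here wrapped in an ANNOTATE_EXPR
   and leaves as an internal-function call, roughly

     tmp = .ANNOTATE (cond, annot_expr_ivdep_kind, 0);

   which later passes recognize and strip (a sketch; the exact kind
   operand depends on the pragma).  */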
12797
12798 case VA_ARG_EXPR:
12799 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
12800 break;
12801
12802 CASE_CONVERT:
12803 if (IS_EMPTY_STMT (*expr_p))
12804 {
12805 ret = GS_ALL_DONE;
12806 break;
12807 }
12808
12809 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
12810 || fallback == fb_none)
12811 {
12812 /* Just strip a conversion to void (or in void context) and
12813 try again. */
12814 *expr_p = TREE_OPERAND (*expr_p, 0);
12815 ret = GS_OK;
12816 break;
12817 }
12818
12819 ret = gimplify_conversion (expr_p);
12820 if (ret == GS_ERROR)
12821 break;
12822 if (*expr_p != save_expr)
12823 break;
12824 /* FALLTHRU */
12825
12826 case FIX_TRUNC_EXPR:
12827 /* unary_expr: ... | '(' cast ')' val | ... */
12828 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12829 is_gimple_val, fb_rvalue);
12830 recalculate_side_effects (*expr_p);
12831 break;
12832
12833 case INDIRECT_REF:
12834 {
12835 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
12836 bool notrap = TREE_THIS_NOTRAP (*expr_p);
12837 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
12838
12839 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
12840 if (*expr_p != save_expr)
12841 {
12842 ret = GS_OK;
12843 break;
12844 }
12845
12846 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12847 is_gimple_reg, fb_rvalue);
12848 if (ret == GS_ERROR)
12849 break;
12850
12851 recalculate_side_effects (*expr_p);
12852 *expr_p = fold_build2_loc (input_location, MEM_REF,
12853 TREE_TYPE (*expr_p),
12854 TREE_OPERAND (*expr_p, 0),
12855 build_int_cst (saved_ptr_type, 0));
12856 TREE_THIS_VOLATILE (*expr_p) = volatilep;
12857 TREE_THIS_NOTRAP (*expr_p) = notrap;
12858 ret = GS_OK;
12859 break;
12860 }
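/* As a rough sketch, an INDIRECT_REF '*p' that survives folding is
   rewritten into the equivalent 'MEM[(T *)p]', i.e. a MEM_REF of
   the gimplified pointer with a zero offset of the original
   pointer type, preserving volatility and the no-trap flag.  */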
12861
12862 /* We arrive here through the various re-gimplification paths. */
12863 case MEM_REF:
12864 /* First try re-folding the whole thing. */
12865 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
12866 TREE_OPERAND (*expr_p, 0),
12867 TREE_OPERAND (*expr_p, 1));
12868 if (tmp)
12869 {
12870 REF_REVERSE_STORAGE_ORDER (tmp)
12871 = REF_REVERSE_STORAGE_ORDER (*expr_p);
12872 *expr_p = tmp;
12873 recalculate_side_effects (*expr_p);
12874 ret = GS_OK;
12875 break;
12876 }
12877 /* Avoid re-gimplifying the address operand if it is already
12878 in suitable form. Re-gimplifying would mark the address
12879 operand addressable. Always gimplify when not in SSA form
12880 as we still may have to gimplify decls with value-exprs. */
12881 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
12882 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
12883 {
12884 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12885 is_gimple_mem_ref_addr, fb_rvalue);
12886 if (ret == GS_ERROR)
12887 break;
12888 }
12889 recalculate_side_effects (*expr_p);
12890 ret = GS_ALL_DONE;
12891 break;
12892
12893 /* Constants need not be gimplified. */
12894 case INTEGER_CST:
12895 case REAL_CST:
12896 case FIXED_CST:
12897 case STRING_CST:
12898 case COMPLEX_CST:
12899 case VECTOR_CST:
12900 /* Drop the overflow flag on constants; we do not want
12901 that in the GIMPLE IL. */
12902 if (TREE_OVERFLOW_P (*expr_p))
12903 *expr_p = drop_tree_overflow (*expr_p);
12904 ret = GS_ALL_DONE;
12905 break;
12906
12907 case CONST_DECL:
12908 /* If we require an lvalue, such as for ADDR_EXPR, retain the
12909 CONST_DECL node. Otherwise the decl is replaceable by its
12910 value. */
12911 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
12912 if (fallback & fb_lvalue)
12913 ret = GS_ALL_DONE;
12914 else
12915 {
12916 *expr_p = DECL_INITIAL (*expr_p);
12917 ret = GS_OK;
12918 }
12919 break;
12920
12921 case DECL_EXPR:
12922 ret = gimplify_decl_expr (expr_p, pre_p);
12923 break;
12924
12925 case BIND_EXPR:
12926 ret = gimplify_bind_expr (expr_p, pre_p);
12927 break;
12928
12929 case LOOP_EXPR:
12930 ret = gimplify_loop_expr (expr_p, pre_p);
12931 break;
12932
12933 case SWITCH_EXPR:
12934 ret = gimplify_switch_expr (expr_p, pre_p);
12935 break;
12936
12937 case EXIT_EXPR:
12938 ret = gimplify_exit_expr (expr_p);
12939 break;
12940
12941 case GOTO_EXPR:
12942 /* If the target is not a LABEL_DECL, then it is a computed jump
12943 and the target needs to be gimplified. */
12944 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
12945 {
12946 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
12947 NULL, is_gimple_val, fb_rvalue);
12948 if (ret == GS_ERROR)
12949 break;
12950 }
12951 gimplify_seq_add_stmt (pre_p,
12952 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
12953 ret = GS_ALL_DONE;
12954 break;
12955
12956 case PREDICT_EXPR:
12957 gimplify_seq_add_stmt (pre_p,
12958 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
12959 PREDICT_EXPR_OUTCOME (*expr_p)));
12960 ret = GS_ALL_DONE;
12961 break;
12962
12963 case LABEL_EXPR:
12964 ret = gimplify_label_expr (expr_p, pre_p);
12965 label = LABEL_EXPR_LABEL (*expr_p);
12966 gcc_assert (decl_function_context (label) == current_function_decl);
12967
12968 /* If the label is used in a goto statement, or the address of the
12969 label is taken, we need to unpoison all variables that were seen
12970 so far; doing so prevents us from reporting false positives. */
12971 if (asan_poisoned_variables
12972 && asan_used_labels != NULL
12973 && asan_used_labels->contains (label))
12974 asan_poison_variables (asan_poisoned_variables, false, pre_p);
12975 break;
12976
12977 case CASE_LABEL_EXPR:
12978 ret = gimplify_case_label_expr (expr_p, pre_p);
12979
12980 if (gimplify_ctxp->live_switch_vars)
12981 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
12982 pre_p);
12983 break;
12984
12985 case RETURN_EXPR:
12986 ret = gimplify_return_expr (*expr_p, pre_p);
12987 break;
12988
12989 case CONSTRUCTOR:
12990 /* Don't reduce this in place; let gimplify_init_constructor work its
12991 magic. But if we're just elaborating this for side effects,
12992 gimplify any element that has side-effects. */
12993 if (fallback == fb_none)
12994 {
12995 unsigned HOST_WIDE_INT ix;
12996 tree val;
12997 tree temp = NULL_TREE;
12998 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
12999 if (TREE_SIDE_EFFECTS (val))
13000 append_to_statement_list (val, &temp);
13001
13002 *expr_p = temp;
13003 ret = temp ? GS_OK : GS_ALL_DONE;
13004 }
13005 /* C99 code may assign to an array in a constructed
13006 structure or union, and this has undefined behavior only
13007 on execution, so create a temporary if an lvalue is
13008 required. */
13009 else if (fallback == fb_lvalue)
13010 {
13011 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13012 mark_addressable (*expr_p);
13013 ret = GS_OK;
13014 }
13015 else
13016 ret = GS_ALL_DONE;
13017 break;
13018
13019 /* The following are special cases that are not handled by the
13020 original GIMPLE grammar. */
13021
13022 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
13023 eliminated. */
13024 case SAVE_EXPR:
13025 ret = gimplify_save_expr (expr_p, pre_p, post_p);
13026 break;
13027
13028 case BIT_FIELD_REF:
13029 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13030 post_p, is_gimple_lvalue, fb_either);
13031 recalculate_side_effects (*expr_p);
13032 break;
13033
13034 case TARGET_MEM_REF:
13035 {
13036 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
13037
13038 if (TMR_BASE (*expr_p))
13039 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
13040 post_p, is_gimple_mem_ref_addr, fb_either);
13041 if (TMR_INDEX (*expr_p))
13042 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
13043 post_p, is_gimple_val, fb_rvalue);
13044 if (TMR_INDEX2 (*expr_p))
13045 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
13046 post_p, is_gimple_val, fb_rvalue);
13047 /* TMR_STEP and TMR_OFFSET are always integer constants. */
13048 ret = MIN (r0, r1);
13049 }
13050 break;
13051
13052 case NON_LVALUE_EXPR:
13053 /* This should have been stripped above. */
13054 gcc_unreachable ();
13055
13056 case ASM_EXPR:
13057 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
13058 break;
13059
13060 case TRY_FINALLY_EXPR:
13061 case TRY_CATCH_EXPR:
13062 {
13063 gimple_seq eval, cleanup;
13064 gtry *try_;
13065
13066 /* Calls to destructors are generated automatically in the FINALLY/CATCH
13067 block. They should have UNKNOWN_LOCATION as their location. However,
13068 gimplify_call_expr resets such call stmts to input_location
13069 when it finds their location is unknown. To prevent that resetting
13070 for destructors, we set input_location to unknown here.
13071 Note that this only affects the destructor calls in the FINALLY/CATCH
13072 block; input_location is automatically restored to its original value
13073 by the end of gimplify_expr. */
13074 input_location = UNKNOWN_LOCATION;
13075 eval = cleanup = NULL;
13076 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
13077 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
13078 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
13079 if (gimple_seq_empty_p (cleanup))
13080 {
13081 gimple_seq_add_seq (pre_p, eval);
13082 ret = GS_ALL_DONE;
13083 break;
13084 }
13085 try_ = gimple_build_try (eval, cleanup,
13086 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13087 ? GIMPLE_TRY_FINALLY
13088 : GIMPLE_TRY_CATCH);
13089 if (EXPR_HAS_LOCATION (save_expr))
13090 gimple_set_location (try_, EXPR_LOCATION (save_expr));
13091 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
13092 gimple_set_location (try_, saved_location);
13093 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
13094 gimple_try_set_catch_is_cleanup (try_,
13095 TRY_CATCH_IS_CLEANUP (*expr_p));
13096 gimplify_seq_add_stmt (pre_p, try_);
13097 ret = GS_ALL_DONE;
13098 break;
13099 }
13100
13101 case CLEANUP_POINT_EXPR:
13102 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
13103 break;
13104
13105 case TARGET_EXPR:
13106 ret = gimplify_target_expr (expr_p, pre_p, post_p);
13107 break;
13108
13109 case CATCH_EXPR:
13110 {
13111 gimple *c;
13112 gimple_seq handler = NULL;
13113 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
13114 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
13115 gimplify_seq_add_stmt (pre_p, c);
13116 ret = GS_ALL_DONE;
13117 break;
13118 }
13119
13120 case EH_FILTER_EXPR:
13121 {
13122 gimple *ehf;
13123 gimple_seq failure = NULL;
13124
13125 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
13126 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
13127 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
13128 gimplify_seq_add_stmt (pre_p, ehf);
13129 ret = GS_ALL_DONE;
13130 break;
13131 }
13132
13133 case OBJ_TYPE_REF:
13134 {
13135 enum gimplify_status r0, r1;
13136 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
13137 post_p, is_gimple_val, fb_rvalue);
13138 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
13139 post_p, is_gimple_val, fb_rvalue);
13140 TREE_SIDE_EFFECTS (*expr_p) = 0;
13141 ret = MIN (r0, r1);
13142 }
13143 break;
13144
13145 case LABEL_DECL:
13146 /* We get here when taking the address of a label. We mark
13147 the label as "forced", meaning it can never be removed and
13148 is a potential target for any computed goto. */
13149 FORCED_LABEL (*expr_p) = 1;
13150 ret = GS_ALL_DONE;
13151 break;
13152
13153 case STATEMENT_LIST:
13154 ret = gimplify_statement_list (expr_p, pre_p);
13155 break;
13156
13157 case WITH_SIZE_EXPR:
13158 {
13159 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13160 post_p == &internal_post ? NULL : post_p,
13161 gimple_test_f, fallback);
13162 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
13163 is_gimple_val, fb_rvalue);
13164 ret = GS_ALL_DONE;
13165 }
13166 break;
13167
13168 case VAR_DECL:
13169 case PARM_DECL:
13170 ret = gimplify_var_or_parm_decl (expr_p);
13171 break;
13172
13173 case RESULT_DECL:
13174 /* When within an OMP context, notice uses of variables. */
13175 if (gimplify_omp_ctxp)
13176 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
13177 ret = GS_ALL_DONE;
13178 break;
13179
13180 case DEBUG_EXPR_DECL:
13181 gcc_unreachable ();
13182
13183 case DEBUG_BEGIN_STMT:
13184 gimplify_seq_add_stmt (pre_p,
13185 gimple_build_debug_begin_stmt
13186 (TREE_BLOCK (*expr_p),
13187 EXPR_LOCATION (*expr_p)));
13188 ret = GS_ALL_DONE;
13189 *expr_p = NULL;
13190 break;
13191
13192 case SSA_NAME:
13193 /* Allow callbacks into the gimplifier during optimization. */
13194 ret = GS_ALL_DONE;
13195 break;
13196
13197 case OMP_PARALLEL:
13198 gimplify_omp_parallel (expr_p, pre_p);
13199 ret = GS_ALL_DONE;
13200 break;
13201
13202 case OMP_TASK:
13203 gimplify_omp_task (expr_p, pre_p);
13204 ret = GS_ALL_DONE;
13205 break;
13206
13207 case OMP_FOR:
13208 case OMP_SIMD:
13209 case OMP_DISTRIBUTE:
13210 case OMP_TASKLOOP:
13211 case OACC_LOOP:
13212 ret = gimplify_omp_for (expr_p, pre_p);
13213 break;
13214
13215 case OACC_CACHE:
13216 gimplify_oacc_cache (expr_p, pre_p);
13217 ret = GS_ALL_DONE;
13218 break;
13219
13220 case OACC_DECLARE:
13221 gimplify_oacc_declare (expr_p, pre_p);
13222 ret = GS_ALL_DONE;
13223 break;
13224
13225 case OACC_HOST_DATA:
13226 case OACC_DATA:
13227 case OACC_KERNELS:
13228 case OACC_PARALLEL:
13229 case OMP_SECTIONS:
13230 case OMP_SINGLE:
13231 case OMP_TARGET:
13232 case OMP_TARGET_DATA:
13233 case OMP_TEAMS:
13234 gimplify_omp_workshare (expr_p, pre_p);
13235 ret = GS_ALL_DONE;
13236 break;
13237
13238 case OACC_ENTER_DATA:
13239 case OACC_EXIT_DATA:
13240 case OACC_UPDATE:
13241 case OMP_TARGET_UPDATE:
13242 case OMP_TARGET_ENTER_DATA:
13243 case OMP_TARGET_EXIT_DATA:
13244 gimplify_omp_target_update (expr_p, pre_p);
13245 ret = GS_ALL_DONE;
13246 break;
13247
13248 case OMP_SECTION:
13249 case OMP_MASTER:
13250 case OMP_ORDERED:
13251 case OMP_CRITICAL:
13252 case OMP_SCAN:
13253 {
13254 gimple_seq body = NULL;
13255 gimple *g;
13256
13257 gimplify_and_add (OMP_BODY (*expr_p), &body);
13258 switch (TREE_CODE (*expr_p))
13259 {
13260 case OMP_SECTION:
13261 g = gimple_build_omp_section (body);
13262 break;
13263 case OMP_MASTER:
13264 g = gimple_build_omp_master (body);
13265 break;
13266 case OMP_ORDERED:
13267 g = gimplify_omp_ordered (*expr_p, body);
13268 break;
13269 case OMP_CRITICAL:
13270 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
13271 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
13272 gimplify_adjust_omp_clauses (pre_p, body,
13273 &OMP_CRITICAL_CLAUSES (*expr_p),
13274 OMP_CRITICAL);
13275 g = gimple_build_omp_critical (body,
13276 OMP_CRITICAL_NAME (*expr_p),
13277 OMP_CRITICAL_CLAUSES (*expr_p));
13278 break;
13279 case OMP_SCAN:
13280 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
13281 pre_p, ORT_WORKSHARE, OMP_SCAN);
13282 gimplify_adjust_omp_clauses (pre_p, body,
13283 &OMP_SCAN_CLAUSES (*expr_p),
13284 OMP_SCAN);
13285 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
13286 break;
13287 default:
13288 gcc_unreachable ();
13289 }
13290 gimplify_seq_add_stmt (pre_p, g);
13291 ret = GS_ALL_DONE;
13292 break;
13293 }
13294
13295 case OMP_TASKGROUP:
13296 {
13297 gimple_seq body = NULL;
13298
13299 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
13300 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
13301 OMP_TASKGROUP);
13302 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
13303 gimplify_and_add (OMP_BODY (*expr_p), &body);
13304 gimple_seq cleanup = NULL;
13305 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
13306 gimple *g = gimple_build_call (fn, 0);
13307 gimple_seq_add_stmt (&cleanup, g);
13308 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
13309 body = NULL;
13310 gimple_seq_add_stmt (&body, g);
13311 g = gimple_build_omp_taskgroup (body, *pclauses);
13312 gimplify_seq_add_stmt (pre_p, g);
13313 ret = GS_ALL_DONE;
13314 break;
13315 }
13316
13317 case OMP_ATOMIC:
13318 case OMP_ATOMIC_READ:
13319 case OMP_ATOMIC_CAPTURE_OLD:
13320 case OMP_ATOMIC_CAPTURE_NEW:
13321 ret = gimplify_omp_atomic (expr_p, pre_p);
13322 break;
13323
13324 case TRANSACTION_EXPR:
13325 ret = gimplify_transaction (expr_p, pre_p);
13326 break;
13327
13328 case TRUTH_AND_EXPR:
13329 case TRUTH_OR_EXPR:
13330 case TRUTH_XOR_EXPR:
13331 {
13332 tree orig_type = TREE_TYPE (*expr_p);
13333 tree new_type, xop0, xop1;
13334 *expr_p = gimple_boolify (*expr_p);
13335 new_type = TREE_TYPE (*expr_p);
13336 if (!useless_type_conversion_p (orig_type, new_type))
13337 {
13338 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
13339 ret = GS_OK;
13340 break;
13341 }
13342
13343 /* Boolified binary truth expressions are semantically equivalent
13344 to bitwise binary expressions. Canonicalize them to the
13345 bitwise variant. */
13346 switch (TREE_CODE (*expr_p))
13347 {
13348 case TRUTH_AND_EXPR:
13349 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
13350 break;
13351 case TRUTH_OR_EXPR:
13352 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
13353 break;
13354 case TRUTH_XOR_EXPR:
13355 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
13356 break;
13357 default:
13358 break;
13359 }
13360 /* Now make sure that operands have compatible type to
13361 expression's new_type. */
13362 xop0 = TREE_OPERAND (*expr_p, 0);
13363 xop1 = TREE_OPERAND (*expr_p, 1);
13364 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
13365 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
13366 new_type,
13367 xop0);
13368 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
13369 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
13370 new_type,
13371 xop1);
13372 /* Continue classified as tcc_binary. */
13373 goto expr_2;
13374 }
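/* E.g. the non-short-circuiting 'TRUTH_AND_EXPR (a, b)' simply has
   its code flipped to BIT_AND_EXPR, becoming 'a & b' on the
   boolified operands, with conversions inserted when an operand
   type does not match the boolified expression type.  */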
13375
13376 case VEC_COND_EXPR:
13377 {
13378 enum gimplify_status r0, r1, r2;
13379
13380 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13381 post_p, is_gimple_condexpr, fb_rvalue);
13382 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13383 post_p, is_gimple_val, fb_rvalue);
13384 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13385 post_p, is_gimple_val, fb_rvalue);
13386
13387 ret = MIN (MIN (r0, r1), r2);
13388 recalculate_side_effects (*expr_p);
13389 }
13390 break;
13391
13392 case VEC_PERM_EXPR:
13393 /* Classified as tcc_expression. */
13394 goto expr_3;
13395
13396 case BIT_INSERT_EXPR:
13397 /* Argument 3 is a constant. */
13398 goto expr_2;
13399
13400 case POINTER_PLUS_EXPR:
13401 {
13402 enum gimplify_status r0, r1;
13403 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13404 post_p, is_gimple_val, fb_rvalue);
13405 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13406 post_p, is_gimple_val, fb_rvalue);
13407 recalculate_side_effects (*expr_p);
13408 ret = MIN (r0, r1);
13409 break;
13410 }
13411
13412 default:
13413 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
13414 {
13415 case tcc_comparison:
13416 /* Handle comparisons of non-scalar-mode aggregate objects with
13417 a call to memcmp. It would be nice to only have to do
13418 this for variable-sized objects, but then we'd have to allow
13419 the same nest of reference nodes we allow for MODIFY_EXPR and
13420 that's too complex.
13421
13422 Compare scalar mode aggregates as scalar mode values. Using
13423 memcmp for them would be very inefficient at best, and is
13424 plain wrong if bitfields are involved. */
13425 {
13426 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
13427
13428 /* Vector comparisons need no boolification. */
13429 if (TREE_CODE (type) == VECTOR_TYPE)
13430 goto expr_2;
13431 else if (!AGGREGATE_TYPE_P (type))
13432 {
13433 tree org_type = TREE_TYPE (*expr_p);
13434 *expr_p = gimple_boolify (*expr_p);
13435 if (!useless_type_conversion_p (org_type,
13436 TREE_TYPE (*expr_p)))
13437 {
13438 *expr_p = fold_convert_loc (input_location,
13439 org_type, *expr_p);
13440 ret = GS_OK;
13441 }
13442 else
13443 goto expr_2;
13444 }
13445 else if (TYPE_MODE (type) != BLKmode)
13446 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
13447 else
13448 ret = gimplify_variable_sized_compare (expr_p);
13449
13450 break;
13451 }
13452
13453 /* If *EXPR_P does not need to be special-cased, handle it
13454 according to its class. */
13455 case tcc_unary:
13456 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13457 post_p, is_gimple_val, fb_rvalue);
13458 break;
13459
13460 case tcc_binary:
13461 expr_2:
13462 {
13463 enum gimplify_status r0, r1;
13464
13465 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13466 post_p, is_gimple_val, fb_rvalue);
13467 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13468 post_p, is_gimple_val, fb_rvalue);
13469
13470 ret = MIN (r0, r1);
13471 break;
13472 }
13473
13474 expr_3:
13475 {
13476 enum gimplify_status r0, r1, r2;
13477
13478 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13479 post_p, is_gimple_val, fb_rvalue);
13480 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13481 post_p, is_gimple_val, fb_rvalue);
13482 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13483 post_p, is_gimple_val, fb_rvalue);
13484
13485 ret = MIN (MIN (r0, r1), r2);
13486 break;
13487 }
13488
13489 case tcc_declaration:
13490 case tcc_constant:
13491 ret = GS_ALL_DONE;
13492 goto dont_recalculate;
13493
13494 default:
13495 gcc_unreachable ();
13496 }
13497
13498 recalculate_side_effects (*expr_p);
13499
13500 dont_recalculate:
13501 break;
13502 }
13503
13504 gcc_assert (*expr_p || ret != GS_OK);
13505 }
13506 while (ret == GS_OK);
13507
13508 /* If we encountered an error_mark somewhere nested inside, either
13509 stub out the statement or propagate the error back out. */
13510 if (ret == GS_ERROR)
13511 {
13512 if (is_statement)
13513 *expr_p = NULL;
13514 goto out;
13515 }
13516
13517 /* This was only valid as a return value from the langhook, which
13518 we handled. Make sure it doesn't escape from any other context. */
13519 gcc_assert (ret != GS_UNHANDLED);
13520
13521 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
13522 {
13523 /* We aren't looking for a value, and we don't have a valid
13524 statement. If it doesn't have side-effects, throw it away.
13525 We can also get here with code such as "*&&L;", where L is
13526 a LABEL_DECL that is marked as FORCED_LABEL. */
13527 if (TREE_CODE (*expr_p) == LABEL_DECL
13528 || !TREE_SIDE_EFFECTS (*expr_p))
13529 *expr_p = NULL;
13530 else if (!TREE_THIS_VOLATILE (*expr_p))
13531 {
13532 /* This is probably a _REF that contains something nested that
13533 has side effects. Recurse through the operands to find it. */
13534 enum tree_code code = TREE_CODE (*expr_p);
13535
13536 switch (code)
13537 {
13538 case COMPONENT_REF:
13539 case REALPART_EXPR:
13540 case IMAGPART_EXPR:
13541 case VIEW_CONVERT_EXPR:
13542 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13543 gimple_test_f, fallback);
13544 break;
13545
13546 case ARRAY_REF:
13547 case ARRAY_RANGE_REF:
13548 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13549 gimple_test_f, fallback);
13550 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
13551 gimple_test_f, fallback);
13552 break;
13553
13554 default:
13555 /* Anything else with side-effects must be converted to
13556 a valid statement before we get here. */
13557 gcc_unreachable ();
13558 }
13559
13560 *expr_p = NULL;
13561 }
13562 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
13563 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
13564 {
13565 /* Historically, the compiler has treated a bare reference
13566 to a non-BLKmode volatile lvalue as forcing a load. */
13567 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
13568
13569 /* Normally, we do not want to create a temporary for a
13570 TREE_ADDRESSABLE type because such a type should not be
13571 copied by bitwise-assignment. However, we make an
13572 exception here, as all we are doing here is ensuring that
13573 we read the bytes that make up the type. We use
13574 create_tmp_var_raw because create_tmp_var will abort when
13575 given a TREE_ADDRESSABLE type. */
13576 tree tmp = create_tmp_var_raw (type, "vol");
13577 gimple_add_tmp_var (tmp);
13578 gimplify_assign (tmp, *expr_p, pre_p);
13579 *expr_p = NULL;
13580 }
13581 else
13582 /* We can't do anything useful with a volatile reference to
13583 an incomplete type, so just throw it away. Likewise for
13584 a BLKmode type, since any implicit inner load should
13585 already have been turned into an explicit one by the
13586 gimplification process. */
13587 *expr_p = NULL;
13588 }
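/* E.g. a statement consisting of just 'v;' where 'v' is a volatile
   non-BLKmode int thus gimplifies to a load of 'v' into the "vol"
   temporary created above, while a side-effect-free 'x.field;' is
   discarded entirely.  */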
13589
13590 /* If we are gimplifying at the statement level, we're done. Tack
13591 everything together and return. */
13592 if (fallback == fb_none || is_statement)
13593 {
13594 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
13595 it out for GC to reclaim it. */
13596 *expr_p = NULL_TREE;
13597
13598 if (!gimple_seq_empty_p (internal_pre)
13599 || !gimple_seq_empty_p (internal_post))
13600 {
13601 gimplify_seq_add_seq (&internal_pre, internal_post);
13602 gimplify_seq_add_seq (pre_p, internal_pre);
13603 }
13604
13605 /* The result of gimplifying *EXPR_P is going to be the last few
13606 statements in *PRE_P and *POST_P. Add location information
13607 to all the statements that were added by the gimplification
13608 helpers. */
13609 if (!gimple_seq_empty_p (*pre_p))
13610 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
13611
13612 if (!gimple_seq_empty_p (*post_p))
13613 annotate_all_with_location_after (*post_p, post_last_gsi,
13614 input_location);
13615
13616 goto out;
13617 }
13618
13619 #ifdef ENABLE_GIMPLE_CHECKING
13620 if (*expr_p)
13621 {
13622 enum tree_code code = TREE_CODE (*expr_p);
13623 /* These expressions should already be in gimple IR form. */
13624 gcc_assert (code != MODIFY_EXPR
13625 && code != ASM_EXPR
13626 && code != BIND_EXPR
13627 && code != CATCH_EXPR
13628 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
13629 && code != EH_FILTER_EXPR
13630 && code != GOTO_EXPR
13631 && code != LABEL_EXPR
13632 && code != LOOP_EXPR
13633 && code != SWITCH_EXPR
13634 && code != TRY_FINALLY_EXPR
13635 && code != OACC_PARALLEL
13636 && code != OACC_KERNELS
13637 && code != OACC_DATA
13638 && code != OACC_HOST_DATA
13639 && code != OACC_DECLARE
13640 && code != OACC_UPDATE
13641 && code != OACC_ENTER_DATA
13642 && code != OACC_EXIT_DATA
13643 && code != OACC_CACHE
13644 && code != OMP_CRITICAL
13645 && code != OMP_FOR
13646 && code != OACC_LOOP
13647 && code != OMP_MASTER
13648 && code != OMP_TASKGROUP
13649 && code != OMP_ORDERED
13650 && code != OMP_PARALLEL
13651 && code != OMP_SCAN
13652 && code != OMP_SECTIONS
13653 && code != OMP_SECTION
13654 && code != OMP_SINGLE);
13655 }
13656 #endif
13657
13658 /* Otherwise we're gimplifying a subexpression, so the resulting
13659 value is interesting. If it's a valid operand that matches
13660 GIMPLE_TEST_F, we're done. Unless we are handling some
13661 post-effects internally; if that's the case, we need to copy into
13662 a temporary before adding the post-effects to POST_P. */
13663 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
13664 goto out;
13665
13666 /* Otherwise, we need to create a new temporary for the gimplified
13667 expression. */
13668
13669 /* We can't return an lvalue if we have an internal postqueue. The
13670 object the lvalue refers to would (probably) be modified by the
13671 postqueue; we need to copy the value out first, which means an
13672 rvalue. */
13673 if ((fallback & fb_lvalue)
13674 && gimple_seq_empty_p (internal_post)
13675 && is_gimple_addressable (*expr_p))
13676 {
13677 /* An lvalue will do. Take the address of the expression, store it
13678 in a temporary, and replace the expression with an INDIRECT_REF of
13679 that temporary. */
13680 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
13681 unsigned int ref_align = get_object_alignment (*expr_p);
13682 tree ref_type = TREE_TYPE (*expr_p);
13683 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
13684 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
13685 if (TYPE_ALIGN (ref_type) != ref_align)
13686 ref_type = build_aligned_type (ref_type, ref_align);
13687 *expr_p = build2 (MEM_REF, ref_type,
13688 tmp, build_zero_cst (ref_alias_type));
13689 }
13690 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
13691 {
13692 /* An rvalue will do. Assign the gimplified expression into a
13693 new temporary TMP and replace the original expression with
13694 TMP. First, make sure that the expression has a type so that
13695 it can be assigned into a temporary. */
13696 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
13697 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
13698 }
13699 else
13700 {
13701 #ifdef ENABLE_GIMPLE_CHECKING
13702 if (!(fallback & fb_mayfail))
13703 {
13704 fprintf (stderr, "gimplification failed:\n");
13705 print_generic_expr (stderr, *expr_p);
13706 debug_tree (*expr_p);
13707 internal_error ("gimplification failed");
13708 }
13709 #endif
13710 gcc_assert (fallback & fb_mayfail);
13711
13712 /* If this is an asm statement, and the user asked for the
13713 impossible, don't die. Fail and let gimplify_asm_expr
13714 issue an error. */
13715 ret = GS_ERROR;
13716 goto out;
13717 }
13718
13719 /* Make sure the temporary matches our predicate. */
13720 gcc_assert ((*gimple_test_f) (*expr_p));
13721
13722 if (!gimple_seq_empty_p (internal_post))
13723 {
13724 annotate_all_with_location (internal_post, input_location);
13725 gimplify_seq_add_seq (pre_p, internal_post);
13726 }
13727
13728 out:
13729 input_location = saved_location;
13730 return ret;
13731 }
13732
13733 /* Like gimplify_expr but make sure the gimplified result is not itself
13734 an SSA name (if it were, replace it with a decl). Temporaries
13735 required by evaluating *EXPR_P may still be SSA names. */
13736
13737 static enum gimplify_status
13738 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13739 bool (*gimple_test_f) (tree), fallback_t fallback,
13740 bool allow_ssa)
13741 {
13742 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
13743 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
13744 gimple_test_f, fallback);
13745 if (! allow_ssa
13746 && TREE_CODE (*expr_p) == SSA_NAME)
13747 {
13748 tree name = *expr_p;
13749 if (was_ssa_name_p)
13750 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
13751 else
13752 {
13753 /* Avoid the extra copy if possible. */
13754 *expr_p = create_tmp_reg (TREE_TYPE (name));
13755 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
13756 release_ssa_name (name);
13757 }
13758 }
13759 return ret;
13760 }
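/* E.g. gimplify_one_sizepos below passes ALLOW_SSA == false so that
   a variable size such as 'n + 1' ends up in a VAR_DECL temporary
   rather than an SSA name, which could be reclaimed if its defining
   statement vanished.  */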
13761
13762 /* Look through TYPE for variable-sized objects and gimplify each such
13763 size that we find. Add to LIST_P any statements generated. */
13764
13765 void
13766 gimplify_type_sizes (tree type, gimple_seq *list_p)
13767 {
13768 tree field, t;
13769
13770 if (type == NULL || type == error_mark_node)
13771 return;
13772
13773 /* We first do the main variant, then copy into any other variants. */
13774 type = TYPE_MAIN_VARIANT (type);
13775
13776 /* Avoid infinite recursion. */
13777 if (TYPE_SIZES_GIMPLIFIED (type))
13778 return;
13779
13780 TYPE_SIZES_GIMPLIFIED (type) = 1;
13781
13782 switch (TREE_CODE (type))
13783 {
13784 case INTEGER_TYPE:
13785 case ENUMERAL_TYPE:
13786 case BOOLEAN_TYPE:
13787 case REAL_TYPE:
13788 case FIXED_POINT_TYPE:
13789 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
13790 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
13791
13792 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
13793 {
13794 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
13795 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
13796 }
13797 break;
13798
13799 case ARRAY_TYPE:
13800 /* These types may not have declarations, so handle them here. */
13801 gimplify_type_sizes (TREE_TYPE (type), list_p);
13802 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
13803 /* Ensure VLA bounds aren't removed: at -O0 they should be variables
13804 with assigned stack slots; at -O1+ with -g they should be tracked
13805 by VTA. */
13806 if (!(TYPE_NAME (type)
13807 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13808 && DECL_IGNORED_P (TYPE_NAME (type)))
13809 && TYPE_DOMAIN (type)
13810 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
13811 {
13812 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
13813 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
13814 DECL_IGNORED_P (t) = 0;
13815 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
13816 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
13817 DECL_IGNORED_P (t) = 0;
13818 }
13819 break;
13820
13821 case RECORD_TYPE:
13822 case UNION_TYPE:
13823 case QUAL_UNION_TYPE:
13824 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
13825 if (TREE_CODE (field) == FIELD_DECL)
13826 {
13827 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
13828 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
13829 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
13830 gimplify_type_sizes (TREE_TYPE (field), list_p);
13831 }
13832 break;
13833
13834 case POINTER_TYPE:
13835 case REFERENCE_TYPE:
13836 /* We used to recurse on the pointed-to type here, which turned out to
13837 be incorrect because its definition might refer to variables not
13838 yet initialized at this point if a forward declaration is involved.
13839
13840 It was actually useful for anonymous pointed-to types to ensure
13841 that the evaluation of the sizes dominates every possible later use
13842 of the values. Restricting to such types here would be safe since there
13843 is no possible forward declaration around, but would introduce an
13844 undesirable middle-end semantic to anonymity. We then defer to
13845 front-ends the responsibility of ensuring that the sizes are
13846 evaluated both early and late enough, e.g. by attaching artificial
13847 type declarations to the tree. */
13848 break;
13849
13850 default:
13851 break;
13852 }
13853
13854 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
13855 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
13856
13857 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
13858 {
13859 TYPE_SIZE (t) = TYPE_SIZE (type);
13860 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
13861 TYPE_SIZES_GIMPLIFIED (t) = 1;
13862 }
13863 }
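/* For a VLA type such as 'int[n + 1]', the walk above gimplifies
   the domain bounds and TYPE_SIZE/TYPE_SIZE_UNIT into temporaries
   emitted on LIST_P, roughly

     D.1 = n;
     D.2 = (sizetype) (n + 1) * 4;

   (a sketch, not an exact dump), so every later use of the type
   sees already-evaluated sizes.  */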
13864
13865 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
13866 a size or position, has had all of its SAVE_EXPRs evaluated.
13867 We add any required statements to *STMT_P. */
13868
13869 void
13870 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
13871 {
13872 tree expr = *expr_p;
13873
13874 /* We don't do anything if the value isn't there, is constant, or contains
13875 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
13876 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
13877 will want to replace it with a new variable, but that will cause problems
13878 if this type is from outside the function. It's OK to have that here. */
13879 if (expr == NULL_TREE
13880 || is_gimple_constant (expr)
13881 || TREE_CODE (expr) == VAR_DECL
13882 || CONTAINS_PLACEHOLDER_P (expr))
13883 return;
13884
13885 *expr_p = unshare_expr (expr);
13886
13887 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
13888 if the def vanishes. */
13889 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
13890
13891 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
13892 FE, ensure that it is a VAR_DECL; otherwise we might handle some decls
13893 through gimplify_vla_decl even when all their sizes would be INTEGER_CSTs. */
13894 if (is_gimple_constant (*expr_p))
13895 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
13896 }
13897
13898 /* Gimplify the body of FNDECL and return a GIMPLE_BIND node
13899 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
13900 is true, also gimplify the parameters. */
13901
13902 gbind *
13903 gimplify_body (tree fndecl, bool do_parms)
13904 {
13905 location_t saved_location = input_location;
13906 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
13907 gimple *outer_stmt;
13908 gbind *outer_bind;
13909
13910 timevar_push (TV_TREE_GIMPLIFY);
13911
13912 init_tree_ssa (cfun);
13913
13914 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
13915 gimplification. */
13916 default_rtl_profile ();
13917
13918 gcc_assert (gimplify_ctxp == NULL);
13919 push_gimplify_context (true);
13920
13921 if (flag_openacc || flag_openmp)
13922 {
13923 gcc_assert (gimplify_omp_ctxp == NULL);
13924 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
13925 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
13926 }
13927
13928 /* Unshare most shared trees in the body and in that of any nested functions.
13929 It would seem we don't have to do this for nested functions because
13930 they are supposed to be output and then the outer function gimplified
13931 first, but the g++ front end doesn't always do it that way. */
13932 unshare_body (fndecl);
13933 unvisit_body (fndecl);
13934
13935 /* Make sure input_location isn't set to something weird. */
13936 input_location = DECL_SOURCE_LOCATION (fndecl);
13937
13938 /* Resolve callee-copies. This has to be done before processing
13939 the body so that DECL_VALUE_EXPR gets processed correctly. */
13940 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
13941
13942 /* Gimplify the function's body. */
13943 seq = NULL;
13944 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
13945 outer_stmt = gimple_seq_first_stmt (seq);
13946 if (!outer_stmt)
13947 {
13948 outer_stmt = gimple_build_nop ();
13949 gimplify_seq_add_stmt (&seq, outer_stmt);
13950 }
13951
13952 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
13953 not the case, wrap everything in a GIMPLE_BIND to make it so. */
13954 if (gimple_code (outer_stmt) == GIMPLE_BIND
13955 && gimple_seq_first (seq) == gimple_seq_last (seq))
13956 outer_bind = as_a <gbind *> (outer_stmt);
13957 else
13958 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
13959
13960 DECL_SAVED_TREE (fndecl) = NULL_TREE;
13961
13962 /* If we had callee-copies statements, insert them at the beginning
13963 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
13964 if (!gimple_seq_empty_p (parm_stmts))
13965 {
13966 tree parm;
13967
13968 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
13969 if (parm_cleanup)
13970 {
13971 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
13972 GIMPLE_TRY_FINALLY);
13973 parm_stmts = NULL;
13974 gimple_seq_add_stmt (&parm_stmts, g);
13975 }
13976 gimple_bind_set_body (outer_bind, parm_stmts);
13977
13978 for (parm = DECL_ARGUMENTS (current_function_decl);
13979 parm; parm = DECL_CHAIN (parm))
13980 if (DECL_HAS_VALUE_EXPR_P (parm))
13981 {
13982 DECL_HAS_VALUE_EXPR_P (parm) = 0;
13983 DECL_IGNORED_P (parm) = 0;
13984 }
13985 }
13986
13987 if ((flag_openacc || flag_openmp || flag_openmp_simd)
13988 && gimplify_omp_ctxp)
13989 {
13990 delete_omp_context (gimplify_omp_ctxp);
13991 gimplify_omp_ctxp = NULL;
13992 }
13993
13994 pop_gimplify_context (outer_bind);
13995 gcc_assert (gimplify_ctxp == NULL);
13996
13997 if (flag_checking && !seen_error ())
13998 verify_gimple_in_seq (gimple_bind_body (outer_bind));
13999
14000 timevar_pop (TV_TREE_GIMPLIFY);
14001 input_location = saved_location;
14002
14003 return outer_bind;
14004 }
14005
14006 typedef char *char_p; /* For DEF_VEC_P. */
14007
14008 /* Return whether we should exclude FNDECL from instrumentation. */
14009
14010 static bool
14011 flag_instrument_functions_exclude_p (tree fndecl)
14012 {
14013 vec<char_p> *v;
14014
14015 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
14016 if (v && v->length () > 0)
14017 {
14018 const char *name;
14019 int i;
14020 char *s;
14021
14022 name = lang_hooks.decl_printable_name (fndecl, 0);
14023 FOR_EACH_VEC_ELT (*v, i, s)
14024 if (strstr (name, s) != NULL)
14025 return true;
14026 }
14027
14028 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
14029 if (v && v->length () > 0)
14030 {
14031 const char *name;
14032 int i;
14033 char *s;
14034
14035 name = DECL_SOURCE_FILE (fndecl);
14036 FOR_EACH_VEC_ELT (*v, i, s)
14037 if (strstr (name, s) != NULL)
14038 return true;
14039 }
14040
14041 return false;
14042 }
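/* E.g. with '-finstrument-functions-exclude-function-list=foo,bar'
   any function whose printable name contains "foo" or "bar" is
   skipped; note the test above is a substring match via strstr,
   not an exact symbol comparison.  */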
14043
14044 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
14045 node for the function we want to gimplify.
14046
14047 Return the sequence of GIMPLE statements corresponding to the body
14048 of FNDECL. */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
           || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (parm)
          && !needs_to_live_in_memory (parm))
        DECL_GIMPLE_REG_P (parm) = 1;
    }

  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
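  /* Schematically (a sketch, not literal dump output; temporary names
     vary), the instrumented function FOO becomes

       return_addr.0 = __builtin_return_address (0);
       __cyg_profile_func_enter (&FOO, return_addr.0);
       try
         {
           <original body>
         }
       finally
         {
           return_addr.1 = __builtin_return_address (0);
           __cyg_profile_func_exit (&FOO, return_addr.1);
         }  */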
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
           && DECL_EXTERNAL (fndecl)
           && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var, this_fn_addr;
      gcall *call;

      /* The instrumentation hooks aren't going to call the instrumented
         function and the address they receive is expected to be matchable
         against symbol addresses.  Make sure we don't create a trampoline,
         in case the current function is nested.  */
      this_fn_addr = build_fold_addr_expr (current_function_decl);
      TREE_NO_TRAMPOLINE (this_fn_addr) = 1;

      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

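  /* Under -fsanitize=thread, similarly wrap the whole body in a
     try/finally so that the exit hook runs on every path out of the
     function.  IFN_TSAN_FUNC_EXIT is an internal function that is only
     expanded into a call to the thread-sanitizer runtime later (a note
     on intent; see the tsan pass for the details).  */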
  if (sanitize_flags_p (SANITIZE_THREAD))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}

/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}

/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
                      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
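  /* For instance (an illustrative example), va_arg (ap, char) is
     diagnosed here: a char argument is promoted to int when passed
     through `...', so the callee must read an int instead.  */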
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
         in a system header) through `...'.  */
      location_t xloc
        = expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
         violation, so we cannot make this an error.  If this call is never
         executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
                           "%qT is promoted to %qT when passed through %<...%>",
                           type, promoted_type);
      if (!gave_help && warned)
        {
          gave_help = true;
          inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
                  promoted_type, type);
        }

      /* We can, however, treat "undefined" any way we please.
         Emit a trap (which in effect aborts the program) to encourage
         the user to fix the program.  */
      if (warned)
        inform (xloc, "if this code is reached, the program will abort");
      /* Before the trap, allow the evaluation of the va_list
         expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
                               builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
         mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
                                          valist, tag, aptag);
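
  /* E.g. "x = va_arg (ap, int);" now reads roughly as (a sketch; the
     exact operands and dump syntax are target- and version-dependent)

       x = .VA_ARG (&ap, 0B, 0B);

     which a later pass expands into the real va_arg sequence once
     PROP_gimple_lva is seen to be unset.  */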

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}

/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */
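/* For example (an illustrative use; "lhs" is a hypothetical variable):

     gimple_seq seq = NULL;
     gimplify_assign (lhs, build_int_cst (integer_type_node, 42), &seq);

   appends "lhs = 42" to SEQ and returns that assignment.  */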

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}

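/* Hash and equality callbacks for the hash table of gimplification
   temporaries: a previously created temporary is reused for a new
   expression only if the two expressions hash and compare equal
   (a descriptive note; the table itself lives in the gimplify
   context).  */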
inline hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}

inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}