/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2020 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an "always, to" or "always, tofrom"
     mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
     fields.  */
  GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
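
/* As a rough example of how these flags combine: a variable named in an
   explicit firstprivate clause and then referenced inside the region
   would typically be recorded in the context's splay tree with a value
   like (GOVD_FIRSTPRIVATE | GOVD_EXPLICIT | GOVD_SEEN).  */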


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
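
/* For instance, a combined construct such as "#pragma omp parallel for"
   is gimplified with the parallel part recorded as ORT_COMBINED_PARALLEL
   (ORT_PARALLEL with the low bit set), which lets clause handling
   distinguish it from a standalone parallel region.  */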

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  int defaultmap[4];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);

/* Shorter alias name for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
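
/* A sketch of the usual pairing of these two functions, as seen in
   callers such as gimplify_body:

     push_gimplify_context ();
     ... gimplify something into SEQ ...
     pop_gimplify_context (gimple_seq_first_stmt (SEQ));

   so that any temporaries created in between end up declared in the
   outermost bind of the freshly gimplified sequence.  */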

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
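
/* For example, when optimizing, two requests for a formal temporary
   holding the same value tree "b + c" are roughly handed back the same
   temporary, because the hash table above maps each value to the
   temporary created for it on first sight.  */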

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else
		    flag = GOVD_PRIVATE;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


\f
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
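
/* As a small example of the problem being solved here: if a front end
   builds one PLUS_EXPR node <a + b> and reuses it in two different
   statements, gimplifying the first use may rewrite the node's operands
   in place, silently corrupting the second use.  Copying the node on
   its second visit (mark/copy/unmark) keeps each context independent.  */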

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of that EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}

\f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
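
/* For example, given a GNU statement expression used as an rvalue,

     x = ({ int i = f (); i + 1; });

   the BIND_EXPR wrapper is given void type and its last value-producing
   expression is roughly replaced by an assignment to a "retval"
   temporary, which this function then returns to the caller.  */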

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
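
/* The two calls built above eventually appear in the gimplified body
   roughly as

     saved_stack.N = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.N);

   bracketing a block whose VLA allocations should be reclaimed on exit.  */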

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}
/* Generate an IFN_ASAN_MARK call that either poisons or unpoisons the
   shadow memory of variable DECL, depending on the POISON flag.  The
   call is inserted at the position identified by iterator IT; the
   BEFORE flag selects whether it goes before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* All stack variables must be aligned to the ASAN shadow granularity.  */
  if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that either poisons or
   unpoisons DECL, depending on the POISON flag.  The created statement
   is appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}
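
/* A sketch of the effect: for a 4-byte local "x" going out of scope,
   the sequence gains an internal call along the lines of

     ASAN_MARK (POISON, &x, 4);

   which the sanitizer machinery later expands into shadow-memory
   stores.  */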

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate an IFN_ASAN_MARK internal call for each variable in
   VARIABLES, poisoning or unpoisoning according to the POISON flag.
   The created statements are appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable so
	 that it is not rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t));
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
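
/* A rough picture of the result for a block needing both cleanups:

     gimple_bind vars = <...>
     {
       saved_stack.N = __builtin_stack_save ();
       try
         {
           ... body ...
         }
       finally
         {
           __builtin_stack_restore (saved_stack.N);
           aggr = {CLOBBER};
         }
     }

   where the stack save/restore pair appears only if the body allocated
   VLAs, and a clobber is emitted for each out-of-scope variable that
   lives in memory.  */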

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, add a PREDICT statement marking
     the early return path as not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
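
/* For instance, in a function returning int by value,

     return a + b;

   is rewritten along the lines of

     D.N = a + b;
     return D.N;

   where D.N is the single return temporary shared by every return
   statement in the function (gimplify_ctxp->return_temp).  */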

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
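
/* As an example, a VLA declaration such as

     int a[n];

   is given a DECL_VALUE_EXPR of the form *a.N for a new pointer
   temporary a.N, and the emitted sequence looks roughly like

     a.N = __builtin_alloca_with_align (SIZE, ALIGN);

   (or a related alloca variant), after which every later use of "a"
   is rewritten as an indirection through a.N.  */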

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      poly_uint64 size;
      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && maybe_gt (size,
			   (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
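
/* For example, a LOOP_EXPR whose body contains an EXIT_EXPR gimplifies
   roughly to

     start:
       ... body, with the exit test branching to out ...
       goto start;
     out:

   where the "out" label is emitted only if gimplifying the body
   actually created gimplify_ctxp->exit_label.  */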
1848
1849 /* Gimplify a statement list onto a sequence. These may be created either
1850 by an enlightened front-end, or by shortcut_cond_expr. */
1851
1852 static enum gimplify_status
1853 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1854 {
1855 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1856
1857 tree_stmt_iterator i = tsi_start (*expr_p);
1858
1859 while (!tsi_end_p (i))
1860 {
1861 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1862 tsi_delink (&i);
1863 }
1864
1865 if (temp)
1866 {
1867 *expr_p = temp;
1868 return GS_OK;
1869 }
1870
1871 return GS_ALL_DONE;
1872 }
1873
1874 /* Callback for walk_gimple_seq. */
1875
1876 static tree
1877 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1878 struct walk_stmt_info *wi)
1879 {
1880 gimple *stmt = gsi_stmt (*gsi_p);
1881
1882 *handled_ops_p = true;
1883 switch (gimple_code (stmt))
1884 {
1885 case GIMPLE_TRY:
1886 /* A compiler-generated cleanup or a user-written try block.
1887 If it's empty, don't dive into it--that would result in
1888 worse location info. */
1889 if (gimple_try_eval (stmt) == NULL)
1890 {
1891 wi->info = stmt;
1892 return integer_zero_node;
1893 }
1894 /* Fall through. */
1895 case GIMPLE_BIND:
1896 case GIMPLE_CATCH:
1897 case GIMPLE_EH_FILTER:
1898 case GIMPLE_TRANSACTION:
1899 /* Walk the sub-statements. */
1900 *handled_ops_p = false;
1901 break;
1902
1903 case GIMPLE_DEBUG:
1904 /* Ignore these. We may generate them before declarations that
1905 are never executed. If there's something to warn about,
1906 there will be non-debug stmts too, and we'll catch those. */
1907 break;
1908
1909 case GIMPLE_CALL:
1910 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1911 {
1912 *handled_ops_p = false;
1913 break;
1914 }
1915 /* Fall through. */
1916 default:
1917 /* Save the first "real" statement (not a decl/lexical scope/...). */
1918 wi->info = stmt;
1919 return integer_zero_node;
1920 }
1921 return NULL_TREE;
1922 }
1923
1924 /* Possibly warn about unreachable statements between the switch's controlling
1925 expression and the first case. SEQ is the body of a switch expression. */
1926
1927 static void
1928 maybe_warn_switch_unreachable (gimple_seq seq)
1929 {
1930 if (!warn_switch_unreachable
1931 /* This warning doesn't play well with Fortran when optimizations
1932 are on. */
1933 || lang_GNU_Fortran ()
1934 || seq == NULL)
1935 return;
1936
1937 struct walk_stmt_info wi;
1938 memset (&wi, 0, sizeof (wi));
1939 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1940 gimple *stmt = (gimple *) wi.info;
1941
1942 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1943 {
1944 if (gimple_code (stmt) == GIMPLE_GOTO
1945 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1946 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1947 /* Don't warn for compiler-generated gotos. These occur
1948 in Duff's devices, for example. */;
1949 else
1950 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1951 "statement will never be executed");
1952 }
1953 }
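
/* Editorial sketch, not part of gimplify.c: input that
   maybe_warn_switch_unreachable diagnoses; the call before the first
   case label can never execute.  */
#if 0
extern void g (void);
void
unreachable_demo (int x)
{
  switch (x)
    {
      g ();        /* warning: statement will never be executed  */
    case 1:
      g ();
      break;
    }
}
#endif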
1954
1955
1956 /* A label entry that pairs a label with a location. */
1957 struct label_entry
1958 {
1959 tree label;
1960 location_t loc;
1961 };
1962
1963 /* Find LABEL in vector of label entries VEC. */
1964
1965 static struct label_entry *
1966 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1967 {
1968 unsigned int i;
1969 struct label_entry *l;
1970
1971 FOR_EACH_VEC_ELT (*vec, i, l)
1972 if (l->label == label)
1973 return l;
1974 return NULL;
1975 }
1976
1977 /* Return true if LABEL, a LABEL_DECL, represents a case label
1978 in a vector of labels CASES. */
1979
1980 static bool
1981 case_label_p (const vec<tree> *cases, tree label)
1982 {
1983 unsigned int i;
1984 tree l;
1985
1986 FOR_EACH_VEC_ELT (*cases, i, l)
1987 if (CASE_LABEL (l) == label)
1988 return true;
1989 return false;
1990 }
1991
1992 /* Find the last nondebug statement in a scope STMT. */
1993
1994 static gimple *
1995 last_stmt_in_scope (gimple *stmt)
1996 {
1997 if (!stmt)
1998 return NULL;
1999
2000 switch (gimple_code (stmt))
2001 {
2002 case GIMPLE_BIND:
2003 {
2004 gbind *bind = as_a <gbind *> (stmt);
2005 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2006 return last_stmt_in_scope (stmt);
2007 }
2008
2009 case GIMPLE_TRY:
2010 {
2011 gtry *try_stmt = as_a <gtry *> (stmt);
2012 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2013 gimple *last_eval = last_stmt_in_scope (stmt);
2014 if (gimple_stmt_may_fallthru (last_eval)
2015 && (last_eval == NULL
2016 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2017 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2018 {
2019 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2020 return last_stmt_in_scope (stmt);
2021 }
2022 else
2023 return last_eval;
2024 }
2025
2026 case GIMPLE_DEBUG:
2027 gcc_unreachable ();
2028
2029 default:
2030 return stmt;
2031 }
2032 }
2033
2034 /* Collect interesting labels in LABELS and return the statement preceding
2035 another case label, or a user-defined label. Store a location useful
2036 to give warnings at *PREVLOC (usually the location of the returned
2037 statement or of its surrounding scope). */
2038
2039 static gimple *
2040 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2041 auto_vec <struct label_entry> *labels,
2042 location_t *prevloc)
2043 {
2044 gimple *prev = NULL;
2045
2046 *prevloc = UNKNOWN_LOCATION;
2047 do
2048 {
2049 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2050 {
2051 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2052 which starts with a GIMPLE_SWITCH and ends with a break label.
2053 Handle that as a single statement that can fall through. */
2054 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2055 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2056 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2057 if (last
2058 && gimple_code (first) == GIMPLE_SWITCH
2059 && gimple_code (last) == GIMPLE_LABEL)
2060 {
2061 tree label = gimple_label_label (as_a <glabel *> (last));
2062 if (SWITCH_BREAK_LABEL_P (label))
2063 {
2064 prev = bind;
2065 gsi_next (gsi_p);
2066 continue;
2067 }
2068 }
2069 }
2070 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2071 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2072 {
2073 /* Nested scope. Only look at the last statement of
2074 the innermost scope. */
2075 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2076 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2077 if (last)
2078 {
2079 prev = last;
2080 /* It might be a label without a location. Use the
2081 location of the scope then. */
2082 if (!gimple_has_location (prev))
2083 *prevloc = bind_loc;
2084 }
2085 gsi_next (gsi_p);
2086 continue;
2087 }
2088
2089 /* Ifs are tricky. */
2090 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2091 {
2092 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2093 tree false_lab = gimple_cond_false_label (cond_stmt);
2094 location_t if_loc = gimple_location (cond_stmt);
2095
2096 /* If we have e.g.
2097 if (i > 1) goto <D.2259>; else goto D;
2098 we can't do much with the else-branch. */
2099 if (!DECL_ARTIFICIAL (false_lab))
2100 break;
2101
2102 /* Go on until the false label, then one step back. */
2103 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2104 {
2105 gimple *stmt = gsi_stmt (*gsi_p);
2106 if (gimple_code (stmt) == GIMPLE_LABEL
2107 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2108 break;
2109 }
2110
2111 /* Not found? Oops. */
2112 if (gsi_end_p (*gsi_p))
2113 break;
2114
2115 struct label_entry l = { false_lab, if_loc };
2116 labels->safe_push (l);
2117
2118 /* Go to the last statement of the then branch. */
2119 gsi_prev (gsi_p);
2120
2121 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2122 <D.1759>:
2123 <stmt>;
2124 goto <D.1761>;
2125 <D.1760>:
2126 */
2127 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2128 && !gimple_has_location (gsi_stmt (*gsi_p)))
2129 {
2130 /* Look at the statement before; it might be an
2131 attribute fallthrough, in which case don't warn. */
2132 gsi_prev (gsi_p);
2133 bool fallthru_before_dest
2134 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2135 gsi_next (gsi_p);
2136 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2137 if (!fallthru_before_dest)
2138 {
2139 struct label_entry l = { goto_dest, if_loc };
2140 labels->safe_push (l);
2141 }
2142 }
2143 /* And move back. */
2144 gsi_next (gsi_p);
2145 }
2146
2147 /* Remember the last statement. Skip labels that are of no interest
2148 to us. */
2149 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2150 {
2151 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2152 if (find_label_entry (labels, label))
2153 prev = gsi_stmt (*gsi_p);
2154 }
2155 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2156 ;
2157 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2158 ;
2159 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2160 prev = gsi_stmt (*gsi_p);
2161 gsi_next (gsi_p);
2162 }
2163 while (!gsi_end_p (*gsi_p)
2164 /* Stop if we find a case or a user-defined label. */
2165 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2166 || !gimple_has_location (gsi_stmt (*gsi_p))));
2167
2168 if (prev && gimple_has_location (prev))
2169 *prevloc = gimple_location (prev);
2170 return prev;
2171 }
2172
2173 /* Return true if the switch fallthrough warning should occur. LABEL is
2174 the label statement that we're falling through to. */
2175
2176 static bool
2177 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2178 {
2179 gimple_stmt_iterator gsi = *gsi_p;
2180
2181 /* Don't warn if the label is marked with a "falls through" comment. */
2182 if (FALLTHROUGH_LABEL_P (label))
2183 return false;
2184
2185 /* Don't warn for non-case labels followed by a statement:
2186 case 0:
2187 foo ();
2188 label:
2189 bar ();
2190 as these are likely intentional. */
2191 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2192 {
2193 tree l;
2194 while (!gsi_end_p (gsi)
2195 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2196 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2197 && !case_label_p (&gimplify_ctxp->case_labels, l))
2198 gsi_next_nondebug (&gsi);
2199 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2200 return false;
2201 }
2202
2203 /* Don't warn for terminated branches, i.e. when the subsequent case label
2204 immediately breaks. */
2205 gsi = *gsi_p;
2206
2207 /* Skip all immediately following labels. */
2208 while (!gsi_end_p (gsi)
2209 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2210 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2211 gsi_next_nondebug (&gsi);
2212
2213 /* { ... something; default:; } */
2214 if (gsi_end_p (gsi)
2215 /* { ... something; default: break; } or
2216 { ... something; default: goto L; } */
2217 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2218 /* { ... something; default: return; } */
2219 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2220 return false;
2221
2222 return true;
2223 }
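
/* Editorial sketch, not part of gimplify.c: case 0 below falls through
   and is diagnosed by -Wimplicit-fallthrough; the "falls through"
   comment before case 2 marks the label FALLTHROUGH_LABEL_P and
   suppresses the warning.  */
#if 0
extern void g (int);
void
fallthru_demo (int x)
{
  switch (x)
    {
    case 0:
      g (0);       /* warning: this statement may fall through  */
    case 1:
      g (1);
      /* falls through */
    case 2:
      g (2);
      break;
    }
}
#endif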
2224
2225 /* Callback for walk_gimple_seq. */
2226
2227 static tree
2228 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2229 struct walk_stmt_info *)
2230 {
2231 gimple *stmt = gsi_stmt (*gsi_p);
2232
2233 *handled_ops_p = true;
2234 switch (gimple_code (stmt))
2235 {
2236 case GIMPLE_TRY:
2237 case GIMPLE_BIND:
2238 case GIMPLE_CATCH:
2239 case GIMPLE_EH_FILTER:
2240 case GIMPLE_TRANSACTION:
2241 /* Walk the sub-statements. */
2242 *handled_ops_p = false;
2243 break;
2244
2245 /* Find a sequence of form:
2246
2247 GIMPLE_LABEL
2248 [...]
2249 <may fallthru stmt>
2250 GIMPLE_LABEL
2251
2252 and possibly warn. */
2253 case GIMPLE_LABEL:
2254 {
2255 /* Found a label. Skip all immediately following labels. */
2256 while (!gsi_end_p (*gsi_p)
2257 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2258 gsi_next_nondebug (gsi_p);
2259
2260 /* There might be no more statements. */
2261 if (gsi_end_p (*gsi_p))
2262 return integer_zero_node;
2263
2264 /* Vector of labels that fall through. */
2265 auto_vec <struct label_entry> labels;
2266 location_t prevloc;
2267 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2268
2269 /* There might be no more statements. */
2270 if (gsi_end_p (*gsi_p))
2271 return integer_zero_node;
2272
2273 gimple *next = gsi_stmt (*gsi_p);
2274 tree label;
2275 /* If what follows is a label, then we may have a fallthrough. */
2276 if (gimple_code (next) == GIMPLE_LABEL
2277 && gimple_has_location (next)
2278 && (label = gimple_label_label (as_a <glabel *> (next)))
2279 && prev != NULL)
2280 {
2281 struct label_entry *l;
2282 bool warned_p = false;
2283 auto_diagnostic_group d;
2284 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2285 /* Quiet. */;
2286 else if (gimple_code (prev) == GIMPLE_LABEL
2287 && (label = gimple_label_label (as_a <glabel *> (prev)))
2288 && (l = find_label_entry (&labels, label)))
2289 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2290 "this statement may fall through");
2291 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2292 /* Try to be clever and don't warn when the statement
2293 can't actually fall through. */
2294 && gimple_stmt_may_fallthru (prev)
2295 && prevloc != UNKNOWN_LOCATION)
2296 warned_p = warning_at (prevloc,
2297 OPT_Wimplicit_fallthrough_,
2298 "this statement may fall through");
2299 if (warned_p)
2300 inform (gimple_location (next), "here");
2301
2302 /* Mark this label as processed so as to prevent multiple
2303 warnings in nested switches. */
2304 FALLTHROUGH_LABEL_P (label) = true;
2305
2306 /* So that the next warn_implicit_fallthrough_r invocation will start
2307 looking for a new sequence starting with this label. */
2308 gsi_prev (gsi_p);
2309 }
2310 }
2311 break;
2312 default:
2313 break;
2314 }
2315 return NULL_TREE;
2316 }
2317
2318 /* Warn when a switch case falls through. */
2319
2320 static void
2321 maybe_warn_implicit_fallthrough (gimple_seq seq)
2322 {
2323 if (!warn_implicit_fallthrough)
2324 return;
2325
2326 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2327 if (!(lang_GNU_C ()
2328 || lang_GNU_CXX ()
2329 || lang_GNU_OBJC ()))
2330 return;
2331
2332 struct walk_stmt_info wi;
2333 memset (&wi, 0, sizeof (wi));
2334 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2335 }
2336
2337 /* Callback for walk_gimple_seq. */
2338
2339 static tree
2340 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2341 struct walk_stmt_info *wi)
2342 {
2343 gimple *stmt = gsi_stmt (*gsi_p);
2344
2345 *handled_ops_p = true;
2346 switch (gimple_code (stmt))
2347 {
2348 case GIMPLE_TRY:
2349 case GIMPLE_BIND:
2350 case GIMPLE_CATCH:
2351 case GIMPLE_EH_FILTER:
2352 case GIMPLE_TRANSACTION:
2353 /* Walk the sub-statements. */
2354 *handled_ops_p = false;
2355 break;
2356 case GIMPLE_CALL:
2357 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2358 {
2359 gsi_remove (gsi_p, true);
2360 if (gsi_end_p (*gsi_p))
2361 {
2362 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2363 return integer_zero_node;
2364 }
2365
2366 bool found = false;
2367 location_t loc = gimple_location (stmt);
2368
2369 gimple_stmt_iterator gsi2 = *gsi_p;
2370 stmt = gsi_stmt (gsi2);
2371 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2372 {
2373 /* Go on until the artificial label. */
2374 tree goto_dest = gimple_goto_dest (stmt);
2375 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2376 {
2377 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2378 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2379 == goto_dest)
2380 break;
2381 }
2382
2383 /* Not found? Stop. */
2384 if (gsi_end_p (gsi2))
2385 break;
2386
2387 /* Look one past it. */
2388 gsi_next (&gsi2);
2389 }
2390
2391 /* We're looking for a case label or default label here. */
2392 while (!gsi_end_p (gsi2))
2393 {
2394 stmt = gsi_stmt (gsi2);
2395 if (gimple_code (stmt) == GIMPLE_LABEL)
2396 {
2397 tree label = gimple_label_label (as_a <glabel *> (stmt));
2398 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2399 {
2400 found = true;
2401 break;
2402 }
2403 }
2404 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2405 ;
2406 else if (!is_gimple_debug (stmt))
2407 /* Anything else is not expected. */
2408 break;
2409 gsi_next (&gsi2);
2410 }
2411 if (!found)
2412 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2413 "a case label or default label");
2414 }
2415 break;
2416 default:
2417 break;
2418 }
2419 return NULL_TREE;
2420 }
2421
2422 /* Expand all FALLTHROUGH () calls in SEQ. */
2423
2424 static void
2425 expand_FALLTHROUGH (gimple_seq *seq_p)
2426 {
2427 struct walk_stmt_info wi;
2428 location_t loc;
2429 memset (&wi, 0, sizeof (wi));
2430 wi.info = (void *) &loc;
2431 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2432 if (wi.callback_result == integer_zero_node)
2433 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2434 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2435 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2436 "a case label or default label");
2437 }
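
/* Editorial sketch, not part of gimplify.c: both fallthrough attributes
   below fail to immediately precede a case or default label, so
   expand_FALLTHROUGH issues the pedwarn above for each.  */
#if 0
extern void g (void);
void
bad_fallthru_demo (int x)
{
  switch (x)
    {
    case 0:
      __attribute__ ((fallthrough));
      g ();                           /* next stmt is not a label.  */
    case 1:
      __attribute__ ((fallthrough));  /* at the end of the switch.  */
    }
}
#endif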
2438
2439 \f
2440 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2441 branch to. */
2442
2443 static enum gimplify_status
2444 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2445 {
2446 tree switch_expr = *expr_p;
2447 gimple_seq switch_body_seq = NULL;
2448 enum gimplify_status ret;
2449 tree index_type = TREE_TYPE (switch_expr);
2450 if (index_type == NULL_TREE)
2451 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2452
2453 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2454 fb_rvalue);
2455 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2456 return ret;
2457
2458 if (SWITCH_BODY (switch_expr))
2459 {
2460 vec<tree> labels;
2461 vec<tree> saved_labels;
2462 hash_set<tree> *saved_live_switch_vars = NULL;
2463 tree default_case = NULL_TREE;
2464 gswitch *switch_stmt;
2465
2466 /* Save old labels, get new ones from body, then restore the old
2467 labels. Save everything from the switch body to append afterwards.
2468 saved_labels = gimplify_ctxp->case_labels;
2469 gimplify_ctxp->case_labels.create (8);
2470
2471 /* Do not create live_switch_vars if SWITCH_BODY is neither a BIND_EXPR nor a STATEMENT_LIST. */
2472 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2473 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2474 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2475 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2476 else
2477 gimplify_ctxp->live_switch_vars = NULL;
2478
2479 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2480 gimplify_ctxp->in_switch_expr = true;
2481
2482 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2483
2484 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2485 maybe_warn_switch_unreachable (switch_body_seq);
2486 maybe_warn_implicit_fallthrough (switch_body_seq);
2487 /* Only do this for the outermost GIMPLE_SWITCH. */
2488 if (!gimplify_ctxp->in_switch_expr)
2489 expand_FALLTHROUGH (&switch_body_seq);
2490
2491 labels = gimplify_ctxp->case_labels;
2492 gimplify_ctxp->case_labels = saved_labels;
2493
2494 if (gimplify_ctxp->live_switch_vars)
2495 {
2496 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2497 delete gimplify_ctxp->live_switch_vars;
2498 }
2499 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2500
2501 preprocess_case_label_vec_for_gimple (labels, index_type,
2502 &default_case);
2503
2504 bool add_bind = false;
2505 if (!default_case)
2506 {
2507 glabel *new_default;
2508
2509 default_case
2510 = build_case_label (NULL_TREE, NULL_TREE,
2511 create_artificial_label (UNKNOWN_LOCATION));
2512 if (old_in_switch_expr)
2513 {
2514 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2515 add_bind = true;
2516 }
2517 new_default = gimple_build_label (CASE_LABEL (default_case));
2518 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2519 }
2520 else if (old_in_switch_expr)
2521 {
2522 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2523 if (last && gimple_code (last) == GIMPLE_LABEL)
2524 {
2525 tree label = gimple_label_label (as_a <glabel *> (last));
2526 if (SWITCH_BREAK_LABEL_P (label))
2527 add_bind = true;
2528 }
2529 }
2530
2531 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2532 default_case, labels);
2533 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2534 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2535 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2536 so that we can easily find the start and end of the switch
2537 statement. */
2538 if (add_bind)
2539 {
2540 gimple_seq bind_body = NULL;
2541 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2542 gimple_seq_add_seq (&bind_body, switch_body_seq);
2543 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2544 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2545 gimplify_seq_add_stmt (pre_p, bind);
2546 }
2547 else
2548 {
2549 gimplify_seq_add_stmt (pre_p, switch_stmt);
2550 gimplify_seq_add_seq (pre_p, switch_body_seq);
2551 }
2552 labels.release ();
2553 }
2554 else
2555 gcc_unreachable ();
2556
2557 return GS_ALL_DONE;
2558 }
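
/* Editorial sketch, not part of gimplify.c: the rough GIMPLE shape the
   code above emits for a simple switch.  The D.* labels are hypothetical
   artificial LABEL_DECLs; D.2 stands for the default label synthesized
   when the source has none.  */
#if 0
int
switch_demo (int x)
{
  int r = 0;
  switch (x)
    {
    case 1: r = 1; break;
    }
  /* Gimplified roughly as:
       switch (x) <default: D.2, case 1: D.1>
       D.1:  r = 1;  goto D.2;
       D.2:  */
  return r;
}
#endif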
2559
2560 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2561
2562 static enum gimplify_status
2563 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2564 {
2565 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2566 == current_function_decl);
2567
2568 tree label = LABEL_EXPR_LABEL (*expr_p);
2569 glabel *label_stmt = gimple_build_label (label);
2570 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2571 gimplify_seq_add_stmt (pre_p, label_stmt);
2572
2573 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2574 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2575 NOT_TAKEN));
2576 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2577 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2578 TAKEN));
2579
2580 return GS_ALL_DONE;
2581 }
2582
2583 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2584
2585 static enum gimplify_status
2586 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2587 {
2588 struct gimplify_ctx *ctxp;
2589 glabel *label_stmt;
2590
2591 /* Invalid programs can play Duff's Device type games with, for example,
2592 #pragma omp parallel. At least in the C front end, we don't
2593 detect such invalid branches until after gimplification, in the
2594 diagnose_omp_blocks pass. */
2595 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2596 if (ctxp->case_labels.exists ())
2597 break;
2598
2599 tree label = CASE_LABEL (*expr_p);
2600 label_stmt = gimple_build_label (label);
2601 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2602 ctxp->case_labels.safe_push (*expr_p);
2603 gimplify_seq_add_stmt (pre_p, label_stmt);
2604
2605 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2606 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2607 NOT_TAKEN));
2608 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2609 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2610 TAKEN));
2611
2612 return GS_ALL_DONE;
2613 }
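
/* Editorial sketch, not part of gimplify.c: a GNU label attribute that
   makes the code above emit a GIMPLE_PREDICT statement after the
   label.  */
#if 0
extern void g (void);
void
label_attr_demo (int x)
{
  if (x)
    goto unlikely;
  g ();
  return;
 unlikely: __attribute__ ((cold));   /* lowered with PRED_COLD_LABEL.  */
  g ();
}
#endif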
2614
2615 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2616 if necessary. */
2617
2618 tree
2619 build_and_jump (tree *label_p)
2620 {
2621 if (label_p == NULL)
2622 /* If there's nowhere to jump, just fall through. */
2623 return NULL_TREE;
2624
2625 if (*label_p == NULL_TREE)
2626 {
2627 tree label = create_artificial_label (UNKNOWN_LOCATION);
2628 *label_p = label;
2629 }
2630
2631 return build1 (GOTO_EXPR, void_type_node, *label_p);
2632 }
2633
2634 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2635 This also involves building a label to jump to and communicating it to
2636 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2637
2638 static enum gimplify_status
2639 gimplify_exit_expr (tree *expr_p)
2640 {
2641 tree cond = TREE_OPERAND (*expr_p, 0);
2642 tree expr;
2643
2644 expr = build_and_jump (&gimplify_ctxp->exit_label);
2645 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2646 *expr_p = expr;
2647
2648 return GS_OK;
2649 }
2650
2651 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2652 different from its canonical type, wrap the whole thing inside a
2653 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2654 type.
2655
2656 The canonical type of a COMPONENT_REF is the type of the field being
2657 referenced--unless the field is a bit-field which can be read directly
2658 in a smaller mode, in which case the canonical type is the
2659 sign-appropriate type corresponding to that mode. */
2660
2661 static void
2662 canonicalize_component_ref (tree *expr_p)
2663 {
2664 tree expr = *expr_p;
2665 tree type;
2666
2667 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2668
2669 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2670 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2671 else
2672 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2673
2674 /* One could argue that all the stuff below is not necessary for
2675 the non-bitfield case and declare it a FE error if type
2676 adjustment would be needed. */
2677 if (TREE_TYPE (expr) != type)
2678 {
2679 #ifdef ENABLE_TYPES_CHECKING
2680 tree old_type = TREE_TYPE (expr);
2681 #endif
2682 int type_quals;
2683
2684 /* We need to preserve qualifiers and propagate them from
2685 operand 0. */
2686 type_quals = TYPE_QUALS (type)
2687 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2688 if (TYPE_QUALS (type) != type_quals)
2689 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2690
2691 /* Set the type of the COMPONENT_REF to the underlying type. */
2692 TREE_TYPE (expr) = type;
2693
2694 #ifdef ENABLE_TYPES_CHECKING
2695 /* It is now a FE error, if the conversion from the canonical
2696 type to the original expression type is not useless. */
2697 gcc_assert (useless_type_conversion_p (old_type, type));
2698 #endif
2699 }
2700 }
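
/* Editorial sketch, not part of gimplify.c: a bit-field COMPONENT_REF
   whose canonical type is the narrower type get_unwidened computes
   rather than the declared unsigned int.  */
#if 0
struct bits { unsigned int f : 8; };
unsigned int
bitfield_demo (struct bits *p)
{
  return p->f;   /* read in a smaller mode; the ref's type is
                    canonicalized accordingly.  */
}
#endif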
2701
2702 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2703 to foo, embed that change in the ADDR_EXPR by converting
2704 T array[U];
2705 (T *)&array
2706 ==>
2707 &array[L]
2708 where L is the lower bound. For simplicity, only do this for constant
2709 lower bound.
2710 The constraint is that the type of &array[L] is trivially convertible
2711 to T *. */
2712
2713 static void
2714 canonicalize_addr_expr (tree *expr_p)
2715 {
2716 tree expr = *expr_p;
2717 tree addr_expr = TREE_OPERAND (expr, 0);
2718 tree datype, ddatype, pddatype;
2719
2720 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2721 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2722 || TREE_CODE (addr_expr) != ADDR_EXPR)
2723 return;
2724
2725 /* The addr_expr type should be a pointer to an array. */
2726 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2727 if (TREE_CODE (datype) != ARRAY_TYPE)
2728 return;
2729
2730 /* The pointer to element type shall be trivially convertible to
2731 the expression pointer type. */
2732 ddatype = TREE_TYPE (datype);
2733 pddatype = build_pointer_type (ddatype);
2734 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2735 pddatype))
2736 return;
2737
2738 /* The lower bound and element sizes must be constant. */
2739 if (!TYPE_SIZE_UNIT (ddatype)
2740 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2741 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2742 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2743 return;
2744
2745 /* All checks succeeded. Build a new node to merge the cast. */
2746 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2747 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2748 NULL_TREE, NULL_TREE);
2749 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2750
2751 /* We can have stripped a required restrict qualifier above. */
2752 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2753 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2754 }
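
/* Editorial sketch, not part of gimplify.c: the cast-of-array-address
   conversion that canonicalize_addr_expr folds into the ADDR_EXPR.  */
#if 0
int
addr_demo (void)
{
  int arr[4] = { 1, 2, 3, 4 };
  int *p = (int *) &arr;   /* (int *)&arr becomes &arr[0] above.  */
  return p[2];
}
#endif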
2755
2756 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2757 underneath as appropriate. */
2758
2759 static enum gimplify_status
2760 gimplify_conversion (tree *expr_p)
2761 {
2762 location_t loc = EXPR_LOCATION (*expr_p);
2763 gcc_assert (CONVERT_EXPR_P (*expr_p));
2764
2765 /* Then strip away all but the outermost conversion. */
2766 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2767
2768 /* And remove the outermost conversion if it's useless. */
2769 if (tree_ssa_useless_type_conversion (*expr_p))
2770 *expr_p = TREE_OPERAND (*expr_p, 0);
2771
2772 /* If we still have a conversion at the toplevel,
2773 then canonicalize some constructs. */
2774 if (CONVERT_EXPR_P (*expr_p))
2775 {
2776 tree sub = TREE_OPERAND (*expr_p, 0);
2777
2778 /* If a NOP conversion is changing the type of a COMPONENT_REF
2779 expression, then canonicalize its type now in order to expose more
2780 redundant conversions. */
2781 if (TREE_CODE (sub) == COMPONENT_REF)
2782 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2783
2784 /* If a NOP conversion is changing a pointer to array of foo
2785 to a pointer to foo, embed that change in the ADDR_EXPR. */
2786 else if (TREE_CODE (sub) == ADDR_EXPR)
2787 canonicalize_addr_expr (expr_p);
2788 }
2789
2790 /* If we have a conversion to a non-register type force the
2791 use of a VIEW_CONVERT_EXPR instead. */
2792 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2793 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2794 TREE_OPERAND (*expr_p, 0));
2795
2796 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2797 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2798 TREE_SET_CODE (*expr_p, NOP_EXPR);
2799
2800 return GS_OK;
2801 }
2802
2803 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2804 DECL_VALUE_EXPR, and it's worth re-examining things. */
2805
2806 static enum gimplify_status
2807 gimplify_var_or_parm_decl (tree *expr_p)
2808 {
2809 tree decl = *expr_p;
2810
2811 /* ??? If this is a local variable, and it has not been seen in any
2812 outer BIND_EXPR, then it's probably the result of a duplicate
2813 declaration, for which we've already issued an error. It would
2814 be really nice if the front end wouldn't leak these at all.
2815 Currently the only known culprit is C++ destructors, as seen
2816 in g++.old-deja/g++.jason/binding.C. */
2817 if (VAR_P (decl)
2818 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2819 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2820 && decl_function_context (decl) == current_function_decl)
2821 {
2822 gcc_assert (seen_error ());
2823 return GS_ERROR;
2824 }
2825
2826 /* When within an OMP context, notice uses of variables. */
2827 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2828 return GS_ALL_DONE;
2829
2830 /* If the decl is an alias for another expression, substitute it now. */
2831 if (DECL_HAS_VALUE_EXPR_P (decl))
2832 {
2833 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2834 return GS_OK;
2835 }
2836
2837 return GS_ALL_DONE;
2838 }
2839
2840 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2841
2842 static void
2843 recalculate_side_effects (tree t)
2844 {
2845 enum tree_code code = TREE_CODE (t);
2846 int len = TREE_OPERAND_LENGTH (t);
2847 int i;
2848
2849 switch (TREE_CODE_CLASS (code))
2850 {
2851 case tcc_expression:
2852 switch (code)
2853 {
2854 case INIT_EXPR:
2855 case MODIFY_EXPR:
2856 case VA_ARG_EXPR:
2857 case PREDECREMENT_EXPR:
2858 case PREINCREMENT_EXPR:
2859 case POSTDECREMENT_EXPR:
2860 case POSTINCREMENT_EXPR:
2861 /* All of these have side-effects, no matter what their
2862 operands are. */
2863 return;
2864
2865 default:
2866 break;
2867 }
2868 /* Fall through. */
2869
2870 case tcc_comparison: /* a comparison expression */
2871 case tcc_unary: /* a unary arithmetic expression */
2872 case tcc_binary: /* a binary arithmetic expression */
2873 case tcc_reference: /* a reference */
2874 case tcc_vl_exp: /* a function call */
2875 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2876 for (i = 0; i < len; ++i)
2877 {
2878 tree op = TREE_OPERAND (t, i);
2879 if (op && TREE_SIDE_EFFECTS (op))
2880 TREE_SIDE_EFFECTS (t) = 1;
2881 }
2882 break;
2883
2884 case tcc_constant:
2885 /* No side-effects. */
2886 return;
2887
2888 default:
2889 gcc_unreachable ();
2890 }
2891 }
2892
2893 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2894 node *EXPR_P.
2895
2896 compound_lval
2897 : min_lval '[' val ']'
2898 | min_lval '.' ID
2899 | compound_lval '[' val ']'
2900 | compound_lval '.' ID
2901
2902 This is not part of the original SIMPLE definition, which separates
2903 array and member references, but it seems reasonable to handle them
2904 together. Also, this way we don't run into problems with union
2905 aliasing; gcc requires that for accesses through a union to alias, the
2906 union reference must be explicit, which was not always the case when we
2907 were splitting up array and member refs.
2908
2909 PRE_P points to the sequence where side effects that must happen before
2910 *EXPR_P should be stored.
2911
2912 POST_P points to the sequence where side effects that must happen after
2913 *EXPR_P should be stored. */
2914
2915 static enum gimplify_status
2916 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2917 fallback_t fallback)
2918 {
2919 tree *p;
2920 enum gimplify_status ret = GS_ALL_DONE, tret;
2921 int i;
2922 location_t loc = EXPR_LOCATION (*expr_p);
2923 tree expr = *expr_p;
2924
2925 /* Create a stack of the subexpressions so later we can walk them in
2926 order from inner to outer. */
2927 auto_vec<tree, 10> expr_stack;
2928
2929 /* We can handle anything that get_inner_reference can deal with. */
2930 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2931 {
2932 restart:
2933 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2934 if (TREE_CODE (*p) == INDIRECT_REF)
2935 *p = fold_indirect_ref_loc (loc, *p);
2936
2937 if (handled_component_p (*p))
2938 ;
2939 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2940 additional COMPONENT_REFs. */
2941 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2942 && gimplify_var_or_parm_decl (p) == GS_OK)
2943 goto restart;
2944 else
2945 break;
2946
2947 expr_stack.safe_push (*p);
2948 }
2949
2950 gcc_assert (expr_stack.length ());
2951
2952 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2953 walked through and P points to the innermost expression.
2954
2955 Java requires that we elaborate nodes in source order. That
2956 means we must gimplify the inner expression followed by each of
2957 the indices, in order. But we can't gimplify the inner
2958 expression until we deal with any variable bounds, sizes, or
2959 positions in order to deal with PLACEHOLDER_EXPRs.
2960
2961 So we do this in three steps. First we deal with the annotations
2962 for any variables in the components, then we gimplify the base,
2963 then we gimplify any indices, from left to right. */
2964 for (i = expr_stack.length () - 1; i >= 0; i--)
2965 {
2966 tree t = expr_stack[i];
2967
2968 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2969 {
2970 /* Gimplify the low bound and element type size and put them into
2971 the ARRAY_REF. If these values are set, they have already been
2972 gimplified. */
2973 if (TREE_OPERAND (t, 2) == NULL_TREE)
2974 {
2975 tree low = unshare_expr (array_ref_low_bound (t));
2976 if (!is_gimple_min_invariant (low))
2977 {
2978 TREE_OPERAND (t, 2) = low;
2979 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2980 post_p, is_gimple_reg,
2981 fb_rvalue);
2982 ret = MIN (ret, tret);
2983 }
2984 }
2985 else
2986 {
2987 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2988 is_gimple_reg, fb_rvalue);
2989 ret = MIN (ret, tret);
2990 }
2991
2992 if (TREE_OPERAND (t, 3) == NULL_TREE)
2993 {
2994 tree elmt_size = array_ref_element_size (t);
2995 if (!is_gimple_min_invariant (elmt_size))
2996 {
2997 elmt_size = unshare_expr (elmt_size);
2998 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2999 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3000
3001 /* Divide the element size by the alignment of the element
3002 type (above). */
3003 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3004 elmt_size, factor);
3005
3006 TREE_OPERAND (t, 3) = elmt_size;
3007 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
3008 post_p, is_gimple_reg,
3009 fb_rvalue);
3010 ret = MIN (ret, tret);
3011 }
3012 }
3013 else
3014 {
3015 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3016 is_gimple_reg, fb_rvalue);
3017 ret = MIN (ret, tret);
3018 }
3019 }
3020 else if (TREE_CODE (t) == COMPONENT_REF)
3021 {
3022 /* Set the field offset into T and gimplify it. */
3023 if (TREE_OPERAND (t, 2) == NULL_TREE)
3024 {
3025 tree offset = component_ref_field_offset (t);
3026 if (!is_gimple_min_invariant (offset))
3027 {
3028 offset = unshare_expr (offset);
3029 tree field = TREE_OPERAND (t, 1);
3030 tree factor
3031 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3032
3033 /* Divide the offset by its alignment. */
3034 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3035 offset, factor);
3036
3037 TREE_OPERAND (t, 2) = offset;
3038 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
3039 post_p, is_gimple_reg,
3040 fb_rvalue);
3041 ret = MIN (ret, tret);
3042 }
3043 }
3044 else
3045 {
3046 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3047 is_gimple_reg, fb_rvalue);
3048 ret = MIN (ret, tret);
3049 }
3050 }
3051 }
3052
3053 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3054 so as to match the min_lval predicate. Failure to do so may result
3055 in the creation of large aggregate temporaries. */
3056 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3057 fallback | fb_lvalue);
3058 ret = MIN (ret, tret);
3059
3060 /* And finally, the indices and operands of ARRAY_REF. During this
3061 loop we also remove any useless conversions. */
3062 for (; expr_stack.length () > 0; )
3063 {
3064 tree t = expr_stack.pop ();
3065
3066 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3067 {
3068 /* Gimplify the dimension. */
3069 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3070 {
3071 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3072 is_gimple_val, fb_rvalue);
3073 ret = MIN (ret, tret);
3074 }
3075 }
3076
3077 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3078
3079 /* The innermost expression P may have originally had
3080 TREE_SIDE_EFFECTS set which would have caused all the outer
3081 expressions in *EXPR_P leading to P to also have had
3082 TREE_SIDE_EFFECTS set. */
3083 recalculate_side_effects (t);
3084 }
3085
3086 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3087 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3088 {
3089 canonicalize_component_ref (expr_p);
3090 }
3091
3092 expr_stack.release ();
3093
3094 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3095
3096 return ret;
3097 }
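
/* Editorial sketch, not part of gimplify.c: a compound lvalue gimplified
   by the three steps above; the temporary names are hypothetical.  */
#if 0
struct rec { int v[8]; };
int
compound_demo (struct rec *r, int i)
{
  /* Roughly:  t0 = i + 1;  t1 = r->v[t0];  so the variable index is a
     gimple value before the ARRAY_REF is used.  */
  return r->v[i + 1];
}
#endif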
3098
3099 /* Gimplify the self modifying expression pointed to by EXPR_P
3100 (++, --, +=, -=).
3101
3102 PRE_P points to the list where side effects that must happen before
3103 *EXPR_P should be stored.
3104
3105 POST_P points to the list where side effects that must happen after
3106 *EXPR_P should be stored.
3107
3108 WANT_VALUE is nonzero iff we want to use the value of this expression
3109 in another expression.
3110
3111 ARITH_TYPE is the type the computation should be performed in. */
3112
3113 enum gimplify_status
3114 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3115 bool want_value, tree arith_type)
3116 {
3117 enum tree_code code;
3118 tree lhs, lvalue, rhs, t1;
3119 gimple_seq post = NULL, *orig_post_p = post_p;
3120 bool postfix;
3121 enum tree_code arith_code;
3122 enum gimplify_status ret;
3123 location_t loc = EXPR_LOCATION (*expr_p);
3124
3125 code = TREE_CODE (*expr_p);
3126
3127 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3128 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3129
3130 /* Prefix or postfix? */
3131 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3132 /* Faster to treat as prefix if result is not used. */
3133 postfix = want_value;
3134 else
3135 postfix = false;
3136
3137 /* For postfix, make sure the inner expression's post side effects
3138 are executed after side effects from this expression. */
3139 if (postfix)
3140 post_p = &post;
3141
3142 /* Add or subtract? */
3143 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3144 arith_code = PLUS_EXPR;
3145 else
3146 arith_code = MINUS_EXPR;
3147
3148 /* Gimplify the LHS into a GIMPLE lvalue. */
3149 lvalue = TREE_OPERAND (*expr_p, 0);
3150 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3151 if (ret == GS_ERROR)
3152 return ret;
3153
3154 /* Extract the operands to the arithmetic operation. */
3155 lhs = lvalue;
3156 rhs = TREE_OPERAND (*expr_p, 1);
3157
3158 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3159 that value both as the result and in the postqueue operation. */
3160 if (postfix)
3161 {
3162 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3163 if (ret == GS_ERROR)
3164 return ret;
3165
3166 lhs = get_initialized_tmp_var (lhs, pre_p);
3167 }
3168
3169 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3170 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3171 {
3172 rhs = convert_to_ptrofftype_loc (loc, rhs);
3173 if (arith_code == MINUS_EXPR)
3174 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3175 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3176 }
3177 else
3178 t1 = fold_convert (TREE_TYPE (*expr_p),
3179 fold_build2 (arith_code, arith_type,
3180 fold_convert (arith_type, lhs),
3181 fold_convert (arith_type, rhs)));
3182
3183 if (postfix)
3184 {
3185 gimplify_assign (lvalue, t1, pre_p);
3186 gimplify_seq_add_seq (orig_post_p, post);
3187 *expr_p = lhs;
3188 return GS_ALL_DONE;
3189 }
3190 else
3191 {
3192 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3193 return GS_OK;
3194 }
3195 }
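
/* Editorial sketch, not part of gimplify.c: the lowering
   gimplify_self_mod_expr performs; the temporary names are
   hypothetical.  */
#if 0
int
postinc_demo (int *p, int a)
{
  int x = a++;    /* postfix: t = a; a = t + 1; x = t;  */
  int *q = p++;   /* pointer: POINTER_PLUS_EXPR with a byte offset.  */
  return x + (q != p);
}
#endif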
3196
3197 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3198
3199 static void
3200 maybe_with_size_expr (tree *expr_p)
3201 {
3202 tree expr = *expr_p;
3203 tree type = TREE_TYPE (expr);
3204 tree size;
3205
3206 /* If we've already wrapped this or the type is error_mark_node, we can't do
3207 anything. */
3208 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3209 || type == error_mark_node)
3210 return;
3211
3212 /* If the size isn't known or is a constant, we have nothing to do. */
3213 size = TYPE_SIZE_UNIT (type);
3214 if (!size || poly_int_tree_p (size))
3215 return;
3216
3217 /* Otherwise, make a WITH_SIZE_EXPR. */
3218 size = unshare_expr (size);
3219 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3220 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3221 }
3222
3223 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3224 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3225 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3226 gimplified to an SSA name. */
3227
3228 enum gimplify_status
3229 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3230 bool allow_ssa)
3231 {
3232 bool (*test) (tree);
3233 fallback_t fb;
3234
3235 /* In general, we allow lvalues for function arguments to avoid
3236 extra overhead of copying large aggregates out of even larger
3237 aggregates into temporaries only to copy the temporaries to
3238 the argument list. Make optimizers happy by pulling out to
3239 temporaries those types that fit in registers. */
3240 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3241 test = is_gimple_val, fb = fb_rvalue;
3242 else
3243 {
3244 test = is_gimple_lvalue, fb = fb_either;
3245 /* Also strip a TARGET_EXPR that would force an extra copy. */
3246 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3247 {
3248 tree init = TARGET_EXPR_INITIAL (*arg_p);
3249 if (init
3250 && !VOID_TYPE_P (TREE_TYPE (init)))
3251 *arg_p = init;
3252 }
3253 }
3254
3255 /* If this is a variable sized type, we must remember the size. */
3256 maybe_with_size_expr (arg_p);
3257
3258 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3259 /* Make sure arguments have the same location as the function call
3260 itself. */
3261 protected_set_expr_location (*arg_p, call_location);
3262
3263 /* There is a sequence point before a function call. Side effects in
3264 the argument list must occur before the actual call. So, when
3265 gimplifying arguments, force gimplify_expr to use an internal
3266 post queue which is then appended to the end of PRE_P. */
3267 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3268 }
3269
3270 /* Don't fold inside offloading or taskreg regions: it can break code by
3271 adding decl references that weren't in the source. We'll do it during
3272 the omplower pass instead. */
3273
3274 static bool
3275 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3276 {
3277 struct gimplify_omp_ctx *ctx;
3278 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3279 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3280 return false;
3281 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3282 return false;
3283 /* Delay folding of builtins until the IL is in consistent state
3284 so the diagnostic machinery can do a better job. */
3285 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3286 return false;
3287 return fold_stmt (gsi);
3288 }
3289
3290 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3291 WANT_VALUE is true if the result of the call is desired. */
3292
3293 static enum gimplify_status
3294 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3295 {
3296 tree fndecl, parms, p, fnptrtype;
3297 enum gimplify_status ret;
3298 int i, nargs;
3299 gcall *call;
3300 bool builtin_va_start_p = false;
3301 location_t loc = EXPR_LOCATION (*expr_p);
3302
3303 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3304
3305 /* For reliable diagnostics during inlining, it is necessary that
3306 every call_expr be annotated with file and line. */
3307 if (! EXPR_HAS_LOCATION (*expr_p))
3308 SET_EXPR_LOCATION (*expr_p, input_location);
3309
3310 /* Gimplify internal functions created in the FEs. */
3311 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3312 {
3313 if (want_value)
3314 return GS_ALL_DONE;
3315
3316 nargs = call_expr_nargs (*expr_p);
3317 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3318 auto_vec<tree> vargs (nargs);
3319
3320 for (i = 0; i < nargs; i++)
3321 {
3322 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3323 EXPR_LOCATION (*expr_p));
3324 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3325 }
3326
3327 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3328 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3329 gimplify_seq_add_stmt (pre_p, call);
3330 return GS_ALL_DONE;
3331 }
3332
3333 /* This may be a call to a builtin function.
3334
3335 Builtin function calls may be transformed into different
3336 (and more efficient) builtin function calls under certain
3337 circumstances. Unfortunately, gimplification can muck things
3338 up enough that the builtin expanders are not aware that certain
3339 transformations are still valid.
3340
3341 So we attempt transformation/gimplification of the call before
3342 we gimplify the CALL_EXPR. At this time we do not manage to
3343 transform all calls in the same manner as the expanders do, but
3344 we do transform most of them. */
3345 fndecl = get_callee_fndecl (*expr_p);
3346 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3347 switch (DECL_FUNCTION_CODE (fndecl))
3348 {
3349 CASE_BUILT_IN_ALLOCA:
3350 /* If the call has been built for a variable-sized object, then we
3351 want to restore the stack level when the enclosing BIND_EXPR is
3352 exited to reclaim the allocated space; otherwise, we precisely
3353 need to do the opposite and preserve the latest stack level. */
3354 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3355 gimplify_ctxp->save_stack = true;
3356 else
3357 gimplify_ctxp->keep_stack = true;
3358 break;
3359
3360 case BUILT_IN_VA_START:
3361 {
3362 builtin_va_start_p = true;
3363 if (call_expr_nargs (*expr_p) < 2)
3364 {
3365 error ("too few arguments to function %<va_start%>");
3366 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3367 return GS_OK;
3368 }
3369
3370 if (fold_builtin_next_arg (*expr_p, true))
3371 {
3372 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3373 return GS_OK;
3374 }
3375 break;
3376 }
3377
3378 case BUILT_IN_EH_RETURN:
3379 cfun->calls_eh_return = true;
3380 break;
3381
3382 default:
3383 ;
3384 }
3385 if (fndecl && fndecl_built_in_p (fndecl))
3386 {
3387 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3388 if (new_tree && new_tree != *expr_p)
3389 {
3390 /* There was a transformation of this call which computes the
3391 same value, but in a more efficient way. Return and try
3392 again. */
3393 *expr_p = new_tree;
3394 return GS_OK;
3395 }
3396 }
3397
3398 /* Remember the original function pointer type. */
3399 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3400
3401 if (flag_openmp
3402 && fndecl
3403 && cfun
3404 && (cfun->curr_properties & PROP_gimple_any) == 0)
3405 {
3406 tree variant = omp_resolve_declare_variant (fndecl);
3407 if (variant != fndecl)
3408 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3409 }
3410
3411 /* There is a sequence point before the call, so any side effects in
3412 the calling expression must occur before the actual call. Force
3413 gimplify_expr to use an internal post queue. */
3414 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3415 is_gimple_call_addr, fb_rvalue);
3416
3417 nargs = call_expr_nargs (*expr_p);
3418
3419 /* Get argument types for verification. */
3420 fndecl = get_callee_fndecl (*expr_p);
3421 parms = NULL_TREE;
3422 if (fndecl)
3423 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3424 else
3425 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3426
3427 if (fndecl && DECL_ARGUMENTS (fndecl))
3428 p = DECL_ARGUMENTS (fndecl);
3429 else if (parms)
3430 p = parms;
3431 else
3432 p = NULL_TREE;
3433 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3434 ;
3435
3436 /* If the last argument is __builtin_va_arg_pack () and it is not
3437 passed as a named argument, decrease the number of CALL_EXPR
3438 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3439 if (!p
3440 && i < nargs
3441 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3442 {
3443 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3444 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3445
3446 if (last_arg_fndecl
3447 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3448 {
3449 tree call = *expr_p;
3450
3451 --nargs;
3452 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3453 CALL_EXPR_FN (call),
3454 nargs, CALL_EXPR_ARGP (call));
3455
3456 /* Copy all CALL_EXPR flags, location and block, except
3457 CALL_EXPR_VA_ARG_PACK flag. */
3458 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3459 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3460 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3461 = CALL_EXPR_RETURN_SLOT_OPT (call);
3462 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3463 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3464
3465 /* Set CALL_EXPR_VA_ARG_PACK. */
3466 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3467 }
3468 }
3469
3470 /* If the call returns twice then after building the CFG the call
3471 argument computations will no longer dominate the call because
3472 we add an abnormal incoming edge to the call. So do not use SSA
3473 vars there. */
3474 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3475
3476 /* Gimplify the function arguments. */
3477 if (nargs > 0)
3478 {
3479 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3480 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3481 PUSH_ARGS_REVERSED ? i-- : i++)
3482 {
3483 enum gimplify_status t;
3484
3485 /* Avoid gimplifying the second argument to va_start, which needs to
3486 be the plain PARM_DECL. */
3487 if ((i != 1) || !builtin_va_start_p)
3488 {
3489 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3490 EXPR_LOCATION (*expr_p), ! returns_twice);
3491
3492 if (t == GS_ERROR)
3493 ret = GS_ERROR;
3494 }
3495 }
3496 }
3497
3498 /* Gimplify the static chain. */
3499 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3500 {
3501 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3502 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3503 else
3504 {
3505 enum gimplify_status t;
3506 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3507 EXPR_LOCATION (*expr_p), ! returns_twice);
3508 if (t == GS_ERROR)
3509 ret = GS_ERROR;
3510 }
3511 }
3512
3513 /* Verify the function result. */
3514 if (want_value && fndecl
3515 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3516 {
3517 error_at (loc, "using result of function returning %<void%>");
3518 ret = GS_ERROR;
3519 }
3520
3521 /* Try this again in case gimplification exposed something. */
3522 if (ret != GS_ERROR)
3523 {
3524 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3525
3526 if (new_tree && new_tree != *expr_p)
3527 {
3528 /* There was a transformation of this call which computes the
3529 same value, but in a more efficient way. Return and try
3530 again. */
3531 *expr_p = new_tree;
3532 return GS_OK;
3533 }
3534 }
3535 else
3536 {
3537 *expr_p = error_mark_node;
3538 return GS_ERROR;
3539 }
3540
3541 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
3542 call. This allows us to eliminate redundant or useless
3543 calls to "const" functions. */
3544 if (TREE_CODE (*expr_p) == CALL_EXPR)
3545 {
3546 int flags = call_expr_flags (*expr_p);
3547 if (flags & (ECF_CONST | ECF_PURE)
3548 /* An infinite loop is considered a side effect. */
3549 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3550 TREE_SIDE_EFFECTS (*expr_p) = 0;
3551 }
3552
3553 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3554 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3555 form and delegate the creation of a GIMPLE_CALL to
3556 gimplify_modify_expr. This is always possible because when
3557 WANT_VALUE is true, the caller wants the result of this call into
3558 a temporary, which means that we will emit an INIT_EXPR in
3559 internal_get_tmp_var which will then be handled by
3560 gimplify_modify_expr. */
3561 if (!want_value)
3562 {
3563 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3564 have to do is replicate it as a GIMPLE_CALL tuple. */
3565 gimple_stmt_iterator gsi;
3566 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3567 notice_special_calls (call);
3568 gimplify_seq_add_stmt (pre_p, call);
3569 gsi = gsi_last (*pre_p);
3570 maybe_fold_stmt (&gsi);
3571 *expr_p = NULL_TREE;
3572 }
3573 else
3574 /* Remember the original function type. */
3575 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3576 CALL_EXPR_FN (*expr_p));
3577
3578 return ret;
3579 }
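
/* Editorial sketch, not part of gimplify.c: a forwarding always_inline
   function whose trailing __builtin_va_arg_pack () call is dropped from
   the argument list with CALL_EXPR_VA_ARG_PACK set instead, as done
   above.  The function name log_err is hypothetical.  */
#if 0
#include <stdio.h>
static inline __attribute__ ((always_inline)) int
log_err (const char *fmt, ...)
{
  return fprintf (stderr, fmt, __builtin_va_arg_pack ());
}
#endif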
3580
3581 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3582 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3583
3584 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3585 condition is true or false, respectively. If null, we should generate
3586 our own to skip over the evaluation of this specific expression.
3587
3588 LOCUS is the source location of the COND_EXPR.
3589
3590 This function is the tree equivalent of do_jump.
3591
3592 shortcut_cond_r should only be called by shortcut_cond_expr. */
3593
3594 static tree
3595 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3596 location_t locus)
3597 {
3598 tree local_label = NULL_TREE;
3599 tree t, expr = NULL;
3600
3601 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3602 retain the shortcut semantics. Just insert the gotos here;
3603 shortcut_cond_expr will append the real blocks later. */
3604 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3605 {
3606 location_t new_locus;
3607
3608 /* Turn if (a && b) into
3609
3610 if (a); else goto no;
3611 if (b) goto yes; else goto no;
3612 (no:) */
3613
3614 if (false_label_p == NULL)
3615 false_label_p = &local_label;
3616
3617 /* Keep the original source location on the first 'if'. */
3618 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3619 append_to_statement_list (t, &expr);
3620
3621 /* Set the source location of the && on the second 'if'. */
3622 new_locus = rexpr_location (pred, locus);
3623 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3624 new_locus);
3625 append_to_statement_list (t, &expr);
3626 }
3627 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3628 {
3629 location_t new_locus;
3630
3631 /* Turn if (a || b) into
3632
3633 if (a) goto yes;
3634 if (b) goto yes; else goto no;
3635 (yes:) */
3636
3637 if (true_label_p == NULL)
3638 true_label_p = &local_label;
3639
3640 /* Keep the original source location on the first 'if'. */
3641 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3642 append_to_statement_list (t, &expr);
3643
3644 /* Set the source location of the || on the second 'if'. */
3645 new_locus = rexpr_location (pred, locus);
3646 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3647 new_locus);
3648 append_to_statement_list (t, &expr);
3649 }
3650 else if (TREE_CODE (pred) == COND_EXPR
3651 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3652 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3653 {
3654 location_t new_locus;
3655
3656 /* As long as we're messing with gotos, turn if (a ? b : c) into
3657 if (a)
3658 if (b) goto yes; else goto no;
3659 else
3660 if (c) goto yes; else goto no;
3661
3662 Don't do this if one of the arms has void type, which can happen
3663 in C++ when the arm is throw. */
3664
3665 /* Keep the original source location on the first 'if'. Set the source
3666 location of the ? on the second 'if'. */
3667 new_locus = rexpr_location (pred, locus);
3668 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3669 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3670 false_label_p, locus),
3671 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3672 false_label_p, new_locus));
3673 }
3674 else
3675 {
3676 expr = build3 (COND_EXPR, void_type_node, pred,
3677 build_and_jump (true_label_p),
3678 build_and_jump (false_label_p));
3679 SET_EXPR_LOCATION (expr, locus);
3680 }
3681
3682 if (local_label)
3683 {
3684 t = build1 (LABEL_EXPR, void_type_node, local_label);
3685 append_to_statement_list (t, &expr);
3686 }
3687
3688 return expr;
3689 }
3690
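/* For illustration (a sketch, not verbatim output): with both target
   labels supplied by the caller, a predicate such as

     if (a && (b || c)) goto yes; else goto no;

   is pulled apart by the recursion above into roughly

     if (a); else goto no;
     if (b) goto yes;
     if (c) goto yes; else goto no;  */
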
3691 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3692 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3693 statement, if it is the last one. Otherwise, return NULL. */
3694
3695 static tree
3696 find_goto (tree expr)
3697 {
3698 if (!expr)
3699 return NULL_TREE;
3700
3701 if (TREE_CODE (expr) == GOTO_EXPR)
3702 return expr;
3703
3704 if (TREE_CODE (expr) != STATEMENT_LIST)
3705 return NULL_TREE;
3706
3707 tree_stmt_iterator i = tsi_start (expr);
3708
3709 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3710 tsi_next (&i);
3711
3712 if (!tsi_one_before_end_p (i))
3713 return NULL_TREE;
3714
3715 return find_goto (tsi_stmt (i));
3716 }
3717
3718 /* Same as find_goto, except that it returns NULL if the destination
3719 is not a LABEL_DECL. */
3720
3721 static inline tree
3722 find_goto_label (tree expr)
3723 {
3724 tree dest = find_goto (expr);
3725 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3726 return dest;
3727 return NULL_TREE;
3728 }
3729
3730 /* Given a conditional expression EXPR with short-circuit boolean
3731 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3732 predicate apart into the equivalent sequence of conditionals. */
3733
3734 static tree
3735 shortcut_cond_expr (tree expr)
3736 {
3737 tree pred = TREE_OPERAND (expr, 0);
3738 tree then_ = TREE_OPERAND (expr, 1);
3739 tree else_ = TREE_OPERAND (expr, 2);
3740 tree true_label, false_label, end_label, t;
3741 tree *true_label_p;
3742 tree *false_label_p;
3743 bool emit_end, emit_false, jump_over_else;
3744 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3745 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3746
3747 /* First do simple transformations. */
3748 if (!else_se)
3749 {
3750 /* If there is no 'else', turn
3751 if (a && b) then c
3752 into
3753 if (a) if (b) then c. */
3754 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3755 {
3756 /* Keep the original source location on the first 'if'. */
3757 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3758 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3759 /* Set the source location of the && on the second 'if'. */
3760 if (rexpr_has_location (pred))
3761 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3762 then_ = shortcut_cond_expr (expr);
3763 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3764 pred = TREE_OPERAND (pred, 0);
3765 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3766 SET_EXPR_LOCATION (expr, locus);
3767 }
3768 }
3769
3770 if (!then_se)
3771 {
3772 /* If there is no 'then', turn
3773 if (a || b); else d
3774 into
3775 if (a); else if (b); else d. */
3776 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3777 {
3778 /* Keep the original source location on the first 'if'. */
3779 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3780 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3781 /* Set the source location of the || on the second 'if'. */
3782 if (rexpr_has_location (pred))
3783 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3784 else_ = shortcut_cond_expr (expr);
3785 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3786 pred = TREE_OPERAND (pred, 0);
3787 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3788 SET_EXPR_LOCATION (expr, locus);
3789 }
3790 }
3791
3792 /* If we're done, great. */
3793 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3794 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3795 return expr;
3796
3797 /* Otherwise we need to mess with gotos. Change
3798 if (a) c; else d;
3799 to
3800 if (a); else goto no;
3801 c; goto end;
3802 no: d; end:
3803 and recursively gimplify the condition. */
3804
3805 true_label = false_label = end_label = NULL_TREE;
3806
3807 /* If our arms just jump somewhere, hijack those labels so we don't
3808 generate jumps to jumps. */
3809
3810 if (tree then_goto = find_goto_label (then_))
3811 {
3812 true_label = GOTO_DESTINATION (then_goto);
3813 then_ = NULL;
3814 then_se = false;
3815 }
3816
3817 if (tree else_goto = find_goto_label (else_))
3818 {
3819 false_label = GOTO_DESTINATION (else_goto);
3820 else_ = NULL;
3821 else_se = false;
3822 }
3823
3824 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3825 if (true_label)
3826 true_label_p = &true_label;
3827 else
3828 true_label_p = NULL;
3829
3830 /* The 'else' branch also needs a label if it contains interesting code. */
3831 if (false_label || else_se)
3832 false_label_p = &false_label;
3833 else
3834 false_label_p = NULL;
3835
3836 /* If there was nothing else in our arms, just forward the label(s). */
3837 if (!then_se && !else_se)
3838 return shortcut_cond_r (pred, true_label_p, false_label_p,
3839 EXPR_LOC_OR_LOC (expr, input_location));
3840
3841 /* If our last subexpression already has a terminal label, reuse it. */
3842 if (else_se)
3843 t = expr_last (else_);
3844 else if (then_se)
3845 t = expr_last (then_);
3846 else
3847 t = NULL;
3848 if (t && TREE_CODE (t) == LABEL_EXPR)
3849 end_label = LABEL_EXPR_LABEL (t);
3850
3851 /* If we don't care about jumping to the 'else' branch, jump to the end
3852 if the condition is false. */
3853 if (!false_label_p)
3854 false_label_p = &end_label;
3855
3856 /* We only want to emit these labels if we aren't hijacking them. */
3857 emit_end = (end_label == NULL_TREE);
3858 emit_false = (false_label == NULL_TREE);
3859
3860 /* We only emit the jump over the else clause if we have to--if the
3861 then clause may fall through. Otherwise we can wind up with a
3862 useless jump and a useless label at the end of gimplified code,
3863 which will cause us to think that this conditional as a whole
3864 falls through even if it doesn't. If we then inline a function
3865 which ends with such a condition, that can cause us to issue an
3866 inappropriate warning about control reaching the end of a
3867 non-void function. */
3868 jump_over_else = block_may_fallthru (then_);
3869
3870 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3871 EXPR_LOC_OR_LOC (expr, input_location));
3872
3873 expr = NULL;
3874 append_to_statement_list (pred, &expr);
3875
3876 append_to_statement_list (then_, &expr);
3877 if (else_se)
3878 {
3879 if (jump_over_else)
3880 {
3881 tree last = expr_last (expr);
3882 t = build_and_jump (&end_label);
3883 if (rexpr_has_location (last))
3884 SET_EXPR_LOCATION (t, rexpr_location (last));
3885 append_to_statement_list (t, &expr);
3886 }
3887 if (emit_false)
3888 {
3889 t = build1 (LABEL_EXPR, void_type_node, false_label);
3890 append_to_statement_list (t, &expr);
3891 }
3892 append_to_statement_list (else_, &expr);
3893 }
3894 if (emit_end && end_label)
3895 {
3896 t = build1 (LABEL_EXPR, void_type_node, end_label);
3897 append_to_statement_list (t, &expr);
3898 }
3899
3900 return expr;
3901 }
3902
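/* For example (informally), a statement like

     if (a && b) c; else d;

   where both arms have side effects comes out of this function as
   roughly

     if (a); else goto no;
     if (b); else goto no;
     c;
     goto end;
     no:
     d;
     end:

   with `no' and `end' standing for artificial labels.  */
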
3903 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3904
3905 tree
3906 gimple_boolify (tree expr)
3907 {
3908 tree type = TREE_TYPE (expr);
3909 location_t loc = EXPR_LOCATION (expr);
3910
3911 if (TREE_CODE (expr) == NE_EXPR
3912 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3913 && integer_zerop (TREE_OPERAND (expr, 1)))
3914 {
3915 tree call = TREE_OPERAND (expr, 0);
3916 tree fn = get_callee_fndecl (call);
3917
3918 /* For __builtin_expect ((long) (x), y) recurse into x as well
3919 if x is truth_value_p. */
3920 if (fn
3921 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
3922 && call_expr_nargs (call) == 2)
3923 {
3924 tree arg = CALL_EXPR_ARG (call, 0);
3925 if (arg)
3926 {
3927 if (TREE_CODE (arg) == NOP_EXPR
3928 && TREE_TYPE (arg) == TREE_TYPE (call))
3929 arg = TREE_OPERAND (arg, 0);
3930 if (truth_value_p (TREE_CODE (arg)))
3931 {
3932 arg = gimple_boolify (arg);
3933 CALL_EXPR_ARG (call, 0)
3934 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3935 }
3936 }
3937 }
3938 }
3939
3940 switch (TREE_CODE (expr))
3941 {
3942 case TRUTH_AND_EXPR:
3943 case TRUTH_OR_EXPR:
3944 case TRUTH_XOR_EXPR:
3945 case TRUTH_ANDIF_EXPR:
3946 case TRUTH_ORIF_EXPR:
3947 /* Also boolify the arguments of truth exprs. */
3948 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3949 /* FALLTHRU */
3950
3951 case TRUTH_NOT_EXPR:
3952 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3953
3954 /* These expressions always produce boolean results. */
3955 if (TREE_CODE (type) != BOOLEAN_TYPE)
3956 TREE_TYPE (expr) = boolean_type_node;
3957 return expr;
3958
3959 case ANNOTATE_EXPR:
3960 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3961 {
3962 case annot_expr_ivdep_kind:
3963 case annot_expr_unroll_kind:
3964 case annot_expr_no_vector_kind:
3965 case annot_expr_vector_kind:
3966 case annot_expr_parallel_kind:
3967 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3968 if (TREE_CODE (type) != BOOLEAN_TYPE)
3969 TREE_TYPE (expr) = boolean_type_node;
3970 return expr;
3971 default:
3972 gcc_unreachable ();
3973 }
3974
3975 default:
3976 if (COMPARISON_CLASS_P (expr))
3977 {
3978 /* These expressions always produce boolean results. */
3979 if (TREE_CODE (type) != BOOLEAN_TYPE)
3980 TREE_TYPE (expr) = boolean_type_node;
3981 return expr;
3982 }
3983 /* Other expressions that get here must have boolean values, but
3984 might need to be converted to the appropriate mode. */
3985 if (TREE_CODE (type) == BOOLEAN_TYPE)
3986 return expr;
3987 return fold_convert_loc (loc, boolean_type_node, expr);
3988 }
3989 }
3990
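/* Informally: in C the predicate `i && j' for ints i and j has
   integer type; gimple_boolify rewrites it in place so that both
   operands are converted to BOOLEAN_TYPE, as if `(bool) i && (bool) j',
   and a bare comparison such as `i < j' merely has its type switched
   to BOOLEAN_TYPE.  */
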
3991 /* Given a conditional expression *EXPR_P without side effects, gimplify
3992 its operands. New statements are inserted to PRE_P. */
3993
3994 static enum gimplify_status
3995 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3996 {
3997 tree expr = *expr_p, cond;
3998 enum gimplify_status ret, tret;
3999 enum tree_code code;
4000
4001 cond = gimple_boolify (COND_EXPR_COND (expr));
4002
4003 /* We need to handle && and || specially, as their gimplification
4004 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
4005 code = TREE_CODE (cond);
4006 if (code == TRUTH_ANDIF_EXPR)
4007 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4008 else if (code == TRUTH_ORIF_EXPR)
4009 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4010 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
4011 COND_EXPR_COND (*expr_p) = cond;
4012
4013 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4014 is_gimple_val, fb_rvalue);
4015 ret = MIN (ret, tret);
4016 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4017 is_gimple_val, fb_rvalue);
4018
4019 return MIN (ret, tret);
4020 }
4021
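/* A sketch: for `x = (a && b) ? c : d;' with simple, side-effect-free
   operands, the && is demoted to TRUTH_AND_EXPR (both operands are
   safe to evaluate unconditionally here), so this gimplifies to
   roughly

     t = a & b;
     x = t ? c : d;

   without introducing any control flow.  */
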
4022 /* Return true if evaluating EXPR could trap.
4023 EXPR is GENERIC, while tree_could_trap_p can be called
4024 only on GIMPLE. */
4025
4026 bool
4027 generic_expr_could_trap_p (tree expr)
4028 {
4029 unsigned i, n;
4030
4031 if (!expr || is_gimple_val (expr))
4032 return false;
4033
4034 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4035 return true;
4036
4037 n = TREE_OPERAND_LENGTH (expr);
4038 for (i = 0; i < n; i++)
4039 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4040 return true;
4041
4042 return false;
4043 }
4044
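/* For example, `*p + 1' could trap: tree_could_trap_p does not fire
   on the PLUS_EXPR itself, but the recursion reaches the dereference
   of `p', which may fault.  A bare local scalar is is_gimple_val and
   trivially cannot trap.  */
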
4045 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4046 into
4047
4048     if (p)                  if (p)
4049       t1 = a;                 a;
4050     else           or       else
4051       t1 = b;                 b;
4052 t1;
4053
4054 The second form is used when *EXPR_P is of type void.
4055
4056 PRE_P points to the list where side effects that must happen before
4057 *EXPR_P should be stored. */
4058
4059 static enum gimplify_status
4060 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4061 {
4062 tree expr = *expr_p;
4063 tree type = TREE_TYPE (expr);
4064 location_t loc = EXPR_LOCATION (expr);
4065 tree tmp, arm1, arm2;
4066 enum gimplify_status ret;
4067 tree label_true, label_false, label_cont;
4068 bool have_then_clause_p, have_else_clause_p;
4069 gcond *cond_stmt;
4070 enum tree_code pred_code;
4071 gimple_seq seq = NULL;
4072
4073 /* If this COND_EXPR has a value, copy the values into a temporary within
4074 the arms. */
4075 if (!VOID_TYPE_P (type))
4076 {
4077 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4078 tree result;
4079
4080 /* If either an rvalue is ok or we do not require an lvalue, create the
4081 temporary. But we cannot do that if the type is addressable. */
4082 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4083 && !TREE_ADDRESSABLE (type))
4084 {
4085 if (gimplify_ctxp->allow_rhs_cond_expr
4086 /* If either branch has side effects or could trap, it can't be
4087 evaluated unconditionally. */
4088 && !TREE_SIDE_EFFECTS (then_)
4089 && !generic_expr_could_trap_p (then_)
4090 && !TREE_SIDE_EFFECTS (else_)
4091 && !generic_expr_could_trap_p (else_))
4092 return gimplify_pure_cond_expr (expr_p, pre_p);
4093
4094 tmp = create_tmp_var (type, "iftmp");
4095 result = tmp;
4096 }
4097
4098 /* Otherwise, only create and copy references to the values. */
4099 else
4100 {
4101 type = build_pointer_type (type);
4102
4103 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4104 then_ = build_fold_addr_expr_loc (loc, then_);
4105
4106 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4107 else_ = build_fold_addr_expr_loc (loc, else_);
4108
4109 expr
4110 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4111
4112 tmp = create_tmp_var (type, "iftmp");
4113 result = build_simple_mem_ref_loc (loc, tmp);
4114 }
4115
4116 /* Build the new then clause, `tmp = then_;'. But don't build the
4117 assignment if the value is void; in C++ the value can be void when the arm is a throw. */
4118 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4119 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4120
4121 /* Similarly, build the new else clause, `tmp = else_;'. */
4122 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4123 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4124
4125 TREE_TYPE (expr) = void_type_node;
4126 recalculate_side_effects (expr);
4127
4128 /* Move the COND_EXPR to the prequeue. */
4129 gimplify_stmt (&expr, pre_p);
4130
4131 *expr_p = result;
4132 return GS_ALL_DONE;
4133 }
4134
4135 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4136 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4137 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4138 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4139
4140 /* Make sure the condition has BOOLEAN_TYPE. */
4141 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4142
4143 /* Break apart && and || conditions. */
4144 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4145 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4146 {
4147 expr = shortcut_cond_expr (expr);
4148
4149 if (expr != *expr_p)
4150 {
4151 *expr_p = expr;
4152
4153 /* We can't rely on gimplify_expr to re-gimplify the expanded
4154 form properly, as cleanups might cause the target labels to be
4155 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4156 set up a conditional context. */
4157 gimple_push_condition ();
4158 gimplify_stmt (expr_p, &seq);
4159 gimple_pop_condition (pre_p);
4160 gimple_seq_add_seq (pre_p, seq);
4161
4162 return GS_ALL_DONE;
4163 }
4164 }
4165
4166 /* Now do the normal gimplification. */
4167
4168 /* Gimplify condition. */
4169 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4170 is_gimple_condexpr_for_cond, fb_rvalue);
4171 if (ret == GS_ERROR)
4172 return GS_ERROR;
4173 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4174
4175 gimple_push_condition ();
4176
4177 have_then_clause_p = have_else_clause_p = false;
4178 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4179 if (label_true
4180 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4181 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4182 have different locations, otherwise we end up with incorrect
4183 location information on the branches. */
4184 && (optimize
4185 || !EXPR_HAS_LOCATION (expr)
4186 || !rexpr_has_location (label_true)
4187 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4188 {
4189 have_then_clause_p = true;
4190 label_true = GOTO_DESTINATION (label_true);
4191 }
4192 else
4193 label_true = create_artificial_label (UNKNOWN_LOCATION);
4194 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4195 if (label_false
4196 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4197 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4198 have different locations, otherwise we end up with incorrect
4199 location information on the branches. */
4200 && (optimize
4201 || !EXPR_HAS_LOCATION (expr)
4202 || !rexpr_has_location (label_false)
4203 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4204 {
4205 have_else_clause_p = true;
4206 label_false = GOTO_DESTINATION (label_false);
4207 }
4208 else
4209 label_false = create_artificial_label (UNKNOWN_LOCATION);
4210
4211 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4212 &arm2);
4213 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4214 label_false);
4215 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4216 gimplify_seq_add_stmt (&seq, cond_stmt);
4217 gimple_stmt_iterator gsi = gsi_last (seq);
4218 maybe_fold_stmt (&gsi);
4219
4220 label_cont = NULL_TREE;
4221 if (!have_then_clause_p)
4222 {
4223 /* For if (...) {} else { code; } put label_true after
4224 the else block. */
4225 if (TREE_OPERAND (expr, 1) == NULL_TREE
4226 && !have_else_clause_p
4227 && TREE_OPERAND (expr, 2) != NULL_TREE)
4228 label_cont = label_true;
4229 else
4230 {
4231 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4232 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4233 /* For if (...) { code; } else {} or
4234 if (...) { code; } else goto label; or
4235 if (...) { code; return; } else { ... }
4236 label_cont isn't needed. */
4237 if (!have_else_clause_p
4238 && TREE_OPERAND (expr, 2) != NULL_TREE
4239 && gimple_seq_may_fallthru (seq))
4240 {
4241 gimple *g;
4242 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4243
4244 g = gimple_build_goto (label_cont);
4245
4246 /* GIMPLE_COND's are very low level; they have embedded
4247 gotos. This particular embedded goto should not be marked
4248 with the location of the original COND_EXPR, as it would
4249 correspond to the COND_EXPR's condition, not the ELSE or the
4250 THEN arms. To avoid marking it with the wrong location, flag
4251 it as "no location". */
4252 gimple_set_do_not_emit_location (g);
4253
4254 gimplify_seq_add_stmt (&seq, g);
4255 }
4256 }
4257 }
4258 if (!have_else_clause_p)
4259 {
4260 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4261 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4262 }
4263 if (label_cont)
4264 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4265
4266 gimple_pop_condition (pre_p);
4267 gimple_seq_add_seq (pre_p, seq);
4268
4269 if (ret == GS_ERROR)
4270 ; /* Do nothing. */
4271 else if (have_then_clause_p || have_else_clause_p)
4272 ret = GS_ALL_DONE;
4273 else
4274 {
4275 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4276 expr = TREE_OPERAND (expr, 0);
4277 gimplify_stmt (&expr, pre_p);
4278 }
4279
4280 *expr_p = NULL;
4281 return ret;
4282 }
4283
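/* A sketch of the result for the void statement `p ? f () : g ();':

     if (p != 0) goto L1; else goto L2;
     L1: f (); goto L3;
     L2: g ();
     L3:

   where L1, L2 and L3 are artificial labels.  In the value-producing
   case handled earlier, the arms instead assign to the `iftmp'
   temporary.  */
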
4284 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4285 to be marked addressable.
4286
4287 We cannot rely on such an expression being directly markable if a temporary
4288 has been created by the gimplification. In this case, we create another
4289 temporary and initialize it with a copy, which will become a store after we
4290 mark it addressable. This can happen if the front-end passed us something
4291 that it could not mark addressable yet, like a Fortran pass-by-reference
4292 parameter (int) floatvar. */
4293
4294 static void
4295 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4296 {
4297 while (handled_component_p (*expr_p))
4298 expr_p = &TREE_OPERAND (*expr_p, 0);
4299 if (is_gimple_reg (*expr_p))
4300 {
4301 /* Do not allow an SSA name as the temporary. */
4302 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4303 DECL_GIMPLE_REG_P (var) = 0;
4304 *expr_p = var;
4305 }
4306 }
4307
4308 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4309 a call to __builtin_memcpy. */
4310
4311 static enum gimplify_status
4312 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4313 gimple_seq *seq_p)
4314 {
4315 tree t, to, to_ptr, from, from_ptr;
4316 gcall *gs;
4317 location_t loc = EXPR_LOCATION (*expr_p);
4318
4319 to = TREE_OPERAND (*expr_p, 0);
4320 from = TREE_OPERAND (*expr_p, 1);
4321
4322 /* Mark the RHS addressable. Beware that it may not be possible to do so
4323 directly if a temporary has been created by the gimplification. */
4324 prepare_gimple_addressable (&from, seq_p);
4325
4326 mark_addressable (from);
4327 from_ptr = build_fold_addr_expr_loc (loc, from);
4328 gimplify_arg (&from_ptr, seq_p, loc);
4329
4330 mark_addressable (to);
4331 to_ptr = build_fold_addr_expr_loc (loc, to);
4332 gimplify_arg (&to_ptr, seq_p, loc);
4333
4334 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4335
4336 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4337
4338 if (want_value)
4339 {
4340 /* tmp = memcpy() */
4341 t = create_tmp_var (TREE_TYPE (to_ptr));
4342 gimple_call_set_lhs (gs, t);
4343 gimplify_seq_add_stmt (seq_p, gs);
4344
4345 *expr_p = build_simple_mem_ref (t);
4346 return GS_ALL_DONE;
4347 }
4348
4349 gimplify_seq_add_stmt (seq_p, gs);
4350 *expr_p = NULL;
4351 return GS_ALL_DONE;
4352 }
4353
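/* For instance, an aggregate assignment `a = b;' whose size is only
   known at run time is emitted, roughly, as

     __builtin_memcpy (&a, &b, size);

   and when WANT_VALUE is set the call's result is dereferenced again
   to stand for the value of the assignment.  */
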
4354 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4355 a call to __builtin_memset. In this case we know that the RHS is
4356 a CONSTRUCTOR with an empty element list. */
4357
4358 static enum gimplify_status
4359 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4360 gimple_seq *seq_p)
4361 {
4362 tree t, from, to, to_ptr;
4363 gcall *gs;
4364 location_t loc = EXPR_LOCATION (*expr_p);
4365
4366 /* Assert our assumptions, to abort instead of producing wrong code
4367 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4368 not be immediately exposed. */
4369 from = TREE_OPERAND (*expr_p, 1);
4370 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4371 from = TREE_OPERAND (from, 0);
4372
4373 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4374 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4375
4376 /* Now proceed. */
4377 to = TREE_OPERAND (*expr_p, 0);
4378
4379 to_ptr = build_fold_addr_expr_loc (loc, to);
4380 gimplify_arg (&to_ptr, seq_p, loc);
4381 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4382
4383 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4384
4385 if (want_value)
4386 {
4387 /* tmp = memset() */
4388 t = create_tmp_var (TREE_TYPE (to_ptr));
4389 gimple_call_set_lhs (gs, t);
4390 gimplify_seq_add_stmt (seq_p, gs);
4391
4392 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4393 return GS_ALL_DONE;
4394 }
4395
4396 gimplify_seq_add_stmt (seq_p, gs);
4397 *expr_p = NULL;
4398 return GS_ALL_DONE;
4399 }
4400
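/* For instance, zero-initialization through an empty constructor,
   as in `a = (struct S){ };', is emitted, roughly, as

     __builtin_memset (&a, 0, size);  */
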
4401 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4402 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4403 assignment. Return non-null if we detect a potential overlap. */
4404
4405 struct gimplify_init_ctor_preeval_data
4406 {
4407 /* The base decl of the lhs object. May be NULL, in which case we
4408 have to assume the lhs is indirect. */
4409 tree lhs_base_decl;
4410
4411 /* The alias set of the lhs object. */
4412 alias_set_type lhs_alias_set;
4413 };
4414
4415 static tree
4416 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4417 {
4418 struct gimplify_init_ctor_preeval_data *data
4419 = (struct gimplify_init_ctor_preeval_data *) xdata;
4420 tree t = *tp;
4421
4422 /* If we find the base object, obviously we have overlap. */
4423 if (data->lhs_base_decl == t)
4424 return t;
4425
4426 /* If the constructor component is indirect, determine if we have a
4427 potential overlap with the lhs. The only bits of information we
4428 have to go on at this point are addressability and alias sets. */
4429 if ((INDIRECT_REF_P (t)
4430 || TREE_CODE (t) == MEM_REF)
4431 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4432 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4433 return t;
4434
4435 /* If the constructor component is a call, determine if it can hide a
4436 potential overlap with the lhs through an INDIRECT_REF like above.
4437 ??? Ugh - this is completely broken. In fact this whole analysis
4438 doesn't look conservative. */
4439 if (TREE_CODE (t) == CALL_EXPR)
4440 {
4441 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4442
4443 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4444 if (POINTER_TYPE_P (TREE_VALUE (type))
4445 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4446 && alias_sets_conflict_p (data->lhs_alias_set,
4447 get_alias_set
4448 (TREE_TYPE (TREE_VALUE (type)))))
4449 return t;
4450 }
4451
4452 if (IS_TYPE_OR_DECL_P (t))
4453 *walk_subtrees = 0;
4454 return NULL;
4455 }
4456
4457 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4458 force values that overlap with the lhs (as described by *DATA)
4459 into temporaries. */
4460
4461 static void
4462 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4463 struct gimplify_init_ctor_preeval_data *data)
4464 {
4465 enum gimplify_status one;
4466
4467 /* If the value is constant, then there's nothing to pre-evaluate. */
4468 if (TREE_CONSTANT (*expr_p))
4469 {
4470 /* Ensure it does not have side effects, it might contain a reference to
4471 the object we're initializing. */
4472 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4473 return;
4474 }
4475
4476 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4477 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4478 return;
4479
4480 /* Recurse for nested constructors. */
4481 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4482 {
4483 unsigned HOST_WIDE_INT ix;
4484 constructor_elt *ce;
4485 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4486
4487 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4488 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4489
4490 return;
4491 }
4492
4493 /* If this is a variable sized type, we must remember the size. */
4494 maybe_with_size_expr (expr_p);
4495
4496 /* Gimplify the constructor element to something appropriate for the rhs
4497 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4498 the gimplifier will consider this a store to memory. Doing this
4499 gimplification now means that we won't have to deal with complicated
4500 language-specific trees, nor trees like SAVE_EXPR that can induce
4501 exponential search behavior. */
4502 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4503 if (one == GS_ERROR)
4504 {
4505 *expr_p = NULL;
4506 return;
4507 }
4508
4509 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4510 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4511 always be true for all scalars, since is_gimple_mem_rhs insists on a
4512 temporary variable for them. */
4513 if (DECL_P (*expr_p))
4514 return;
4515
4516 /* If this is of variable size, we have no choice but to assume it doesn't
4517 overlap since we can't make a temporary for it. */
4518 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4519 return;
4520
4521 /* Otherwise, we must search for overlap ... */
4522 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4523 return;
4524
4525 /* ... and if found, force the value into a temporary. */
4526 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4527 }
4528
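/* A hypothetical example: in `a = (struct A){ .x = a.y };' the
   element value `a.y' overlaps the lhs, so it is pre-evaluated into
   a temporary

     t = a.y;

   before any stores into `a' can clobber it.  */
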
4529 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4530 a RANGE_EXPR in a CONSTRUCTOR for an array.
4531
4532 var = lower;
4533 loop_entry:
4534 object[var] = value;
4535 if (var == upper)
4536 goto loop_exit;
4537 var = var + 1;
4538 goto loop_entry;
4539 loop_exit:
4540
4541 We increment var _after_ the loop exit check because we might otherwise
4542 fail if upper == TYPE_MAX_VALUE (type for upper).
4543
4544 Note that we never have to deal with SAVE_EXPRs here, because this has
4545 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4546
4547 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4548 gimple_seq *, bool);
4549
4550 static void
4551 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4552 tree value, tree array_elt_type,
4553 gimple_seq *pre_p, bool cleared)
4554 {
4555 tree loop_entry_label, loop_exit_label, fall_thru_label;
4556 tree var, var_type, cref, tmp;
4557
4558 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4559 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4560 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4561
4562 /* Create and initialize the index variable. */
4563 var_type = TREE_TYPE (upper);
4564 var = create_tmp_var (var_type);
4565 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4566
4567 /* Add the loop entry label. */
4568 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4569
4570 /* Build the reference. */
4571 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4572 var, NULL_TREE, NULL_TREE);
4573
4574 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4575 the store. Otherwise just assign value to the reference. */
4576
4577 if (TREE_CODE (value) == CONSTRUCTOR)
4578 /* NB we might have to call ourselves recursively through
4579 gimplify_init_ctor_eval if the value is a constructor. */
4580 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4581 pre_p, cleared);
4582 else
4583 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4584
4585 /* We exit the loop when the index var is equal to the upper bound. */
4586 gimplify_seq_add_stmt (pre_p,
4587 gimple_build_cond (EQ_EXPR, var, upper,
4588 loop_exit_label, fall_thru_label));
4589
4590 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4591
4592 /* Otherwise, increment the index var... */
4593 tmp = build2 (PLUS_EXPR, var_type, var,
4594 fold_convert (var_type, integer_one_node));
4595 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4596
4597 /* ...and jump back to the loop entry. */
4598 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4599
4600 /* Add the loop exit label. */
4601 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4602 }
4603
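/* For example, the GNU C range initializer

     int a[16] = { [0 ... 15] = v };

   reaches this routine through its RANGE_EXPR index and becomes the
   loop sketched above:

     var = 0;
     loop_entry:
     a[var] = v;
     if (var == 15) goto loop_exit;
     var = var + 1;
     goto loop_entry;
     loop_exit:  */
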
4604 /* Return true if FDECL is accessing a field that is zero sized. */
4605
4606 static bool
4607 zero_sized_field_decl (const_tree fdecl)
4608 {
4609 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4610 && integer_zerop (DECL_SIZE (fdecl)))
4611 return true;
4612 return false;
4613 }
4614
4615 /* Return true if TYPE is zero sized. */
4616
4617 static bool
4618 zero_sized_type (const_tree type)
4619 {
4620 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4621 && integer_zerop (TYPE_SIZE (type)))
4622 return true;
4623 return false;
4624 }
4625
4626 /* A subroutine of gimplify_init_constructor. Generate individual
4627 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4628 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4629 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4630 zeroed first. */
4631
4632 static void
4633 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4634 gimple_seq *pre_p, bool cleared)
4635 {
4636 tree array_elt_type = NULL;
4637 unsigned HOST_WIDE_INT ix;
4638 tree purpose, value;
4639
4640 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4641 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4642
4643 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4644 {
4645 tree cref;
4646
4647 /* NULL values are created above for gimplification errors. */
4648 if (value == NULL)
4649 continue;
4650
4651 if (cleared && initializer_zerop (value))
4652 continue;
4653
4654 /* ??? Here's to hoping the front end fills in all of the indices,
4655 so we don't have to figure out what's missing ourselves. */
4656 gcc_assert (purpose);
4657
4658 /* Skip zero-sized fields, unless value has side-effects. This can
4659 happen with calls to functions returning a zero-sized type, which
4660 we shouldn't discard. As a number of downstream passes don't
4661 expect sets of zero-sized fields, we rely on the gimplification of
4662 the MODIFY_EXPR we make below to drop the assignment statement. */
4663 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4664 continue;
4665
4666 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4667 whole range. */
4668 if (TREE_CODE (purpose) == RANGE_EXPR)
4669 {
4670 tree lower = TREE_OPERAND (purpose, 0);
4671 tree upper = TREE_OPERAND (purpose, 1);
4672
4673 /* If the lower bound is equal to upper, just treat it as if
4674 upper was the index. */
4675 if (simple_cst_equal (lower, upper))
4676 purpose = upper;
4677 else
4678 {
4679 gimplify_init_ctor_eval_range (object, lower, upper, value,
4680 array_elt_type, pre_p, cleared);
4681 continue;
4682 }
4683 }
4684
4685 if (array_elt_type)
4686 {
4687 /* Do not use bitsizetype for ARRAY_REF indices. */
4688 if (TYPE_DOMAIN (TREE_TYPE (object)))
4689 purpose
4690 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4691 purpose);
4692 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4693 purpose, NULL_TREE, NULL_TREE);
4694 }
4695 else
4696 {
4697 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4698 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4699 unshare_expr (object), purpose, NULL_TREE);
4700 }
4701
4702 if (TREE_CODE (value) == CONSTRUCTOR
4703 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4704 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4705 pre_p, cleared);
4706 else
4707 {
4708 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4709 gimplify_and_add (init, pre_p);
4710 ggc_free (init);
4711 }
4712 }
4713 }
4714
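/* For example (informally), with CLEARED false an initializer

     struct P p = { .x = 1, .y = f () };

   is broken down into the individual assignments

     p.x = 1;
     p.y = f ();  */
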
4715 /* Return the appropriate RHS predicate for this LHS. */
4716
4717 gimple_predicate
4718 rhs_predicate_for (tree lhs)
4719 {
4720 if (is_gimple_reg (lhs))
4721 return is_gimple_reg_rhs_or_call;
4722 else
4723 return is_gimple_mem_rhs_or_call;
4724 }
4725
4726 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4727 before the LHS has been gimplified. */
4728
4729 static gimple_predicate
4730 initial_rhs_predicate_for (tree lhs)
4731 {
4732 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4733 return is_gimple_reg_rhs_or_call;
4734 else
4735 return is_gimple_mem_rhs_or_call;
4736 }
4737
4738 /* Gimplify a C99 compound literal expression. This just means adding
4739 the DECL_EXPR before the current statement and using its anonymous
4740 decl instead. */
4741
4742 static enum gimplify_status
4743 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4744 bool (*gimple_test_f) (tree),
4745 fallback_t fallback)
4746 {
4747 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4748 tree decl = DECL_EXPR_DECL (decl_s);
4749 tree init = DECL_INITIAL (decl);
4750 /* Mark the decl as addressable if the compound literal
4751 expression is addressable now, otherwise it is marked too late
4752 after we gimplify the initialization expression. */
4753 if (TREE_ADDRESSABLE (*expr_p))
4754 TREE_ADDRESSABLE (decl) = 1;
4755 /* Otherwise, if we don't need an lvalue and have a literal, directly
4756 substitute it. Check if it matches the gimple predicate, as
4757 otherwise we'd generate a new temporary, and we can as well just
4758 use the decl we already have. */
4759 else if (!TREE_ADDRESSABLE (decl)
4760 && !TREE_THIS_VOLATILE (decl)
4761 && init
4762 && (fallback & fb_lvalue) == 0
4763 && gimple_test_f (init))
4764 {
4765 *expr_p = init;
4766 return GS_OK;
4767 }
4768
4769 /* Preliminarily mark non-addressed complex variables as eligible
4770 for promotion to gimple registers. We'll transform their uses
4771 as we find them. */
4772 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4773 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4774 && !TREE_THIS_VOLATILE (decl)
4775 && !needs_to_live_in_memory (decl))
4776 DECL_GIMPLE_REG_P (decl) = 1;
4777
4778 /* If the decl is not addressable, then it is being used in some
4779 expression or on the right hand side of a statement, and it can
4780 be put into a readonly data section. */
4781 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4782 TREE_READONLY (decl) = 1;
4783
4784 /* This decl isn't mentioned in the enclosing block, so add it to the
4785 list of temps. FIXME it seems a bit of a kludge to say that
4786 anonymous artificial vars aren't pushed, but everything else is. */
4787 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4788 gimple_add_tmp_var (decl);
4789
4790 gimplify_and_add (decl_s, pre_p);
4791 *expr_p = decl;
4792 return GS_OK;
4793 }
4794
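/* For example, `int *p = (int[]){ 1, 2, 3 };' gimplifies roughly to

     D.1 = { 1, 2, 3 };
     p = &D.1;

   where D.1 stands for the compound literal's anonymous decl, whose
   DECL_EXPR has been added to PRE_P above.  */
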
4795 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4796 return a new CONSTRUCTOR if something changed. */
4797
4798 static tree
4799 optimize_compound_literals_in_ctor (tree orig_ctor)
4800 {
4801 tree ctor = orig_ctor;
4802 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4803 unsigned int idx, num = vec_safe_length (elts);
4804
4805 for (idx = 0; idx < num; idx++)
4806 {
4807 tree value = (*elts)[idx].value;
4808 tree newval = value;
4809 if (TREE_CODE (value) == CONSTRUCTOR)
4810 newval = optimize_compound_literals_in_ctor (value);
4811 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4812 {
4813 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4814 tree decl = DECL_EXPR_DECL (decl_s);
4815 tree init = DECL_INITIAL (decl);
4816
4817 if (!TREE_ADDRESSABLE (value)
4818 && !TREE_ADDRESSABLE (decl)
4819 && init
4820 && TREE_CODE (init) == CONSTRUCTOR)
4821 newval = optimize_compound_literals_in_ctor (init);
4822 }
4823 if (newval == value)
4824 continue;
4825
4826 if (ctor == orig_ctor)
4827 {
4828 ctor = copy_node (orig_ctor);
4829 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4830 elts = CONSTRUCTOR_ELTS (ctor);
4831 }
4832 (*elts)[idx].value = newval;
4833 }
4834 return ctor;
4835 }
4836
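/* For example (a sketch): a constructor element that is itself a
   compound literal, as in

     { .a = (struct S){ 1, 2 } }

   is flattened to

     { .a = { 1, 2 } }

   provided neither the literal nor its anonymous decl is
   addressable.  */
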
4837 /* A subroutine of gimplify_modify_expr. Break out elements of a
4838 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4839
4840 Note that we still need to clear any elements that don't have explicit
4841 initializers, so if not all elements are initialized we keep the
4842 original MODIFY_EXPR, we just remove all of the constructor elements.
4843
4844 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4845 GS_ERROR if we would have to create a temporary when gimplifying
4846 this constructor. Otherwise, return GS_OK.
4847
4848 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4849
4850 static enum gimplify_status
4851 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4852 bool want_value, bool notify_temp_creation)
4853 {
4854 tree object, ctor, type;
4855 enum gimplify_status ret;
4856 vec<constructor_elt, va_gc> *elts;
4857
4858 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4859
4860 if (!notify_temp_creation)
4861 {
4862 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4863 is_gimple_lvalue, fb_lvalue);
4864 if (ret == GS_ERROR)
4865 return ret;
4866 }
4867
4868 object = TREE_OPERAND (*expr_p, 0);
4869 ctor = TREE_OPERAND (*expr_p, 1)
4870 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4871 type = TREE_TYPE (ctor);
4872 elts = CONSTRUCTOR_ELTS (ctor);
4873 ret = GS_ALL_DONE;
4874
4875 switch (TREE_CODE (type))
4876 {
4877 case RECORD_TYPE:
4878 case UNION_TYPE:
4879 case QUAL_UNION_TYPE:
4880 case ARRAY_TYPE:
4881 {
4882 struct gimplify_init_ctor_preeval_data preeval_data;
4883 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4884 HOST_WIDE_INT num_unique_nonzero_elements;
4885 bool cleared, complete_p, valid_const_initializer;
4886 /* Use readonly data for initializers of this or smaller size
4887 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4888 ratio. */
4889 const HOST_WIDE_INT min_unique_size = 64;
4890 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4891 is smaller than this, use readonly data. */
4892 const int unique_nonzero_ratio = 8;
4893
4894 /* Aggregate types must lower constructors to initialization of
4895 individual elements. The exception is that a CONSTRUCTOR node
4896 with no elements indicates zero-initialization of the whole. */
4897 if (vec_safe_is_empty (elts))
4898 {
4899 if (notify_temp_creation)
4900 return GS_OK;
4901 break;
4902 }
4903
4904 /* Fetch information about the constructor to direct later processing.
4905 We might want to make static versions of it in various cases, and
4906 can only do so if it is known to be a valid constant initializer. */
4907 valid_const_initializer
4908 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4909 &num_unique_nonzero_elements,
4910 &num_ctor_elements, &complete_p);
4911
4912 /* If a const aggregate variable is being initialized, then it
4913 should never be a loss to promote the variable to be static. */
4914 if (valid_const_initializer
4915 && num_nonzero_elements > 1
4916 && TREE_READONLY (object)
4917 && VAR_P (object)
4918 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4919 /* For ctors that have many repeated nonzero elements
4920 represented through RANGE_EXPRs, prefer initializing
4921 those through runtime loops over copies of large amounts
4922 of data from readonly data section. */
4923 && (num_unique_nonzero_elements
4924 > num_nonzero_elements / unique_nonzero_ratio
4925 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4926 <= (unsigned HOST_WIDE_INT) min_unique_size)))
4927 {
4928 if (notify_temp_creation)
4929 return GS_ERROR;
4930 DECL_INITIAL (object) = ctor;
4931 TREE_STATIC (object) = 1;
4932 if (!DECL_NAME (object))
4933 DECL_NAME (object) = create_tmp_var_name ("C");
4934 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4935
4936 /* ??? C++ doesn't automatically append a .<number> to the
4937 assembler name, and even when it does, it looks at FE private
4938 data structures to figure out what that number should be,
4939 which are not set for this variable. I suppose this is
4940 important for local statics for inline functions, which aren't
4941 "local" in the object file sense. So in order to get a unique
4942 TU-local symbol, we must invoke the lhd version now. */
4943 lhd_set_decl_assembler_name (object);
4944
4945 *expr_p = NULL_TREE;
4946 break;
4947 }
4948
4949 /* If there are "lots" of initialized elements, even discounting
4950 those that are not address constants (and thus *must* be
4951 computed at runtime), then partition the constructor into
4952 constant and non-constant parts. Block copy the constant
4953 parts in, then generate code for the non-constant parts. */
4954 /* TODO. There's code in cp/typeck.c to do this. */
4955
4956 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4957 /* store_constructor will ignore the clearing of variable-sized
4958 objects. Initializers for such objects must explicitly set
4959 every field that needs to be set. */
4960 cleared = false;
4961 else if (!complete_p)
4962 /* If the constructor isn't complete, clear the whole object
4963 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4964
4965 ??? This ought not to be needed. For any element not present
4966 in the initializer, we should simply set it to zero. Except
4967 we'd need to *find* the elements that are not present, and that
4968 requires trickery to avoid quadratic compile-time behavior in
4969 large cases or excessive memory use in small cases. */
4970 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
4971 else if (num_ctor_elements - num_nonzero_elements
4972 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4973 && num_nonzero_elements < num_ctor_elements / 4)
4974 /* If there are "lots" of zeros, it's more efficient to clear
4975 the memory and then set the nonzero elements. */
4976 cleared = true;
4977 else
4978 cleared = false;
4979
4980 /* If there are "lots" of initialized elements, and all of them
4981 are valid address constants, then the entire initializer can
4982 be dropped to memory, and then memcpy'd out. Don't do this
4983 for sparse arrays, though, as it's more efficient to follow
4984 the standard CONSTRUCTOR behavior of memset followed by
4985 individual element initialization. Also don't do this for small
4986 all-zero initializers (which aren't big enough to merit
4987 clearing), and don't try to make bitwise copies of
4988 TREE_ADDRESSABLE types. */
4989
4990 if (valid_const_initializer
4991 && !(cleared || num_nonzero_elements == 0)
4992 && !TREE_ADDRESSABLE (type))
4993 {
4994 HOST_WIDE_INT size = int_size_in_bytes (type);
4995 unsigned int align;
4996
4997 /* ??? We can still get unbounded array types, at least
4998 from the C++ front end. This seems wrong, but attempt
4999 to work around it for now. */
5000 if (size < 0)
5001 {
5002 size = int_size_in_bytes (TREE_TYPE (object));
5003 if (size >= 0)
5004 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5005 }
5006
5007 /* Find the maximum alignment we can assume for the object. */
5008 /* ??? Make use of DECL_OFFSET_ALIGN. */
5009 if (DECL_P (object))
5010 align = DECL_ALIGN (object);
5011 else
5012 align = TYPE_ALIGN (type);
5013
5014 /* Do a block move either if the size is so small as to make
5015 each individual move a sub-unit move on average, or if it
5016 is so large as to make individual moves inefficient. */
5017 if (size > 0
5018 && num_nonzero_elements > 1
5019 /* For ctors that have many repeated nonzero elements
5020 represented through RANGE_EXPRs, prefer initializing
5021 those through runtime loops over copies of large amounts
5022 of data from readonly data section. */
5023 && (num_unique_nonzero_elements
5024 > num_nonzero_elements / unique_nonzero_ratio
5025 || size <= min_unique_size)
5026 && (size < num_nonzero_elements
5027 || !can_move_by_pieces (size, align)))
5028 {
5029 if (notify_temp_creation)
5030 return GS_ERROR;
5031
5032 walk_tree (&ctor, force_labels_r, NULL, NULL);
5033 ctor = tree_output_constant_def (ctor);
5034 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5035 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5036 TREE_OPERAND (*expr_p, 1) = ctor;
5037
5038 /* This is no longer an assignment of a CONSTRUCTOR, but
5039 we still may have processing to do on the LHS. So
5040 pretend we didn't do anything here to let that happen. */
5041 return GS_UNHANDLED;
5042 }
5043 }
5044
5045 /* If the target is volatile, and we have non-zero elements and more
5046 than one field to assign, initialize the target from a temporary. */
5047 if (TREE_THIS_VOLATILE (object)
5048 && !TREE_ADDRESSABLE (type)
5049 && (num_nonzero_elements > 0 || !cleared)
5050 && vec_safe_length (elts) > 1)
5051 {
5052 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5053 TREE_OPERAND (*expr_p, 0) = temp;
5054 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5055 *expr_p,
5056 build2 (MODIFY_EXPR, void_type_node,
5057 object, temp));
5058 return GS_OK;
5059 }
5060
5061 if (notify_temp_creation)
5062 return GS_OK;
5063
5064 /* If there are nonzero elements and if needed, pre-evaluate to capture
5065 elements overlapping with the lhs into temporaries. We must do this
5066 before clearing to fetch the values before they are zeroed-out. */
5067 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5068 {
5069 preeval_data.lhs_base_decl = get_base_address (object);
5070 if (!DECL_P (preeval_data.lhs_base_decl))
5071 preeval_data.lhs_base_decl = NULL;
5072 preeval_data.lhs_alias_set = get_alias_set (object);
5073
5074 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5075 pre_p, post_p, &preeval_data);
5076 }
5077
5078 bool ctor_has_side_effects_p
5079 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5080
5081 if (cleared)
5082 {
5083 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5084 Note that we still have to gimplify, in order to handle the
5085 case of variable sized types. Avoid shared tree structures. */
5086 CONSTRUCTOR_ELTS (ctor) = NULL;
5087 TREE_SIDE_EFFECTS (ctor) = 0;
5088 object = unshare_expr (object);
5089 gimplify_stmt (expr_p, pre_p);
5090 }
5091
5092 /* If we have not block cleared the object, or if there are nonzero
5093 elements in the constructor, or if the constructor has side effects,
5094 add assignments to the individual scalar fields of the object. */
5095 if (!cleared
5096 || num_nonzero_elements > 0
5097 || ctor_has_side_effects_p)
5098 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5099
5100 *expr_p = NULL_TREE;
5101 }
5102 break;
5103
5104 case COMPLEX_TYPE:
5105 {
5106 tree r, i;
5107
5108 if (notify_temp_creation)
5109 return GS_OK;
5110
5111 /* Extract the real and imaginary parts out of the ctor. */
5112 gcc_assert (elts->length () == 2);
5113 r = (*elts)[0].value;
5114 i = (*elts)[1].value;
5115 if (r == NULL || i == NULL)
5116 {
5117 tree zero = build_zero_cst (TREE_TYPE (type));
5118 if (r == NULL)
5119 r = zero;
5120 if (i == NULL)
5121 i = zero;
5122 }
5123
5124 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5125 represent creation of a complex value. */
5126 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5127 {
5128 ctor = build_complex (type, r, i);
5129 TREE_OPERAND (*expr_p, 1) = ctor;
5130 }
5131 else
5132 {
5133 ctor = build2 (COMPLEX_EXPR, type, r, i);
5134 TREE_OPERAND (*expr_p, 1) = ctor;
5135 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5136 pre_p,
5137 post_p,
5138 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5139 fb_rvalue);
5140 }
5141 }
5142 break;
5143
5144 case VECTOR_TYPE:
5145 {
5146 unsigned HOST_WIDE_INT ix;
5147 constructor_elt *ce;
5148
5149 if (notify_temp_creation)
5150 return GS_OK;
5151
5152 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5153 if (TREE_CONSTANT (ctor))
5154 {
5155 bool constant_p = true;
5156 tree value;
5157
5158 /* Even when ctor is constant, it might contain non-*_CST
5159 elements, such as addresses or trapping values like
5160 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5161 in VECTOR_CST nodes. */
5162 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5163 if (!CONSTANT_CLASS_P (value))
5164 {
5165 constant_p = false;
5166 break;
5167 }
5168
5169 if (constant_p)
5170 {
5171 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5172 break;
5173 }
5174
5175 TREE_CONSTANT (ctor) = 0;
5176 }
5177
5178 /* Vector types use CONSTRUCTOR all the way through gimple
5179 compilation as a general initializer. */
5180 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5181 {
5182 enum gimplify_status tret;
5183 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5184 fb_rvalue);
5185 if (tret == GS_ERROR)
5186 ret = GS_ERROR;
5187 else if (TREE_STATIC (ctor)
5188 && !initializer_constant_valid_p (ce->value,
5189 TREE_TYPE (ce->value)))
5190 TREE_STATIC (ctor) = 0;
5191 }
5192 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5193 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5194 }
5195 break;
5196
5197 default:
5198 /* So how did we get a CONSTRUCTOR for a scalar type? */
5199 gcc_unreachable ();
5200 }
5201
5202 if (ret == GS_ERROR)
5203 return GS_ERROR;
5204 /* If we have gimplified both sides of the initializer but have
5205 not emitted an assignment, do so now. */
5206 if (*expr_p)
5207 {
5208 tree lhs = TREE_OPERAND (*expr_p, 0);
5209 tree rhs = TREE_OPERAND (*expr_p, 1);
5210 if (want_value && object == lhs)
5211 lhs = unshare_expr (lhs);
5212 gassign *init = gimple_build_assign (lhs, rhs);
5213 gimplify_seq_add_stmt (pre_p, init);
5214 }
5215 if (want_value)
5216 {
5217 *expr_p = object;
5218 return GS_OK;
5219 }
5220 else
5221 {
5222 *expr_p = NULL;
5223 return GS_ALL_DONE;
5224 }
5225 }
5226
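/* Informally, and as a sketch only: a function-local

     const int tab[4] = { 1, 2, 3, 4 };

   has a valid constant initializer with several distinct nonzero
   elements, so `tab' is promoted to TREE_STATIC above and no
   run-time stores are emitted; a mostly-zero initializer such as
   `int buf[64] = { 1 };' instead takes the `cleared' path, zeroing
   the object and then storing the lone nonzero element.  */
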
5227 /* Given a pointer value OP0, return a simplified version of an
5228 indirection through OP0, or NULL_TREE if no simplification is
5229 possible. This may only be applied to a rhs of an expression.
5230 Note that the resulting type may differ from the type pointed to,
5231 in the sense that it is still compatible from the langhooks
5232 point of view. */
5233
5234 static tree
5235 gimple_fold_indirect_ref_rhs (tree t)
5236 {
5237 return gimple_fold_indirect_ref (t);
5238 }
5239
5240 /* Subroutine of gimplify_modify_expr to do simplifications of
5241 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5242 something changes. */
5243
5244 static enum gimplify_status
5245 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5246 gimple_seq *pre_p, gimple_seq *post_p,
5247 bool want_value)
5248 {
5249 enum gimplify_status ret = GS_UNHANDLED;
5250 bool changed;
5251
5252 do
5253 {
5254 changed = false;
5255 switch (TREE_CODE (*from_p))
5256 {
5257 case VAR_DECL:
5258 /* If we're assigning from a read-only variable initialized with
5259 a constructor, do the direct assignment from the constructor,
5260 but only if neither source nor target are volatile since this
5261 latter assignment might end up being done on a per-field basis. */
5262 if (DECL_INITIAL (*from_p)
5263 && TREE_READONLY (*from_p)
5264 && !TREE_THIS_VOLATILE (*from_p)
5265 && !TREE_THIS_VOLATILE (*to_p)
5266 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5267 {
5268 tree old_from = *from_p;
5269 enum gimplify_status subret;
5270
5271 /* Move the constructor into the RHS. */
5272 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5273
5274 /* Let's see if gimplify_init_constructor will need to put
5275 it in memory. */
5276 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5277 false, true);
5278 if (subret == GS_ERROR)
5279 {
5280 /* If so, revert the change. */
5281 *from_p = old_from;
5282 }
5283 else
5284 {
5285 ret = GS_OK;
5286 changed = true;
5287 }
5288 }
5289 break;
5290 case INDIRECT_REF:
5291 {
5292 /* If we have code like
5293
5294 *(const A*)(A*)&x
5295
5296 where the type of "x" is a (possibly cv-qualified variant
5297 of "A"), treat the entire expression as identical to "x".
5298 This kind of code arises in C++ when an object is bound
5299 to a const reference, and if "x" is a TARGET_EXPR we want
5300 to take advantage of the optimization below. */
5301 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5302 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5303 if (t)
5304 {
5305 if (TREE_THIS_VOLATILE (t) != volatile_p)
5306 {
5307 if (DECL_P (t))
5308 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5309 build_fold_addr_expr (t));
5310 if (REFERENCE_CLASS_P (t))
5311 TREE_THIS_VOLATILE (t) = volatile_p;
5312 }
5313 *from_p = t;
5314 ret = GS_OK;
5315 changed = true;
5316 }
5317 break;
5318 }
5319
5320 case TARGET_EXPR:
5321 {
5322 /* If we are initializing something from a TARGET_EXPR, strip the
5323 TARGET_EXPR and initialize it directly, if possible. This can't
5324 be done if the initializer is void, since that implies that the
5325 temporary is set in some non-trivial way.
5326
5327 ??? What about code that pulls out the temp and uses it
5328 elsewhere? I think that such code never uses the TARGET_EXPR as
5329 an initializer. If I'm wrong, we'll die because the temp won't
5330 have any RTL. In that case, I guess we'll need to replace
5331 references somehow. */
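/* Illustrative example (D.1234 is just a placeholder name):
     x = TARGET_EXPR <D.1234, f ()>
   is rewritten here to x = f (), eliding the temporary.  */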
5332 tree init = TARGET_EXPR_INITIAL (*from_p);
5333
5334 if (init
5335 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5336 || !TARGET_EXPR_NO_ELIDE (*from_p))
5337 && !VOID_TYPE_P (TREE_TYPE (init)))
5338 {
5339 *from_p = init;
5340 ret = GS_OK;
5341 changed = true;
5342 }
5343 }
5344 break;
5345
5346 case COMPOUND_EXPR:
5347 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5348 caught. */
5349 gimplify_compound_expr (from_p, pre_p, true);
5350 ret = GS_OK;
5351 changed = true;
5352 break;
5353
5354 case CONSTRUCTOR:
5355 /* If we already made some changes, let the front end have a
5356 crack at this before we break it down. */
5357 if (ret != GS_UNHANDLED)
5358 break;
5359 /* If we're initializing from a CONSTRUCTOR, break this into
5360 individual MODIFY_EXPRs. */
5361 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5362 false);
5363
5364 case COND_EXPR:
5365 /* If we're assigning to a non-register type, push the assignment
5366 down into the branches. This is mandatory for ADDRESSABLE types,
5367 since we cannot generate temporaries for such, but it saves a
5368 copy in other cases as well. */
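/* Sketch of the transformation, with illustrative names: for an
   aggregate x, x = b ? y : z becomes the void conditional
   b ? (x = y) : (x = z), so no temporary of the aggregate type
   is needed.  */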
5369 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5370 {
5371 /* This code should mirror the code in gimplify_cond_expr. */
5372 enum tree_code code = TREE_CODE (*expr_p);
5373 tree cond = *from_p;
5374 tree result = *to_p;
5375
5376 ret = gimplify_expr (&result, pre_p, post_p,
5377 is_gimple_lvalue, fb_lvalue);
5378 if (ret != GS_ERROR)
5379 ret = GS_OK;
5380
5381 /* If we are going to write RESULT more than once, clear
5382 TREE_READONLY flag, otherwise we might incorrectly promote
5383 the variable to static const and initialize it at compile
5384 time in one of the branches. */
5385 if (VAR_P (result)
5386 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5387 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5388 TREE_READONLY (result) = 0;
5389 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5390 TREE_OPERAND (cond, 1)
5391 = build2 (code, void_type_node, result,
5392 TREE_OPERAND (cond, 1));
5393 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5394 TREE_OPERAND (cond, 2)
5395 = build2 (code, void_type_node, unshare_expr (result),
5396 TREE_OPERAND (cond, 2));
5397
5398 TREE_TYPE (cond) = void_type_node;
5399 recalculate_side_effects (cond);
5400
5401 if (want_value)
5402 {
5403 gimplify_and_add (cond, pre_p);
5404 *expr_p = unshare_expr (result);
5405 }
5406 else
5407 *expr_p = cond;
5408 return ret;
5409 }
5410 break;
5411
5412 case CALL_EXPR:
5413 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5414 return slot so that we don't generate a temporary. */
5415 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5416 && aggregate_value_p (*from_p, *from_p))
5417 {
5418 bool use_target;
5419
5420 if (!(rhs_predicate_for (*to_p))(*from_p))
5421 /* If we need a temporary, *to_p isn't accurate. */
5422 use_target = false;
5423 /* It's OK to use the return slot directly unless it's an NRV. */
5424 else if (TREE_CODE (*to_p) == RESULT_DECL
5425 && DECL_NAME (*to_p) == NULL_TREE
5426 && needs_to_live_in_memory (*to_p))
5427 use_target = true;
5428 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5429 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5430 /* Don't force regs into memory. */
5431 use_target = false;
5432 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5433 /* It's OK to use the target directly if it's being
5434 initialized. */
5435 use_target = true;
5436 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5437 != INTEGER_CST)
5438 /* Always use the target and thus RSO for variable-sized types.
5439 GIMPLE cannot deal with a variable-sized assignment
5440 embedded in a call statement. */
5441 use_target = true;
5442 else if (TREE_CODE (*to_p) != SSA_NAME
5443 && (!is_gimple_variable (*to_p)
5444 || needs_to_live_in_memory (*to_p)))
5445 /* Don't use the original target if it's already addressable;
5446 if its address escapes, and the called function uses the
5447 NRV optimization, a conforming program could see *to_p
5448 change before the called function returns; see c++/19317.
5449 When optimizing, the return_slot pass marks more functions
5450 as safe after we have escape info. */
5451 use_target = false;
5452 else
5453 use_target = true;
5454
5455 if (use_target)
5456 {
5457 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5458 mark_addressable (*to_p);
5459 }
5460 }
5461 break;
5462
5463 case WITH_SIZE_EXPR:
5464 /* Likewise for calls that return an aggregate of non-constant size,
5465 since we would not be able to generate a temporary at all. */
5466 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5467 {
5468 *from_p = TREE_OPERAND (*from_p, 0);
5469 /* We don't change ret in this case because the
5470 WITH_SIZE_EXPR might have been added in
5471 gimplify_modify_expr, so returning GS_OK would lead to an
5472 infinite loop. */
5473 changed = true;
5474 }
5475 break;
5476
5477 /* If we're initializing from a container, push the initialization
5478 inside it. */
5479 case CLEANUP_POINT_EXPR:
5480 case BIND_EXPR:
5481 case STATEMENT_LIST:
5482 {
5483 tree wrap = *from_p;
5484 tree t;
5485
5486 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5487 fb_lvalue);
5488 if (ret != GS_ERROR)
5489 ret = GS_OK;
5490
5491 t = voidify_wrapper_expr (wrap, *expr_p);
5492 gcc_assert (t == *expr_p);
5493
5494 if (want_value)
5495 {
5496 gimplify_and_add (wrap, pre_p);
5497 *expr_p = unshare_expr (*to_p);
5498 }
5499 else
5500 *expr_p = wrap;
5501 return GS_OK;
5502 }
5503
5504 case COMPOUND_LITERAL_EXPR:
5505 {
5506 tree complit = TREE_OPERAND (*expr_p, 1);
5507 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5508 tree decl = DECL_EXPR_DECL (decl_s);
5509 tree init = DECL_INITIAL (decl);
5510
5511 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5512 into struct T x = { 0, 1, 2 } if the address of the
5513 compound literal has never been taken. */
5514 if (!TREE_ADDRESSABLE (complit)
5515 && !TREE_ADDRESSABLE (decl)
5516 && init)
5517 {
5518 *expr_p = copy_node (*expr_p);
5519 TREE_OPERAND (*expr_p, 1) = init;
5520 return GS_OK;
5521 }
5522 }
5523
5524 default:
5525 break;
5526 }
5527 }
5528 while (changed);
5529
5530 return ret;
5531 }
5532
5533
5534 /* Return true if T looks like a valid GIMPLE statement. */
5535
5536 static bool
5537 is_gimple_stmt (tree t)
5538 {
5539 const enum tree_code code = TREE_CODE (t);
5540
5541 switch (code)
5542 {
5543 case NOP_EXPR:
5544 /* The only valid NOP_EXPR is the empty statement. */
5545 return IS_EMPTY_STMT (t);
5546
5547 case BIND_EXPR:
5548 case COND_EXPR:
5549 /* These are only valid if they're void. */
5550 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5551
5552 case SWITCH_EXPR:
5553 case GOTO_EXPR:
5554 case RETURN_EXPR:
5555 case LABEL_EXPR:
5556 case CASE_LABEL_EXPR:
5557 case TRY_CATCH_EXPR:
5558 case TRY_FINALLY_EXPR:
5559 case EH_FILTER_EXPR:
5560 case CATCH_EXPR:
5561 case ASM_EXPR:
5562 case STATEMENT_LIST:
5563 case OACC_PARALLEL:
5564 case OACC_KERNELS:
5565 case OACC_SERIAL:
5566 case OACC_DATA:
5567 case OACC_HOST_DATA:
5568 case OACC_DECLARE:
5569 case OACC_UPDATE:
5570 case OACC_ENTER_DATA:
5571 case OACC_EXIT_DATA:
5572 case OACC_CACHE:
5573 case OMP_PARALLEL:
5574 case OMP_FOR:
5575 case OMP_SIMD:
5576 case OMP_DISTRIBUTE:
5577 case OMP_LOOP:
5578 case OACC_LOOP:
5579 case OMP_SCAN:
5580 case OMP_SECTIONS:
5581 case OMP_SECTION:
5582 case OMP_SINGLE:
5583 case OMP_MASTER:
5584 case OMP_TASKGROUP:
5585 case OMP_ORDERED:
5586 case OMP_CRITICAL:
5587 case OMP_TASK:
5588 case OMP_TARGET:
5589 case OMP_TARGET_DATA:
5590 case OMP_TARGET_UPDATE:
5591 case OMP_TARGET_ENTER_DATA:
5592 case OMP_TARGET_EXIT_DATA:
5593 case OMP_TASKLOOP:
5594 case OMP_TEAMS:
5595 /* These are always void. */
5596 return true;
5597
5598 case CALL_EXPR:
5599 case MODIFY_EXPR:
5600 case PREDICT_EXPR:
5601 /* These are valid regardless of their type. */
5602 return true;
5603
5604 default:
5605 return false;
5606 }
5607 }
5608
5609
5610 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5611 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5612 DECL_GIMPLE_REG_P set.
5613
5614 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5615 other, unmodified part of the complex object just before the total store.
5616 As a consequence, if the object is still uninitialized, an undefined value
5617 will be loaded into a register, which may result in a spurious exception
5618 if the register is floating-point and the value happens to be a signaling
5619 NaN for example. Then the fully-fledged complex operations lowering pass
5620 followed by a DCE pass are necessary in order to fix things up. */
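/* For example (illustrative), the partial store __real z = r is
   turned into
     t = __imag z;
     z = COMPLEX_EXPR <r, t>;
   where t is the formal temporary created below.  */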
5621
5622 static enum gimplify_status
5623 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5624 bool want_value)
5625 {
5626 enum tree_code code, ocode;
5627 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5628
5629 lhs = TREE_OPERAND (*expr_p, 0);
5630 rhs = TREE_OPERAND (*expr_p, 1);
5631 code = TREE_CODE (lhs);
5632 lhs = TREE_OPERAND (lhs, 0);
5633
5634 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5635 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5636 TREE_NO_WARNING (other) = 1;
5637 other = get_formal_tmp_var (other, pre_p);
5638
5639 realpart = code == REALPART_EXPR ? rhs : other;
5640 imagpart = code == REALPART_EXPR ? other : rhs;
5641
5642 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5643 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5644 else
5645 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5646
5647 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5648 *expr_p = (want_value) ? rhs : NULL_TREE;
5649
5650 return GS_ALL_DONE;
5651 }
5652
5653 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5654
5655 modify_expr
5656 : varname '=' rhs
5657 | '*' ID '=' rhs
5658
5659 PRE_P points to the list where side effects that must happen before
5660 *EXPR_P should be stored.
5661
5662 POST_P points to the list where side effects that must happen after
5663 *EXPR_P should be stored.
5664
5665 WANT_VALUE is nonzero iff we want to use the value of this expression
5666 in another expression. */
5667
5668 static enum gimplify_status
5669 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5670 bool want_value)
5671 {
5672 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5673 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5674 enum gimplify_status ret = GS_UNHANDLED;
5675 gimple *assign;
5676 location_t loc = EXPR_LOCATION (*expr_p);
5677 gimple_stmt_iterator gsi;
5678
5679 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5680 || TREE_CODE (*expr_p) == INIT_EXPR);
5681
5682 /* Trying to simplify a clobber using normal logic doesn't work,
5683 so handle it here. */
5684 if (TREE_CLOBBER_P (*from_p))
5685 {
5686 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5687 if (ret == GS_ERROR)
5688 return ret;
5689 gcc_assert (!want_value);
5690 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5691 {
5692 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5693 pre_p, post_p);
5694 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5695 }
5696 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5697 *expr_p = NULL;
5698 return GS_ALL_DONE;
5699 }
5700
5701 /* Insert pointer conversions required by the middle-end that are not
5702 required by the frontend. This fixes middle-end type checking for
5703 tests such as gcc.dg/redecl-6.c. */
5704 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5705 {
5706 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5707 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5708 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5709 }
5710
5711 /* See if any simplifications can be done based on what the RHS is. */
5712 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5713 want_value);
5714 if (ret != GS_UNHANDLED)
5715 return ret;
5716
5717 /* For zero-sized types, only gimplify the left hand side and right hand
5718 side as statements and throw away the assignment. Do this after
5719 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5720 types properly. */
5721 if (zero_sized_type (TREE_TYPE (*from_p))
5722 && !want_value
5723 /* Don't do this for calls that return addressable types, expand_call
5724 relies on those having a lhs. */
5725 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5726 && TREE_CODE (*from_p) == CALL_EXPR))
5727 {
5728 gimplify_stmt (from_p, pre_p);
5729 gimplify_stmt (to_p, pre_p);
5730 *expr_p = NULL_TREE;
5731 return GS_ALL_DONE;
5732 }
5733
5734 /* If the value being copied is of variable width, compute the length
5735 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5736 before gimplifying any of the operands so that we can resolve any
5737 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5738 the size of the expression to be copied, not of the destination, so
5739 that is what we must do here. */
5740 maybe_with_size_expr (from_p);
5741
5742 /* As a special case, we have to temporarily allow for assignments
5743 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5744 a toplevel statement, when gimplifying the GENERIC expression
5745 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5746 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5747
5748 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5749 prevent gimplify_expr from trying to create a new temporary for
5750 foo's LHS, we tell it that it should only gimplify until it
5751 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5752 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5753 and all we need to do here is set 'a' to be its LHS. */
5754
5755 /* Gimplify the RHS first for C++17 and bug 71104. */
5756 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5757 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5758 if (ret == GS_ERROR)
5759 return ret;
5760
5761 /* Then gimplify the LHS. */
5762 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5763 twice we have to make sure to gimplify into non-SSA as otherwise
5764 the abnormal edge added later will make those defs not dominate
5765 their uses.
5766 ??? Technically this applies only to the registers used in the
5767 resulting non-register *TO_P. */
5768 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5769 if (saved_into_ssa
5770 && TREE_CODE (*from_p) == CALL_EXPR
5771 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5772 gimplify_ctxp->into_ssa = false;
5773 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5774 gimplify_ctxp->into_ssa = saved_into_ssa;
5775 if (ret == GS_ERROR)
5776 return ret;
5777
5778 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5779 guess for the predicate was wrong. */
5780 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5781 if (final_pred != initial_pred)
5782 {
5783 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5784 if (ret == GS_ERROR)
5785 return ret;
5786 }
5787
5788 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
5789 size as argument to the call. */
5790 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5791 {
5792 tree call = TREE_OPERAND (*from_p, 0);
5793 tree vlasize = TREE_OPERAND (*from_p, 1);
5794
5795 if (TREE_CODE (call) == CALL_EXPR
5796 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5797 {
5798 int nargs = call_expr_nargs (call);
5799 tree type = TREE_TYPE (call);
5800 tree ap = CALL_EXPR_ARG (call, 0);
5801 tree tag = CALL_EXPR_ARG (call, 1);
5802 tree aptag = CALL_EXPR_ARG (call, 2);
5803 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5804 IFN_VA_ARG, type,
5805 nargs + 1, ap, tag,
5806 aptag, vlasize);
5807 TREE_OPERAND (*from_p, 0) = newcall;
5808 }
5809 }
5810
5811 /* Now see if the above changed *from_p to something we handle specially. */
5812 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5813 want_value);
5814 if (ret != GS_UNHANDLED)
5815 return ret;
5816
5817 /* If we've got a variable-sized assignment between two lvalues (i.e. one
5818 that does not involve a call), then we can make things a bit more
5819 straightforward by converting the assignment to memcpy or memset. */
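/* Roughly (illustrative): a variable-sized copy *p = *q whose RHS
   was wrapped as WITH_SIZE_EXPR <*q, SIZE> becomes
   memcpy (p, q, SIZE), and an (empty) CONSTRUCTOR on the RHS
   becomes a clearing memset of SIZE bytes.  */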
5820 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5821 {
5822 tree from = TREE_OPERAND (*from_p, 0);
5823 tree size = TREE_OPERAND (*from_p, 1);
5824
5825 if (TREE_CODE (from) == CONSTRUCTOR)
5826 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5827
5828 if (is_gimple_addressable (from))
5829 {
5830 *from_p = from;
5831 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5832 pre_p);
5833 }
5834 }
5835
5836 /* Transform partial stores to non-addressable complex variables into
5837 total stores. This allows us to use real instead of virtual operands
5838 for these variables, which improves optimization. */
5839 if ((TREE_CODE (*to_p) == REALPART_EXPR
5840 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5841 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5842 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5843
5844 /* Try to alleviate the effects of the gimplification creating artificial
5845 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5846 make sure not to create DECL_DEBUG_EXPR links across functions. */
5847 if (!gimplify_ctxp->into_ssa
5848 && VAR_P (*from_p)
5849 && DECL_IGNORED_P (*from_p)
5850 && DECL_P (*to_p)
5851 && !DECL_IGNORED_P (*to_p)
5852 && decl_function_context (*to_p) == current_function_decl
5853 && decl_function_context (*from_p) == current_function_decl)
5854 {
5855 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5856 DECL_NAME (*from_p)
5857 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5858 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5859 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5860 }
5861
5862 if (want_value && TREE_THIS_VOLATILE (*to_p))
5863 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5864
5865 if (TREE_CODE (*from_p) == CALL_EXPR)
5866 {
5867 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5868 instead of a GIMPLE_ASSIGN. */
5869 gcall *call_stmt;
5870 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5871 {
5872 /* Gimplify internal functions created in the FEs. */
5873 int nargs = call_expr_nargs (*from_p), i;
5874 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5875 auto_vec<tree> vargs (nargs);
5876
5877 for (i = 0; i < nargs; i++)
5878 {
5879 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5880 EXPR_LOCATION (*from_p));
5881 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5882 }
5883 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5884 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5885 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5886 }
5887 else
5888 {
5889 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5890 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5891 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5892 tree fndecl = get_callee_fndecl (*from_p);
5893 if (fndecl
5894 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
5895 && call_expr_nargs (*from_p) == 3)
5896 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5897 CALL_EXPR_ARG (*from_p, 0),
5898 CALL_EXPR_ARG (*from_p, 1),
5899 CALL_EXPR_ARG (*from_p, 2));
5900 else
5901 {
5902 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5903 }
5904 }
5905 notice_special_calls (call_stmt);
5906 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5907 gimple_call_set_lhs (call_stmt, *to_p);
5908 else if (TREE_CODE (*to_p) == SSA_NAME)
5909 /* The above is somewhat premature, avoid ICEing later for a
5910 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5911 ??? This doesn't make it a default-def. */
5912 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5913
5914 assign = call_stmt;
5915 }
5916 else
5917 {
5918 assign = gimple_build_assign (*to_p, *from_p);
5919 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5920 if (COMPARISON_CLASS_P (*from_p))
5921 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5922 }
5923
5924 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5925 {
5926 /* We should have got an SSA name from the start. */
5927 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5928 || ! gimple_in_ssa_p (cfun));
5929 }
5930
5931 gimplify_seq_add_stmt (pre_p, assign);
5932 gsi = gsi_last (*pre_p);
5933 maybe_fold_stmt (&gsi);
5934
5935 if (want_value)
5936 {
5937 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5938 return GS_OK;
5939 }
5940 else
5941 *expr_p = NULL;
5942
5943 return GS_ALL_DONE;
5944 }
5945
5946 /* Gimplify a comparison between two variable-sized objects. Do this
5947 with a call to BUILT_IN_MEMCMP. */
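/* E.g. (illustrative) for variable-sized objects A and B, the
   comparison A == B is rewritten as
   memcmp (&A, &B, size-expression) == 0, and A != B likewise
   with != 0.  */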
5948
5949 static enum gimplify_status
5950 gimplify_variable_sized_compare (tree *expr_p)
5951 {
5952 location_t loc = EXPR_LOCATION (*expr_p);
5953 tree op0 = TREE_OPERAND (*expr_p, 0);
5954 tree op1 = TREE_OPERAND (*expr_p, 1);
5955 tree t, arg, dest, src, expr;
5956
5957 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5958 arg = unshare_expr (arg);
5959 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5960 src = build_fold_addr_expr_loc (loc, op1);
5961 dest = build_fold_addr_expr_loc (loc, op0);
5962 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5963 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5964
5965 expr
5966 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5967 SET_EXPR_LOCATION (expr, loc);
5968 *expr_p = expr;
5969
5970 return GS_OK;
5971 }
5972
5973 /* Gimplify a comparison between two aggregate objects of integral scalar
5974 mode as a comparison between the bitwise equivalent scalar values. */
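/* E.g. (illustrative) if struct S has integral SImode, the
   comparison s1 == s2 becomes
     VIEW_CONVERT_EXPR<int>(s1) == VIEW_CONVERT_EXPR<int>(s2)
   with the integer type supplied by the type_for_mode langhook.  */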
5975
5976 static enum gimplify_status
5977 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5978 {
5979 location_t loc = EXPR_LOCATION (*expr_p);
5980 tree op0 = TREE_OPERAND (*expr_p, 0);
5981 tree op1 = TREE_OPERAND (*expr_p, 1);
5982
5983 tree type = TREE_TYPE (op0);
5984 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5985
5986 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5987 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5988
5989 *expr_p
5990 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5991
5992 return GS_OK;
5993 }
5994
5995 /* Gimplify an expression sequence. This function gimplifies each
5996 expression and rewrites the original expression with the last
5997 expression of the sequence in GIMPLE form.
5998
5999 PRE_P points to the list where the side effects for all the
6000 expressions in the sequence will be emitted.
6001
6002 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
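/* E.g. (illustrative) gimplifying "(f (), g (), x)" emits f ();
   and g (); to PRE_P and leaves x as the value of the whole
   expression.  */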
6003
6004 static enum gimplify_status
6005 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6006 {
6007 tree t = *expr_p;
6008
6009 do
6010 {
6011 tree *sub_p = &TREE_OPERAND (t, 0);
6012
6013 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6014 gimplify_compound_expr (sub_p, pre_p, false);
6015 else
6016 gimplify_stmt (sub_p, pre_p);
6017
6018 t = TREE_OPERAND (t, 1);
6019 }
6020 while (TREE_CODE (t) == COMPOUND_EXPR);
6021
6022 *expr_p = t;
6023 if (want_value)
6024 return GS_OK;
6025 else
6026 {
6027 gimplify_stmt (expr_p, pre_p);
6028 return GS_ALL_DONE;
6029 }
6030 }
6031
6032 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6033 gimplify. After gimplification, EXPR_P will point to a new temporary
6034 that holds the original value of the SAVE_EXPR node.
6035
6036 PRE_P points to the list where side effects that must happen before
6037 *EXPR_P should be stored. */
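/* E.g. (illustrative) the first gimplification of SAVE_EXPR <f ()>
   emits t = f (); and rewrites the operand to t, so that later
   gimplifications of the same SAVE_EXPR reuse t instead of
   calling f again.  */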
6038
6039 static enum gimplify_status
6040 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6041 {
6042 enum gimplify_status ret = GS_ALL_DONE;
6043 tree val;
6044
6045 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6046 val = TREE_OPERAND (*expr_p, 0);
6047
6048 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6049 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6050 {
6051 /* The operand may be a void-valued expression. It is
6052 being executed only for its side-effects. */
6053 if (TREE_TYPE (val) == void_type_node)
6054 {
6055 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6056 is_gimple_stmt, fb_none);
6057 val = NULL;
6058 }
6059 else
6060 /* The temporary may not be an SSA name as later abnormal and EH
6061 control flow may invalidate use/def domination. When in SSA
6062 form then assume there are no such issues and SAVE_EXPRs only
6063 appear via GENERIC foldings. */
6064 val = get_initialized_tmp_var (val, pre_p, post_p,
6065 gimple_in_ssa_p (cfun));
6066
6067 TREE_OPERAND (*expr_p, 0) = val;
6068 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6069 }
6070
6071 *expr_p = val;
6072
6073 return ret;
6074 }
6075
6076 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6077
6078 unary_expr
6079 : ...
6080 | '&' varname
6081 ...
6082
6083 PRE_P points to the list where side effects that must happen before
6084 *EXPR_P should be stored.
6085
6086 POST_P points to the list where side effects that must happen after
6087 *EXPR_P should be stored. */
6088
6089 static enum gimplify_status
6090 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6091 {
6092 tree expr = *expr_p;
6093 tree op0 = TREE_OPERAND (expr, 0);
6094 enum gimplify_status ret;
6095 location_t loc = EXPR_LOCATION (*expr_p);
6096
6097 switch (TREE_CODE (op0))
6098 {
6099 case INDIRECT_REF:
6100 do_indirect_ref:
6101 /* Check if we are dealing with an expression of the form '&*ptr'.
6102 While the front end folds away '&*ptr' into 'ptr', these
6103 expressions may be generated internally by the compiler (e.g.,
6104 builtins like __builtin_va_end). */
6105 /* Caution: the silent array decomposition semantics we allow for
6106 ADDR_EXPR mean we can't always discard the pair. */
6107 /* Gimplification of the ADDR_EXPR operand may drop
6108 cv-qualification conversions, so make sure we add them if
6109 needed. */
6110 {
6111 tree op00 = TREE_OPERAND (op0, 0);
6112 tree t_expr = TREE_TYPE (expr);
6113 tree t_op00 = TREE_TYPE (op00);
6114
6115 if (!useless_type_conversion_p (t_expr, t_op00))
6116 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6117 *expr_p = op00;
6118 ret = GS_OK;
6119 }
6120 break;
6121
6122 case VIEW_CONVERT_EXPR:
6123 /* Take the address of our operand and then convert it to the type of
6124 this ADDR_EXPR.
6125
6126 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6127 all clear. The impact of this transformation is even less clear. */
6128
6129 /* If the operand is a useless conversion, look through it. Doing so
6130 guarantees that the ADDR_EXPR and its operand will remain of the
6131 same type. */
6132 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6133 op0 = TREE_OPERAND (op0, 0);
6134
6135 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6136 build_fold_addr_expr_loc (loc,
6137 TREE_OPERAND (op0, 0)));
6138 ret = GS_OK;
6139 break;
6140
6141 case MEM_REF:
6142 if (integer_zerop (TREE_OPERAND (op0, 1)))
6143 goto do_indirect_ref;
6144
6145 /* fall through */
6146
6147 default:
6148 /* If we see a call to a declared builtin or see its address
6149 being taken (we can unify those cases here) then we can mark
6150 the builtin for implicit generation by GCC. */
6151 if (TREE_CODE (op0) == FUNCTION_DECL
6152 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6153 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6154 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6155
6156 /* We use fb_either here because the C frontend sometimes takes
6157 the address of a call that returns a struct; see
6158 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6159 the implied temporary explicit. */
6160
6161 /* Make the operand addressable. */
6162 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6163 is_gimple_addressable, fb_either);
6164 if (ret == GS_ERROR)
6165 break;
6166
6167 /* Then mark it. Beware that it may not be possible to do so directly
6168 if a temporary has been created by the gimplification. */
6169 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6170
6171 op0 = TREE_OPERAND (expr, 0);
6172
6173 /* For various reasons, the gimplification of the expression
6174 may have made a new INDIRECT_REF. */
6175 if (TREE_CODE (op0) == INDIRECT_REF)
6176 goto do_indirect_ref;
6177
6178 mark_addressable (TREE_OPERAND (expr, 0));
6179
6180 /* The FEs may end up building ADDR_EXPRs early on a decl with
6181 an incomplete type. Re-build ADDR_EXPRs in canonical form
6182 here. */
6183 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6184 *expr_p = build_fold_addr_expr (op0);
6185
6186 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6187 recompute_tree_invariant_for_addr_expr (*expr_p);
6188
6189 /* If we re-built the ADDR_EXPR add a conversion to the original type
6190 if required. */
6191 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6192 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6193
6194 break;
6195 }
6196
6197 return ret;
6198 }
6199
6200 /* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
6201 values; output operands should be gimple lvalues. */
6202
6203 static enum gimplify_status
6204 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6205 {
6206 tree expr;
6207 int noutputs;
6208 const char **oconstraints;
6209 int i;
6210 tree link;
6211 const char *constraint;
6212 bool allows_mem, allows_reg, is_inout;
6213 enum gimplify_status ret, tret;
6214 gasm *stmt;
6215 vec<tree, va_gc> *inputs;
6216 vec<tree, va_gc> *outputs;
6217 vec<tree, va_gc> *clobbers;
6218 vec<tree, va_gc> *labels;
6219 tree link_next;
6220
6221 expr = *expr_p;
6222 noutputs = list_length (ASM_OUTPUTS (expr));
6223 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6224
6225 inputs = NULL;
6226 outputs = NULL;
6227 clobbers = NULL;
6228 labels = NULL;
6229
6230 ret = GS_ALL_DONE;
6231 link_next = NULL_TREE;
6232 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6233 {
6234 bool ok;
6235 size_t constraint_len;
6236
6237 link_next = TREE_CHAIN (link);
6238
6239 oconstraints[i]
6240 = constraint
6241 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6242 constraint_len = strlen (constraint);
6243 if (constraint_len == 0)
6244 continue;
6245
6246 ok = parse_output_constraint (&constraint, i, 0, 0,
6247 &allows_mem, &allows_reg, &is_inout);
6248 if (!ok)
6249 {
6250 ret = GS_ERROR;
6251 is_inout = false;
6252 }
6253
6254 /* If we can't make copies, we can only accept memory.
6255 Similarly for VLAs. */
6256 tree outtype = TREE_TYPE (TREE_VALUE (link));
6257 if (outtype != error_mark_node
6258 && (TREE_ADDRESSABLE (outtype)
6259 || !COMPLETE_TYPE_P (outtype)
6260 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6261 {
6262 if (allows_mem)
6263 allows_reg = 0;
6264 else
6265 {
6266 error ("impossible constraint in %<asm%>");
6267 error ("non-memory output %d must stay in memory", i);
6268 return GS_ERROR;
6269 }
6270 }
6271
6272 if (!allows_reg && allows_mem)
6273 mark_addressable (TREE_VALUE (link));
6274
6275 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6276 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6277 fb_lvalue | fb_mayfail);
6278 if (tret == GS_ERROR)
6279 {
6280 error ("invalid lvalue in %<asm%> output %d", i);
6281 ret = tret;
6282 }
6283
6284 /* If the constraint does not allow memory, make sure we gimplify
6285 the operand to a register if it is not one already but its base
6286 is. This happens for complex and vector components. */
6287 if (!allows_mem)
6288 {
6289 tree op = TREE_VALUE (link);
6290 if (! is_gimple_val (op)
6291 && is_gimple_reg_type (TREE_TYPE (op))
6292 && is_gimple_reg (get_base_address (op)))
6293 {
6294 tree tem = create_tmp_reg (TREE_TYPE (op));
6295 tree ass;
6296 if (is_inout)
6297 {
6298 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6299 tem, unshare_expr (op));
6300 gimplify_and_add (ass, pre_p);
6301 }
6302 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6303 gimplify_and_add (ass, post_p);
6304
6305 TREE_VALUE (link) = tem;
6306 tret = GS_OK;
6307 }
6308 }
6309
6310 vec_safe_push (outputs, link);
6311 TREE_CHAIN (link) = NULL_TREE;
6312
6313 if (is_inout)
6314 {
6315 /* An input/output operand. To give the optimizers more
6316 flexibility, split it into separate input and output
6317 operands. */
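/* E.g. (illustrative) asm ("incl %0" : "+r" (x)) is handled as
   asm ("incl %0" : "=r" (x) : "0" (x)): an output "=r" plus a
   matching input constraint naming the output's operand number.  */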
6318 tree input;
6319 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6320 char buf[11];
6321
6322 /* Turn the in/out constraint into an output constraint. */
6323 char *p = xstrdup (constraint);
6324 p[0] = '=';
6325 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6326
6327 /* And add a matching input constraint. */
6328 if (allows_reg)
6329 {
6330 sprintf (buf, "%u", i);
6331
6332 /* If there are multiple alternatives in the constraint,
6333 handle each of them individually. Those that allow a register
6334 will be replaced with the operand number; the others will stay
6335 unchanged. */
6336 if (strchr (p, ',') != NULL)
6337 {
6338 size_t len = 0, buflen = strlen (buf);
6339 char *beg, *end, *str, *dst;
6340
6341 for (beg = p + 1;;)
6342 {
6343 end = strchr (beg, ',');
6344 if (end == NULL)
6345 end = strchr (beg, '\0');
6346 if ((size_t) (end - beg) < buflen)
6347 len += buflen + 1;
6348 else
6349 len += end - beg + 1;
6350 if (*end)
6351 beg = end + 1;
6352 else
6353 break;
6354 }
6355
6356 str = (char *) alloca (len);
6357 for (beg = p + 1, dst = str;;)
6358 {
6359 const char *tem;
6360 bool mem_p, reg_p, inout_p;
6361
6362 end = strchr (beg, ',');
6363 if (end)
6364 *end = '\0';
6365 beg[-1] = '=';
6366 tem = beg - 1;
6367 parse_output_constraint (&tem, i, 0, 0,
6368 &mem_p, &reg_p, &inout_p);
6369 if (dst != str)
6370 *dst++ = ',';
6371 if (reg_p)
6372 {
6373 memcpy (dst, buf, buflen);
6374 dst += buflen;
6375 }
6376 else
6377 {
6378 if (end)
6379 len = end - beg;
6380 else
6381 len = strlen (beg);
6382 memcpy (dst, beg, len);
6383 dst += len;
6384 }
6385 if (end)
6386 beg = end + 1;
6387 else
6388 break;
6389 }
6390 *dst = '\0';
6391 input = build_string (dst - str, str);
6392 }
6393 else
6394 input = build_string (strlen (buf), buf);
6395 }
6396 else
6397 input = build_string (constraint_len - 1, constraint + 1);
6398
6399 free (p);
6400
6401 input = build_tree_list (build_tree_list (NULL_TREE, input),
6402 unshare_expr (TREE_VALUE (link)));
6403 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6404 }
6405 }
6406
6407 link_next = NULL_TREE;
6408 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6409 {
6410 link_next = TREE_CHAIN (link);
6411 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6412 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6413 oconstraints, &allows_mem, &allows_reg);
6414
6415 /* If we can't make copies, we can only accept memory. */
6416 tree intype = TREE_TYPE (TREE_VALUE (link));
6417 if (intype != error_mark_node
6418 && (TREE_ADDRESSABLE (intype)
6419 || !COMPLETE_TYPE_P (intype)
6420 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6421 {
6422 if (allows_mem)
6423 allows_reg = 0;
6424 else
6425 {
6426 error ("impossible constraint in %<asm%>");
6427 error ("non-memory input %d must stay in memory", i);
6428 return GS_ERROR;
6429 }
6430 }
6431
6432 /* If the operand is a memory input, it should be an lvalue. */
6433 if (!allows_reg && allows_mem)
6434 {
6435 tree inputv = TREE_VALUE (link);
6436 STRIP_NOPS (inputv);
6437 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6438 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6439 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6440 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6441 || TREE_CODE (inputv) == MODIFY_EXPR)
6442 TREE_VALUE (link) = error_mark_node;
6443 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6444 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6445 if (tret != GS_ERROR)
6446 {
6447 /* Unlike output operands, memory inputs are not guaranteed
6448 to be lvalues by the FE, and while the expressions are
6449 marked addressable there, if it is e.g. a statement
6450 expression, temporaries in it might not end up being
6451 addressable. They might already be used in the IL and thus
6452 it is too late to make them addressable now. */
6453 tree x = TREE_VALUE (link);
6454 while (handled_component_p (x))
6455 x = TREE_OPERAND (x, 0);
6456 if (TREE_CODE (x) == MEM_REF
6457 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6458 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6459 if ((VAR_P (x)
6460 || TREE_CODE (x) == PARM_DECL
6461 || TREE_CODE (x) == RESULT_DECL)
6462 && !TREE_ADDRESSABLE (x)
6463 && is_gimple_reg (x))
6464 {
6465 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6466 input_location), 0,
6467 "memory input %d is not directly addressable",
6468 i);
6469 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6470 }
6471 }
6472 mark_addressable (TREE_VALUE (link));
6473 if (tret == GS_ERROR)
6474 {
6475 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6476 "memory input %d is not directly addressable", i);
6477 ret = tret;
6478 }
6479 }
6480 else
6481 {
6482 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6483 is_gimple_asm_val, fb_rvalue);
6484 if (tret == GS_ERROR)
6485 ret = tret;
6486 }
6487
6488 TREE_CHAIN (link) = NULL_TREE;
6489 vec_safe_push (inputs, link);
6490 }
6491
6492 link_next = NULL_TREE;
6493 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6494 {
6495 link_next = TREE_CHAIN (link);
6496 TREE_CHAIN (link) = NULL_TREE;
6497 vec_safe_push (clobbers, link);
6498 }
6499
6500 link_next = NULL_TREE;
6501 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6502 {
6503 link_next = TREE_CHAIN (link);
6504 TREE_CHAIN (link) = NULL_TREE;
6505 vec_safe_push (labels, link);
6506 }
6507
6508 /* Do not add ASMs with errors to the gimple IL stream. */
6509 if (ret != GS_ERROR)
6510 {
6511 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6512 inputs, outputs, clobbers, labels);
6513
6514 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6515 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6516 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6517
6518 gimplify_seq_add_stmt (pre_p, stmt);
6519 }
6520
6521 return ret;
6522 }
6523
6524 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6525 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6526 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6527 return to this function.
6528
6529 FIXME should we complexify the prequeue handling instead? Or use flags
6530 for all the cleanups and let the optimizer tighten them up? The current
6531 code seems pretty fragile; it will break on a cleanup within any
6532 non-conditional nesting. But any such nesting would be broken, anyway;
6533 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6534 and continues out of it. We can do that at the RTL level, though, so
6535 having an optimizer to tighten up try/finally regions would be a Good
6536 Thing. */
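/* Sketch (illustrative): a gimplified body of the form
     A; WCE <CLEANUP>; B; C;
   is rewritten below as
     A; try { B; C; } finally { CLEANUP; }
   using GIMPLE_TRY_CATCH instead when the cleanup is EH-only.  */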
6537
6538 static enum gimplify_status
6539 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6540 {
6541 gimple_stmt_iterator iter;
6542 gimple_seq body_sequence = NULL;
6543
6544 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6545
6546 /* We only care about the number of conditions between the innermost
6547 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6548 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6549 int old_conds = gimplify_ctxp->conditions;
6550 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6551 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6552 gimplify_ctxp->conditions = 0;
6553 gimplify_ctxp->conditional_cleanups = NULL;
6554 gimplify_ctxp->in_cleanup_point_expr = true;
6555
6556 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6557
6558 gimplify_ctxp->conditions = old_conds;
6559 gimplify_ctxp->conditional_cleanups = old_cleanups;
6560 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6561
6562 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6563 {
6564 gimple *wce = gsi_stmt (iter);
6565
6566 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6567 {
6568 if (gsi_one_before_end_p (iter))
6569 {
6570 /* Note that gsi_insert_seq_before and gsi_remove do not
6571 scan operands, unlike some other sequence mutators. */
6572 if (!gimple_wce_cleanup_eh_only (wce))
6573 gsi_insert_seq_before_without_update (&iter,
6574 gimple_wce_cleanup (wce),
6575 GSI_SAME_STMT);
6576 gsi_remove (&iter, true);
6577 break;
6578 }
6579 else
6580 {
6581 gtry *gtry;
6582 gimple_seq seq;
6583 enum gimple_try_flags kind;
6584
6585 if (gimple_wce_cleanup_eh_only (wce))
6586 kind = GIMPLE_TRY_CATCH;
6587 else
6588 kind = GIMPLE_TRY_FINALLY;
6589 seq = gsi_split_seq_after (iter);
6590
6591 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6592 /* Do not use gsi_replace here, as it may scan operands.
6593 We want to do a simple structural modification only. */
6594 gsi_set_stmt (&iter, gtry);
6595 iter = gsi_start (gtry->eval);
6596 }
6597 }
6598 else
6599 gsi_next (&iter);
6600 }
6601
6602 gimplify_seq_add_seq (pre_p, body_sequence);
6603 if (temp)
6604 {
6605 *expr_p = temp;
6606 return GS_OK;
6607 }
6608 else
6609 {
6610 *expr_p = NULL;
6611 return GS_ALL_DONE;
6612 }
6613 }
6614
6615 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6616 is the cleanup action required. EH_ONLY is true if the cleanup should
6617 only be executed if an exception is thrown, not on normal exit.
6618 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6619 only valid for clobbers. */
6620
6621 static void
6622 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6623 bool force_uncond = false)
6624 {
6625 gimple *wce;
6626 gimple_seq cleanup_stmts = NULL;
6627
6628 /* Errors can result in improperly nested cleanups, which results in
6629 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6630 if (seen_error ())
6631 return;
6632
6633 if (gimple_conditional_context ())
6634 {
6635 /* If we're in a conditional context, this is more complex. We only
6636 want to run the cleanup if we actually ran the initialization that
6637 necessitates it, but we want to run it after the end of the
6638 conditional context. So we wrap the try/finally around the
6639 condition and use a flag to determine whether or not to actually
6640 run the destructor. Thus
6641
6642 test ? f(A()) : 0
6643
6644 becomes (approximately)
6645
6646 flag = 0;
6647 try {
6648 if (test) { A::A(temp); flag = 1; val = f(temp); }
6649 else { val = 0; }
6650 } finally {
6651 if (flag) A::~A(temp);
6652 }
6653 val
6654 */
6655 if (force_uncond)
6656 {
6657 gimplify_stmt (&cleanup, &cleanup_stmts);
6658 wce = gimple_build_wce (cleanup_stmts);
6659 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6660 }
6661 else
6662 {
6663 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6664 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6665 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6666
6667 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6668 gimplify_stmt (&cleanup, &cleanup_stmts);
6669 wce = gimple_build_wce (cleanup_stmts);
6670
6671 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6672 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6673 gimplify_seq_add_stmt (pre_p, ftrue);
6674
6675 /* Because of this manipulation, and the EH edges that jump
6676 threading cannot redirect, the temporary (VAR) will appear
6677 to be used uninitialized. Don't warn. */
6678 TREE_NO_WARNING (var) = 1;
6679 }
6680 }
6681 else
6682 {
6683 gimplify_stmt (&cleanup, &cleanup_stmts);
6684 wce = gimple_build_wce (cleanup_stmts);
6685 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6686 gimplify_seq_add_stmt (pre_p, wce);
6687 }
6688 }
6689
6690 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
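/* Sketch (illustrative): for TARGET_EXPR <D.1234, init> the slot
   D.1234 is registered as a local temporary, the initializer is
   gimplified as D.1234 = init into *PRE_P, any cleanup is pushed
   via gimple_push_cleanup, and the whole expression reduces to
   D.1234.  */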
6691
6692 static enum gimplify_status
6693 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6694 {
6695 tree targ = *expr_p;
6696 tree temp = TARGET_EXPR_SLOT (targ);
6697 tree init = TARGET_EXPR_INITIAL (targ);
6698 enum gimplify_status ret;
6699
6700 bool unpoison_empty_seq = false;
6701 gimple_stmt_iterator unpoison_it;
6702
6703 if (init)
6704 {
6705 tree cleanup = NULL_TREE;
6706
6707 /* TARGET_EXPR temps aren't part of the enclosing block, so add the temp
6708 to the temps list. Also handle variable-length TARGET_EXPRs. */
6709 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6710 {
6711 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6712 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6713 gimplify_vla_decl (temp, pre_p);
6714 }
6715 else
6716 {
6717 /* Save the location where we need to place the unpoisoning. It's
6718 possible that the variable will later become needs_to_live_in_memory. */
6719 unpoison_it = gsi_last (*pre_p);
6720 unpoison_empty_seq = gsi_end_p (unpoison_it);
6721
6722 gimple_add_tmp_var (temp);
6723 }
6724
6725 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6726 expression is supposed to initialize the slot. */
6727 if (VOID_TYPE_P (TREE_TYPE (init)))
6728 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6729 else
6730 {
6731 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6732 init = init_expr;
6733 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6734 init = NULL;
6735 ggc_free (init_expr);
6736 }
6737 if (ret == GS_ERROR)
6738 {
6739 /* PR c++/28266 Make sure this is expanded only once. */
6740 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6741 return GS_ERROR;
6742 }
6743 if (init)
6744 gimplify_and_add (init, pre_p);
6745
6746 /* If needed, push the cleanup for the temp. */
6747 if (TARGET_EXPR_CLEANUP (targ))
6748 {
6749 if (CLEANUP_EH_ONLY (targ))
6750 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6751 CLEANUP_EH_ONLY (targ), pre_p);
6752 else
6753 cleanup = TARGET_EXPR_CLEANUP (targ);
6754 }
6755
6756 /* Add a clobber for the temporary going out of scope, like
6757 gimplify_bind_expr. */
6758 if (gimplify_ctxp->in_cleanup_point_expr
6759 && needs_to_live_in_memory (temp))
6760 {
6761 if (flag_stack_reuse == SR_ALL)
6762 {
6763 tree clobber = build_clobber (TREE_TYPE (temp));
6764 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6765 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6766 }
6767 if (asan_poisoned_variables
6768 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6769 && !TREE_STATIC (temp)
6770 && dbg_cnt (asan_use_after_scope)
6771 && !gimplify_omp_ctxp)
6772 {
6773 tree asan_cleanup = build_asan_poison_call_expr (temp);
6774 if (asan_cleanup)
6775 {
6776 if (unpoison_empty_seq)
6777 unpoison_it = gsi_start (*pre_p);
6778
6779 asan_poison_variable (temp, false, &unpoison_it,
6780 unpoison_empty_seq);
6781 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6782 }
6783 }
6784 }
6785 if (cleanup)
6786 gimple_push_cleanup (temp, cleanup, false, pre_p);
6787
6788 /* Only expand this once. */
6789 TREE_OPERAND (targ, 3) = init;
6790 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6791 }
6792 else
6793 /* We should have expanded this before. */
6794 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6795
6796 *expr_p = temp;
6797 return GS_OK;
6798 }
6799
6800 /* Gimplification of expression trees. */
6801
6802 /* Gimplify an expression which appears at statement context. The
6803 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6804 NULL, a new sequence is allocated.
6805
6806 Return true if we actually added a statement to the queue. */
6807
6808 bool
6809 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6810 {
6811 gimple_seq_node last;
6812
6813 last = gimple_seq_last (*seq_p);
6814 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6815 return last != gimple_seq_last (*seq_p);
6816 }
6817
6818 /* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
6819 parallels. If entries already exist, force them to be some flavor of
6820 private. If there is no enclosing parallel, do nothing. */
6821
6822 void
6823 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6824 {
6825 splay_tree_node n;
6826
6827 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6828 return;
6829
6830 do
6831 {
6832 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6833 if (n != NULL)
6834 {
6835 if (n->value & GOVD_SHARED)
6836 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6837 else if (n->value & GOVD_MAP)
6838 n->value |= GOVD_MAP_TO_ONLY;
6839 else
6840 return;
6841 }
6842 else if ((ctx->region_type & ORT_TARGET) != 0)
6843 {
6844 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6845 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6846 else
6847 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6848 }
6849 else if (ctx->region_type != ORT_WORKSHARE
6850 && ctx->region_type != ORT_TASKGROUP
6851 && ctx->region_type != ORT_SIMD
6852 && ctx->region_type != ORT_ACC
6853 && !(ctx->region_type & ORT_TARGET_DATA))
6854 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6855
6856 ctx = ctx->outer_context;
6857 }
6858 while (ctx);
6859 }
6860
6861 /* Similarly for each of the type sizes of TYPE. */
6862
6863 static void
6864 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6865 {
6866 if (type == NULL || type == error_mark_node)
6867 return;
6868 type = TYPE_MAIN_VARIANT (type);
6869
6870 if (ctx->privatized_types->add (type))
6871 return;
6872
6873 switch (TREE_CODE (type))
6874 {
6875 case INTEGER_TYPE:
6876 case ENUMERAL_TYPE:
6877 case BOOLEAN_TYPE:
6878 case REAL_TYPE:
6879 case FIXED_POINT_TYPE:
6880 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6881 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6882 break;
6883
6884 case ARRAY_TYPE:
6885 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6886 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6887 break;
6888
6889 case RECORD_TYPE:
6890 case UNION_TYPE:
6891 case QUAL_UNION_TYPE:
6892 {
6893 tree field;
6894 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6895 if (TREE_CODE (field) == FIELD_DECL)
6896 {
6897 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6898 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6899 }
6900 }
6901 break;
6902
6903 case POINTER_TYPE:
6904 case REFERENCE_TYPE:
6905 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6906 break;
6907
6908 default:
6909 break;
6910 }
6911
6912 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6913 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6914 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6915 }
6916
6917 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6918
6919 static void
6920 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6921 {
6922 splay_tree_node n;
6923 unsigned int nflags;
6924 tree t;
6925
6926 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6927 return;
6928
6929 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6930 there are constructors involved somewhere. The exception is a shared
6931 clause, where nothing is privatized. */
6932 if ((flags & GOVD_SHARED) == 0
6933 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6934 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6935 flags |= GOVD_SEEN;
6936
6937 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6938 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6939 {
6940 /* We shouldn't be re-adding the decl with the same data
6941 sharing class. */
6942 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6943 nflags = n->value | flags;
6944 /* The only combination of data sharing classes we should see is
6945 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6946 reduction variables to be used in data sharing clauses. */
6947 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6948 || ((nflags & GOVD_DATA_SHARE_CLASS)
6949 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6950 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6951 n->value = nflags;
6952 return;
6953 }
6954
6955 /* When adding a variable-sized variable, we have to handle all sorts
6956 of additional bits of data: the pointer replacement variable, and
6957 the parameters of the type. */
6958 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6959 {
6960 /* Add the pointer replacement variable as PRIVATE if the variable
6961 replacement is private, else FIRSTPRIVATE since we'll need the
6962 address of the original variable either for SHARED, or for the
6963 copy into or out of the context. */
6964 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6965 {
6966 if (flags & GOVD_MAP)
6967 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6968 else if (flags & GOVD_PRIVATE)
6969 nflags = GOVD_PRIVATE;
6970 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6971 && (flags & GOVD_FIRSTPRIVATE))
6972 || (ctx->region_type == ORT_TARGET_DATA
6973 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
6974 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6975 else
6976 nflags = GOVD_FIRSTPRIVATE;
6977 nflags |= flags & GOVD_SEEN;
6978 t = DECL_VALUE_EXPR (decl);
6979 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6980 t = TREE_OPERAND (t, 0);
6981 gcc_assert (DECL_P (t));
6982 omp_add_variable (ctx, t, nflags);
6983 }
6984
6985 /* Add all of the variable and type parameters (which should have
6986 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6987 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6988 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6989 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6990
6991 /* The variable-sized variable itself is never SHARED, only some form
6992 of PRIVATE. The sharing would take place via the pointer variable
6993 which we remapped above. */
6994 if (flags & GOVD_SHARED)
6995 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6996 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6997
6998 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6999 alloca statement we generate for the variable, so make sure it
7000 is available. This isn't automatically needed for the SHARED
7001 case, since we won't be allocating local storage then.
7002 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7003 in this case omp_notice_variable will be called later
7004 on when it is gimplified. */
7005 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7006 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7007 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7008 }
7009 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7010 && lang_hooks.decls.omp_privatize_by_reference (decl))
7011 {
7012 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7013
7014 /* Similar to the direct variable sized case above, we'll need the
7015 size of references being privatized. */
7016 if ((flags & GOVD_SHARED) == 0)
7017 {
7018 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7019 if (DECL_P (t))
7020 omp_notice_variable (ctx, t, true);
7021 }
7022 }
7023
7024 if (n != NULL)
7025 n->value |= flags;
7026 else
7027 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7028
7029 /* For reduction clauses in OpenACC loop directives, by default create a
7030 copy clause on the enclosing parallel construct for carrying back the
7031 results. */
7032 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7033 {
7034 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7035 while (outer_ctx)
7036 {
7037 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7038 if (n != NULL)
7039 {
7040 /* Ignore local variables and explicitly declared clauses. */
7041 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7042 break;
7043 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7044 {
7045 /* According to the OpenACC spec, such a reduction variable
7046 should already have a copy map on a kernels construct;
7047 verify that here. */
7048 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7049 && (n->value & GOVD_MAP));
7050 }
7051 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7052 {
7053 /* Remove firstprivate and make it a copy map. */
7054 n->value &= ~GOVD_FIRSTPRIVATE;
7055 n->value |= GOVD_MAP;
7056 }
7057 }
7058 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7059 {
7060 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7061 GOVD_MAP | GOVD_SEEN);
7062 break;
7063 }
7064 outer_ctx = outer_ctx->outer_context;
7065 }
7066 }
7067 }
7068
7069 /* Notice a threadprivate variable DECL used in OMP context CTX.
7070 This emits diagnostics about threadprivate variable uses in untied
7071 tasks, target regions and %<order(concurrent)%> regions. If DECL2
7072 is non-NULL, prevent this warning on that variable. */
7073
7074 static bool
7075 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7076 tree decl2)
7077 {
7078 splay_tree_node n;
7079 struct gimplify_omp_ctx *octx;
7080
7081 for (octx = ctx; octx; octx = octx->outer_context)
7082 if ((octx->region_type & ORT_TARGET) != 0
7083 || octx->order_concurrent)
7084 {
7085 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7086 if (n == NULL)
7087 {
7088 if (octx->order_concurrent)
7089 {
7090 error ("threadprivate variable %qE used in a region with"
7091 " %<order(concurrent)%> clause", DECL_NAME (decl));
7092 error_at (octx->location, "enclosing region");
7093 }
7094 else
7095 {
7096 error ("threadprivate variable %qE used in target region",
7097 DECL_NAME (decl));
7098 error_at (octx->location, "enclosing target region");
7099 }
7100 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7101 }
7102 if (decl2)
7103 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7104 }
7105
7106 if (ctx->region_type != ORT_UNTIED_TASK)
7107 return false;
7108 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7109 if (n == NULL)
7110 {
7111 error ("threadprivate variable %qE used in untied task",
7112 DECL_NAME (decl));
7113 error_at (ctx->location, "enclosing task");
7114 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7115 }
7116 if (decl2)
7117 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7118 return false;
7119 }
7120
7121 /* Return true if global var DECL is device resident. */
7122
7123 static bool
7124 device_resident_p (tree decl)
7125 {
7126 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7127
7128 if (!attr)
7129 return false;
7130
7131 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7132 {
7133 tree c = TREE_VALUE (t);
7134 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7135 return true;
7136 }
7137
7138 return false;
7139 }
7140
7141 /* Return true if DECL has an ACC DECLARE attribute. */
7142
7143 static bool
7144 is_oacc_declared (tree decl)
7145 {
7146 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7147 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7148 return declared != NULL_TREE;
7149 }
7150
7151 /* Determine outer default flags for DECL mentioned in an OMP region
7152 but not declared in an enclosing clause.
7153
7154 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7155 remapped firstprivate instead of shared. To some extent this is
7156 addressed in omp_firstprivatize_type_sizes, but not
7157 effectively. */
7158
7159 static unsigned
7160 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7161 bool in_code, unsigned flags)
7162 {
7163 enum omp_clause_default_kind default_kind = ctx->default_kind;
7164 enum omp_clause_default_kind kind;
7165
7166 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7167 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7168 default_kind = kind;
7169 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7170 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7171
7172 switch (default_kind)
7173 {
7174 case OMP_CLAUSE_DEFAULT_NONE:
7175 {
7176 const char *rtype;
7177
7178 if (ctx->region_type & ORT_PARALLEL)
7179 rtype = "parallel";
7180 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7181 rtype = "taskloop";
7182 else if (ctx->region_type & ORT_TASK)
7183 rtype = "task";
7184 else if (ctx->region_type & ORT_TEAMS)
7185 rtype = "teams";
7186 else
7187 gcc_unreachable ();
7188
7189 error ("%qE not specified in enclosing %qs",
7190 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7191 error_at (ctx->location, "enclosing %qs", rtype);
7192 }
7193 /* FALLTHRU */
7194 case OMP_CLAUSE_DEFAULT_SHARED:
7195 flags |= GOVD_SHARED;
7196 break;
7197 case OMP_CLAUSE_DEFAULT_PRIVATE:
7198 flags |= GOVD_PRIVATE;
7199 break;
7200 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7201 flags |= GOVD_FIRSTPRIVATE;
7202 break;
7203 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7204 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7205 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
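/* Implicit data-sharing for tasks: if the variable is shared in the
enclosing context up to the innermost parallel or teams region, it
stays shared; otherwise it becomes firstprivate. */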
7206 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7207 {
7208 omp_notice_variable (octx, decl, in_code);
7209 for (; octx; octx = octx->outer_context)
7210 {
7211 splay_tree_node n2;
7212
7213 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7214 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7215 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7216 continue;
7217 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7218 {
7219 flags |= GOVD_FIRSTPRIVATE;
7220 goto found_outer;
7221 }
7222 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7223 {
7224 flags |= GOVD_SHARED;
7225 goto found_outer;
7226 }
7227 }
7228 }
7229
7230 if (TREE_CODE (decl) == PARM_DECL
7231 || (!is_global_var (decl)
7232 && DECL_CONTEXT (decl) == current_function_decl))
7233 flags |= GOVD_FIRSTPRIVATE;
7234 else
7235 flags |= GOVD_SHARED;
7236 found_outer:
7237 break;
7238
7239 default:
7240 gcc_unreachable ();
7241 }
7242
7243 return flags;
7244 }
7245
7246
7247 /* Determine outer default flags for DECL mentioned in an OACC region
7248 but not declared in an enclosing clause. */
7249
7250 static unsigned
7251 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7252 {
7253 const char *rkind;
7254 bool on_device = false;
7255 bool is_private = false;
7256 bool declared = is_oacc_declared (decl);
7257 tree type = TREE_TYPE (decl);
7258
7259 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7260 type = TREE_TYPE (type);
7261
7262 /* For Fortran COMMON blocks, only used variables in those blocks are
7263 transferred and remapped. The block itself will have a private clause
7264 to avoid transferring the data twice.
7265 The hook evaluates to false by default. For a variable in Fortran's
7266 COMMON or EQUIVALENCE block, it returns true (as we have shared=false),
7267 as only the variables in such a COMMON/EQUIVALENCE block shall be
7268 privatized, not the whole block. For C++ and Fortran, it can also be
7269 true under certain other conditions, if DECL_HAS_VALUE_EXPR. */
7270 if (RECORD_OR_UNION_TYPE_P (type))
7271 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7272
7273 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7274 && is_global_var (decl)
7275 && device_resident_p (decl)
7276 && !is_private)
7277 {
7278 on_device = true;
7279 flags |= GOVD_MAP_TO_ONLY;
7280 }
7281
7282 switch (ctx->region_type)
7283 {
7284 case ORT_ACC_KERNELS:
7285 rkind = "kernels";
7286
7287 if (is_private)
7288 flags |= GOVD_FIRSTPRIVATE;
7289 else if (AGGREGATE_TYPE_P (type))
7290 {
7291 /* Aggregates default to 'present_or_copy', or 'present' with default(present). */
7292 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7293 flags |= GOVD_MAP;
7294 else
7295 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7296 }
7297 else
7298 /* Scalars default to 'copy'. */
7299 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7300
7301 break;
7302
7303 case ORT_ACC_PARALLEL:
7304 case ORT_ACC_SERIAL:
7305 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7306
7307 if (is_private)
7308 flags |= GOVD_FIRSTPRIVATE;
7309 else if (on_device || declared)
7310 flags |= GOVD_MAP;
7311 else if (AGGREGATE_TYPE_P (type))
7312 {
7313 /* Aggregates default to 'present_or_copy', or 'present' with default(present). */
7314 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7315 flags |= GOVD_MAP;
7316 else
7317 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7318 }
7319 else
7320 /* Scalars default to 'firstprivate'. */
7321 flags |= GOVD_FIRSTPRIVATE;
7322
7323 break;
7324
7325 default:
7326 gcc_unreachable ();
7327 }
7328
7329 if (DECL_ARTIFICIAL (decl))
7330 ; /* We can get compiler-generated decls, and should not complain
7331 about them. */
7332 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7333 {
7334 error ("%qE not specified in enclosing OpenACC %qs construct",
7335 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7336 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7337 }
7338 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7339 ; /* Handled above. */
7340 else
7341 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7342
7343 return flags;
7344 }
7345
7346 /* Record the fact that DECL was used within the OMP context CTX.
7347 IN_CODE is true when real code uses DECL, and false when we should
7348 merely emit default(none) errors. Return true if DECL is going to
7349 be remapped and thus DECL shouldn't be gimplified into its
7350 DECL_VALUE_EXPR (if any). */
7351
7352 static bool
7353 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7354 {
7355 splay_tree_node n;
7356 unsigned flags = in_code ? GOVD_SEEN : 0;
7357 bool ret = false, shared;
7358
7359 if (error_operand_p (decl))
7360 return false;
7361
7362 if (ctx->region_type == ORT_NONE)
7363 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7364
7365 if (is_global_var (decl))
7366 {
7367 /* Threadprivate variables are predetermined. */
7368 if (DECL_THREAD_LOCAL_P (decl))
7369 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7370
7371 if (DECL_HAS_VALUE_EXPR_P (decl))
7372 {
7373 if (ctx->region_type & ORT_ACC)
7374 /* For OpenACC, defer expansion of value to avoid transferring
7375 privatized common block data instead of the implicitly or
7376 explicitly transferred variables which are in common blocks. */
7377 ;
7378 else
7379 {
7380 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7381
7382 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7383 return omp_notice_threadprivate_variable (ctx, decl, value);
7384 }
7385 }
7386
7387 if (gimplify_omp_ctxp->outer_context == NULL
7388 && VAR_P (decl)
7389 && oacc_get_fn_attrib (current_function_decl))
7390 {
7391 location_t loc = DECL_SOURCE_LOCATION (decl);
7392
7393 if (lookup_attribute ("omp declare target link",
7394 DECL_ATTRIBUTES (decl)))
7395 {
7396 error_at (loc,
7397 "%qE with %<link%> clause used in %<routine%> function",
7398 DECL_NAME (decl));
7399 return false;
7400 }
7401 else if (!lookup_attribute ("omp declare target",
7402 DECL_ATTRIBUTES (decl)))
7403 {
7404 error_at (loc,
7405 "%qE requires a %<declare%> directive for use "
7406 "in a %<routine%> function", DECL_NAME (decl));
7407 return false;
7408 }
7409 }
7410 }
7411
7412 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7413 if ((ctx->region_type & ORT_TARGET) != 0)
7414 {
7415 if (ctx->region_type & ORT_ACC)
7416 /* For OpenACC, as remarked above, defer expansion. */
7417 shared = false;
7418 else
7419 shared = true;
7420
7421 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7422 if (n == NULL)
7423 {
7424 unsigned nflags = flags;
7425 if ((ctx->region_type & ORT_ACC) == 0)
7426 {
7427 bool is_declare_target = false;
7428 if (is_global_var (decl)
7429 && varpool_node::get_create (decl)->offloadable)
7430 {
7431 struct gimplify_omp_ctx *octx;
7432 for (octx = ctx->outer_context;
7433 octx; octx = octx->outer_context)
7434 {
7435 n = splay_tree_lookup (octx->variables,
7436 (splay_tree_key)decl);
7437 if (n
7438 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7439 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7440 break;
7441 }
7442 is_declare_target = octx == NULL;
7443 }
7444 if (!is_declare_target)
7445 {
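/* Classify DECL for the implicit defaultmap categories: pointer
(including references to pointers), scalar, or aggregate. */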
7446 int gdmk;
7447 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7448 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7449 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7450 == POINTER_TYPE)))
7451 gdmk = GDMK_POINTER;
7452 else if (lang_hooks.decls.omp_scalar_p (decl))
7453 gdmk = GDMK_SCALAR;
7454 else
7455 gdmk = GDMK_AGGREGATE;
7456 if (ctx->defaultmap[gdmk] == 0)
7457 {
7458 tree d = lang_hooks.decls.omp_report_decl (decl);
7459 error ("%qE not specified in enclosing %<target%>",
7460 DECL_NAME (d));
7461 error_at (ctx->location, "enclosing %<target%>");
7462 }
7463 else if (ctx->defaultmap[gdmk]
7464 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7465 nflags |= ctx->defaultmap[gdmk];
7466 else
7467 {
7468 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7469 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7470 }
7471 }
7472 }
7473
7474 struct gimplify_omp_ctx *octx = ctx->outer_context;
7475 if ((ctx->region_type & ORT_ACC) && octx)
7476 {
7477 /* Look in outer OpenACC contexts, to see if there's a
7478 data attribute for this variable. */
7479 omp_notice_variable (octx, decl, in_code);
7480
7481 for (; octx; octx = octx->outer_context)
7482 {
7483 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7484 break;
7485 splay_tree_node n2
7486 = splay_tree_lookup (octx->variables,
7487 (splay_tree_key) decl);
7488 if (n2)
7489 {
7490 if (octx->region_type == ORT_ACC_HOST_DATA)
7491 error ("variable %qE declared in enclosing "
7492 "%<host_data%> region", DECL_NAME (decl));
7493 nflags |= GOVD_MAP;
7494 if (octx->region_type == ORT_ACC_DATA
7495 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7496 nflags |= GOVD_MAP_0LEN_ARRAY;
7497 goto found_outer;
7498 }
7499 }
7500 }
7501
7502 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7503 | GOVD_MAP_ALLOC_ONLY)) == flags)
7504 {
7505 tree type = TREE_TYPE (decl);
7506
7507 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7508 && lang_hooks.decls.omp_privatize_by_reference (decl))
7509 type = TREE_TYPE (type);
7510 if (!lang_hooks.types.omp_mappable_type (type))
7511 {
7512 error ("%qD referenced in target region does not have "
7513 "a mappable type", decl);
7514 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7515 }
7516 else
7517 {
7518 if ((ctx->region_type & ORT_ACC) != 0)
7519 nflags = oacc_default_clause (ctx, decl, flags);
7520 else
7521 nflags |= GOVD_MAP;
7522 }
7523 }
7524 found_outer:
7525 omp_add_variable (ctx, decl, nflags);
7526 }
7527 else
7528 {
7529 /* If nothing changed, there's nothing left to do. */
7530 if ((n->value & flags) == flags)
7531 return ret;
7532 flags |= n->value;
7533 n->value = flags;
7534 }
7535 goto do_outer;
7536 }
7537
7538 if (n == NULL)
7539 {
7540 if (ctx->region_type == ORT_WORKSHARE
7541 || ctx->region_type == ORT_TASKGROUP
7542 || ctx->region_type == ORT_SIMD
7543 || ctx->region_type == ORT_ACC
7544 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7545 goto do_outer;
7546
7547 flags = omp_default_clause (ctx, decl, in_code, flags);
7548
7549 if ((flags & GOVD_PRIVATE)
7550 && lang_hooks.decls.omp_private_outer_ref (decl))
7551 flags |= GOVD_PRIVATE_OUTER_REF;
7552
7553 omp_add_variable (ctx, decl, flags);
7554
7555 shared = (flags & GOVD_SHARED) != 0;
7556 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7557 goto do_outer;
7558 }
7559
7560 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7561 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7562 && DECL_SIZE (decl))
7563 {
7564 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7565 {
7566 splay_tree_node n2;
7567 tree t = DECL_VALUE_EXPR (decl);
7568 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7569 t = TREE_OPERAND (t, 0);
7570 gcc_assert (DECL_P (t));
7571 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7572 n2->value |= GOVD_SEEN;
7573 }
7574 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7575 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7576 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7577 != INTEGER_CST))
7578 {
7579 splay_tree_node n2;
7580 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7581 gcc_assert (DECL_P (t));
7582 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7583 if (n2)
7584 omp_notice_variable (ctx, t, true);
7585 }
7586 }
7587
7588 if (ctx->region_type & ORT_ACC)
7589 /* For OpenACC, as remarked above, defer expansion. */
7590 shared = false;
7591 else
7592 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7593 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7594
7595 /* If nothing changed, there's nothing left to do. */
7596 if ((n->value & flags) == flags)
7597 return ret;
7598 flags |= n->value;
7599 n->value = flags;
7600
7601 do_outer:
7602 /* If the variable is private in the current context, then we don't
7603 need to propagate anything to an outer context. */
7604 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7605 return ret;
7606 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7607 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7608 return ret;
7609 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7610 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7611 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7612 return ret;
7613 if (ctx->outer_context
7614 && omp_notice_variable (ctx->outer_context, decl, in_code))
7615 return true;
7616 return ret;
7617 }
7618
7619 /* Verify that DECL is private within CTX. If there's specific information
7620 to the contrary in the innermost scope, generate an error. */
7621
7622 static bool
7623 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7624 {
7625 splay_tree_node n;
7626
7627 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7628 if (n != NULL)
7629 {
7630 if (n->value & GOVD_SHARED)
7631 {
7632 if (ctx == gimplify_omp_ctxp)
7633 {
7634 if (simd)
7635 error ("iteration variable %qE is predetermined linear",
7636 DECL_NAME (decl));
7637 else
7638 error ("iteration variable %qE should be private",
7639 DECL_NAME (decl));
7640 n->value = GOVD_PRIVATE;
7641 return true;
7642 }
7643 else
7644 return false;
7645 }
7646 else if ((n->value & GOVD_EXPLICIT) != 0
7647 && (ctx == gimplify_omp_ctxp
7648 || (ctx->region_type == ORT_COMBINED_PARALLEL
7649 && gimplify_omp_ctxp->outer_context == ctx)))
7650 {
7651 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7652 error ("iteration variable %qE should not be firstprivate",
7653 DECL_NAME (decl));
7654 else if ((n->value & GOVD_REDUCTION) != 0)
7655 error ("iteration variable %qE should not be reduction",
7656 DECL_NAME (decl));
7657 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7658 error ("iteration variable %qE should not be linear",
7659 DECL_NAME (decl));
7660 }
7661 return (ctx == gimplify_omp_ctxp
7662 || (ctx->region_type == ORT_COMBINED_PARALLEL
7663 && gimplify_omp_ctxp->outer_context == ctx));
7664 }
7665
7666 if (ctx->region_type != ORT_WORKSHARE
7667 && ctx->region_type != ORT_TASKGROUP
7668 && ctx->region_type != ORT_SIMD
7669 && ctx->region_type != ORT_ACC)
7670 return false;
7671 else if (ctx->outer_context)
7672 return omp_is_private (ctx->outer_context, decl, simd);
7673 return false;
7674 }
7675
7676 /* Return true if DECL is private within a parallel region
7677 that binds to the current construct's context, or appears in that
7678 parallel region's REDUCTION clause. */
7679
7680 static bool
7681 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7682 {
7683 splay_tree_node n;
7684
7685 do
7686 {
7687 ctx = ctx->outer_context;
7688 if (ctx == NULL)
7689 {
7690 if (is_global_var (decl))
7691 return false;
7692
7693 /* References might be private, but they might be shared too.
7694 When checking for copyprivate, assume they might be
7695 private; otherwise assume they might be shared. */
7696 if (copyprivate)
7697 return true;
7698
7699 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7700 return false;
7701
7702 /* Treat C++ privatized non-static data members outside
7703 of the privatization the same. */
7704 if (omp_member_access_dummy_var (decl))
7705 return false;
7706
7707 return true;
7708 }
7709
7710 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7711
7712 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7713 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7714 continue;
7715
7716 if (n != NULL)
7717 {
7718 if ((n->value & GOVD_LOCAL) != 0
7719 && omp_member_access_dummy_var (decl))
7720 return false;
7721 return (n->value & GOVD_SHARED) == 0;
7722 }
7723 }
7724 while (ctx->region_type == ORT_WORKSHARE
7725 || ctx->region_type == ORT_TASKGROUP
7726 || ctx->region_type == ORT_SIMD
7727 || ctx->region_type == ORT_ACC);
7728 return false;
7729 }
7730
7731 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7732
7733 static tree
7734 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7735 {
7736 tree t = *tp;
7737
7738 /* If this is the DECL_EXPR for the DECL we are looking for, return it. */
7739 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7740 return t;
7741
7742 if (IS_TYPE_OR_DECL_P (t))
7743 *walk_subtrees = 0;
7744 return NULL_TREE;
7745 }
7746
7747 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7748 lower all the depend clauses by populating the corresponding depend
7749 array. Returns 0 if there are no such depend clauses, 2 if all
7750 depend clauses should be removed, or 1 otherwise. */
7751
7752 static int
7753 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7754 {
7755 tree c;
7756 gimple *g;
7757 size_t n[4] = { 0, 0, 0, 0 };
7758 bool unused[4];
7759 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7760 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7761 size_t i, j;
7762 location_t first_loc = UNKNOWN_LOCATION;
7763
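/* First pass: count the depend clauses of each kind. Indices 0-3
stand for out/inout, mutexinoutset, in and depobj respectively. */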
7764 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7765 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7766 {
7767 switch (OMP_CLAUSE_DEPEND_KIND (c))
7768 {
7769 case OMP_CLAUSE_DEPEND_IN:
7770 i = 2;
7771 break;
7772 case OMP_CLAUSE_DEPEND_OUT:
7773 case OMP_CLAUSE_DEPEND_INOUT:
7774 i = 0;
7775 break;
7776 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7777 i = 1;
7778 break;
7779 case OMP_CLAUSE_DEPEND_DEPOBJ:
7780 i = 3;
7781 break;
7782 case OMP_CLAUSE_DEPEND_SOURCE:
7783 case OMP_CLAUSE_DEPEND_SINK:
7784 continue;
7785 default:
7786 gcc_unreachable ();
7787 }
7788 tree t = OMP_CLAUSE_DECL (c);
7789 if (first_loc == UNKNOWN_LOCATION)
7790 first_loc = OMP_CLAUSE_LOCATION (c);
7791 if (TREE_CODE (t) == TREE_LIST
7792 && TREE_PURPOSE (t)
7793 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7794 {
7795 if (TREE_PURPOSE (t) != last_iter)
7796 {
7797 tree tcnt = size_one_node;
7798 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7799 {
7800 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7801 is_gimple_val, fb_rvalue) == GS_ERROR
7802 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7803 is_gimple_val, fb_rvalue) == GS_ERROR
7804 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7805 is_gimple_val, fb_rvalue) == GS_ERROR
7806 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7807 is_gimple_val, fb_rvalue)
7808 == GS_ERROR))
7809 return 2;
7810 tree var = TREE_VEC_ELT (it, 0);
7811 tree begin = TREE_VEC_ELT (it, 1);
7812 tree end = TREE_VEC_ELT (it, 2);
7813 tree step = TREE_VEC_ELT (it, 3);
7814 tree orig_step = TREE_VEC_ELT (it, 4);
7815 tree type = TREE_TYPE (var);
7816 tree stype = TREE_TYPE (step);
7817 location_t loc = DECL_SOURCE_LOCATION (var);
7818 tree endmbegin;
7819 /* Compute count for this iterator as
7820 orig_step > 0
7821 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7822 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7823 and compute product of those for the entire depend
7824 clause. */
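/* E.g. begin=0, end=10, step=3 with positive orig_step gives
(10 - 0 + (3 - 1)) / 3 = 4 iterations. */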
7825 if (POINTER_TYPE_P (type))
7826 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7827 stype, end, begin);
7828 else
7829 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7830 end, begin);
7831 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7832 step,
7833 build_int_cst (stype, 1));
7834 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7835 build_int_cst (stype, 1));
7836 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7837 unshare_expr (endmbegin),
7838 stepm1);
7839 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7840 pos, step);
7841 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7842 endmbegin, stepp1);
7843 if (TYPE_UNSIGNED (stype))
7844 {
7845 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7846 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7847 }
7848 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7849 neg, step);
7850 step = NULL_TREE;
7851 tree cond = fold_build2_loc (loc, LT_EXPR,
7852 boolean_type_node,
7853 begin, end);
7854 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7855 build_int_cst (stype, 0));
7856 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7857 end, begin);
7858 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7859 build_int_cst (stype, 0));
7860 tree osteptype = TREE_TYPE (orig_step);
7861 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7862 orig_step,
7863 build_int_cst (osteptype, 0));
7864 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7865 cond, pos, neg);
7866 cnt = fold_convert_loc (loc, sizetype, cnt);
7867 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7868 fb_rvalue) == GS_ERROR)
7869 return 2;
7870 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7871 }
7872 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7873 fb_rvalue) == GS_ERROR)
7874 return 2;
7875 last_iter = TREE_PURPOSE (t);
7876 last_count = tcnt;
7877 }
7878 if (counts[i] == NULL_TREE)
7879 counts[i] = last_count;
7880 else
7881 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7882 PLUS_EXPR, counts[i], last_count);
7883 }
7884 else
7885 n[i]++;
7886 }
7887 for (i = 0; i < 4; i++)
7888 if (counts[i])
7889 break;
7890 if (i == 4)
7891 return 0;
7892
7893 tree total = size_zero_node;
7894 for (i = 0; i < 4; i++)
7895 {
7896 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7897 if (counts[i] == NULL_TREE)
7898 counts[i] = size_zero_node;
7899 if (n[i])
7900 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7901 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7902 fb_rvalue) == GS_ERROR)
7903 return 2;
7904 total = size_binop (PLUS_EXPR, total, counts[i]);
7905 }
7906
7907 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7908 == GS_ERROR)
7909 return 2;
7910 bool is_old = unused[1] && unused[3];
7911 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7912 size_int (is_old ? 1 : 4));
7913 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7914 tree array = create_tmp_var_raw (type);
7915 TREE_ADDRESSABLE (array) = 1;
7916 if (TREE_CODE (totalpx) != INTEGER_CST)
7917 {
7918 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7919 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7920 if (gimplify_omp_ctxp)
7921 {
7922 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7923 while (ctx
7924 && (ctx->region_type == ORT_WORKSHARE
7925 || ctx->region_type == ORT_TASKGROUP
7926 || ctx->region_type == ORT_SIMD
7927 || ctx->region_type == ORT_ACC))
7928 ctx = ctx->outer_context;
7929 if (ctx)
7930 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7931 }
7932 gimplify_vla_decl (array, pre_p);
7933 }
7934 else
7935 gimple_add_tmp_var (array);
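/* Fill in the array header. With only in/out/inout kinds (is_old) the
header is a single element holding the total count; otherwise it is
five elements: 0, the total, and the out/inout, mutexinoutset and in
counts. The depend addresses are stored right after the header. */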
7936 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7937 NULL_TREE);
7938 tree tem;
7939 if (!is_old)
7940 {
7941 tem = build2 (MODIFY_EXPR, void_type_node, r,
7942 build_int_cst (ptr_type_node, 0));
7943 gimplify_and_add (tem, pre_p);
7944 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7945 NULL_TREE);
7946 }
7947 tem = build2 (MODIFY_EXPR, void_type_node, r,
7948 fold_convert (ptr_type_node, total));
7949 gimplify_and_add (tem, pre_p);
7950 for (i = 1; i < (is_old ? 2 : 4); i++)
7951 {
7952 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7953 NULL_TREE, NULL_TREE);
7954 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7955 gimplify_and_add (tem, pre_p);
7956 }
7957
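/* cnts[i] is a running index of the slot where the next address of
depend kind i is stored, starting just past the header and the slots
reserved for the preceding kinds. */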
7958 tree cnts[4];
7959 for (j = 4; j; j--)
7960 if (!unused[j - 1])
7961 break;
7962 for (i = 0; i < 4; i++)
7963 {
7964 if (i && (i >= j || unused[i - 1]))
7965 {
7966 cnts[i] = cnts[i - 1];
7967 continue;
7968 }
7969 cnts[i] = create_tmp_var (sizetype);
7970 if (i == 0)
7971 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7972 else
7973 {
7974 tree t;
7975 if (is_old)
7976 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7977 else
7978 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7979 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7980 == GS_ERROR)
7981 return 2;
7982 g = gimple_build_assign (cnts[i], t);
7983 }
7984 gimple_seq_add_stmt (pre_p, g);
7985 }
7986
7987 last_iter = NULL_TREE;
7988 tree last_bind = NULL_TREE;
7989 tree *last_body = NULL;
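/* Second pass: walk the clauses again and emit code storing the
address of every depend operand into the array; iterator clauses get
the iterator loops emitted around the stores. */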
7990 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7991 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7992 {
7993 switch (OMP_CLAUSE_DEPEND_KIND (c))
7994 {
7995 case OMP_CLAUSE_DEPEND_IN:
7996 i = 2;
7997 break;
7998 case OMP_CLAUSE_DEPEND_OUT:
7999 case OMP_CLAUSE_DEPEND_INOUT:
8000 i = 0;
8001 break;
8002 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8003 i = 1;
8004 break;
8005 case OMP_CLAUSE_DEPEND_DEPOBJ:
8006 i = 3;
8007 break;
8008 case OMP_CLAUSE_DEPEND_SOURCE:
8009 case OMP_CLAUSE_DEPEND_SINK:
8010 continue;
8011 default:
8012 gcc_unreachable ();
8013 }
8014 tree t = OMP_CLAUSE_DECL (c);
8015 if (TREE_CODE (t) == TREE_LIST
8016 && TREE_PURPOSE (t)
8017 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8018 {
8019 if (TREE_PURPOSE (t) != last_iter)
8020 {
8021 if (last_bind)
8022 gimplify_and_add (last_bind, pre_p);
8023 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8024 last_bind = build3 (BIND_EXPR, void_type_node,
8025 BLOCK_VARS (block), NULL, block);
8026 TREE_SIDE_EFFECTS (last_bind) = 1;
8027 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8028 tree *p = &BIND_EXPR_BODY (last_bind);
8029 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8030 {
8031 tree var = TREE_VEC_ELT (it, 0);
8032 tree begin = TREE_VEC_ELT (it, 1);
8033 tree end = TREE_VEC_ELT (it, 2);
8034 tree step = TREE_VEC_ELT (it, 3);
8035 tree orig_step = TREE_VEC_ELT (it, 4);
8036 tree type = TREE_TYPE (var);
8037 location_t loc = DECL_SOURCE_LOCATION (var);
8038 /* Emit:
8039 var = begin;
8040 goto cond_label;
8041 beg_label:
8042 ...
8043 var = var + step;
8044 cond_label:
8045 if (orig_step > 0) {
8046 if (var < end) goto beg_label;
8047 } else {
8048 if (var > end) goto beg_label;
8049 }
8050 for each iterator, with inner iterators added to
8051 the ... above. */
8052 tree beg_label = create_artificial_label (loc);
8053 tree cond_label = NULL_TREE;
8054 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8055 var, begin);
8056 append_to_statement_list_force (tem, p);
8057 tem = build_and_jump (&cond_label);
8058 append_to_statement_list_force (tem, p);
8059 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8060 append_to_statement_list (tem, p);
8061 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8062 NULL_TREE, NULL_TREE);
8063 TREE_SIDE_EFFECTS (bind) = 1;
8064 SET_EXPR_LOCATION (bind, loc);
8065 append_to_statement_list_force (bind, p);
8066 if (POINTER_TYPE_P (type))
8067 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8068 var, fold_convert_loc (loc, sizetype,
8069 step));
8070 else
8071 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8072 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8073 var, tem);
8074 append_to_statement_list_force (tem, p);
8075 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8076 append_to_statement_list (tem, p);
8077 tree cond = fold_build2_loc (loc, LT_EXPR,
8078 boolean_type_node,
8079 var, end);
8080 tree pos
8081 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8082 cond, build_and_jump (&beg_label),
8083 void_node);
8084 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8085 var, end);
8086 tree neg
8087 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8088 cond, build_and_jump (&beg_label),
8089 void_node);
8090 tree osteptype = TREE_TYPE (orig_step);
8091 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8092 orig_step,
8093 build_int_cst (osteptype, 0));
8094 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8095 cond, pos, neg);
8096 append_to_statement_list_force (tem, p);
8097 p = &BIND_EXPR_BODY (bind);
8098 }
8099 last_body = p;
8100 }
8101 last_iter = TREE_PURPOSE (t);
8102 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8103 {
8104 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8105 0), last_body);
8106 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8107 }
8108 if (error_operand_p (TREE_VALUE (t)))
8109 return 2;
8110 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8111 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8112 NULL_TREE, NULL_TREE);
8113 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8114 void_type_node, r, TREE_VALUE (t));
8115 append_to_statement_list_force (tem, last_body);
8116 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8117 void_type_node, cnts[i],
8118 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8119 append_to_statement_list_force (tem, last_body);
8120 TREE_VALUE (t) = null_pointer_node;
8121 }
8122 else
8123 {
8124 if (last_bind)
8125 {
8126 gimplify_and_add (last_bind, pre_p);
8127 last_bind = NULL_TREE;
8128 }
8129 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8130 {
8131 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8132 NULL, is_gimple_val, fb_rvalue);
8133 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8134 }
8135 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8136 return 2;
8137 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8138 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8139 is_gimple_val, fb_rvalue) == GS_ERROR)
8140 return 2;
8141 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8142 NULL_TREE, NULL_TREE);
8143 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8144 gimplify_and_add (tem, pre_p);
8145 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8146 size_int (1)));
8147 gimple_seq_add_stmt (pre_p, g);
8148 }
8149 }
8150 if (last_bind)
8151 gimplify_and_add (last_bind, pre_p);
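/* Emit a runtime consistency check: trap if the final store indices
do not match the precomputed counts. */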
8152 tree cond = boolean_false_node;
8153 if (is_old)
8154 {
8155 if (!unused[0])
8156 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8157 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8158 size_int (2)));
8159 if (!unused[2])
8160 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8161 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8162 cnts[2],
8163 size_binop_loc (first_loc, PLUS_EXPR,
8164 totalpx,
8165 size_int (1))));
8166 }
8167 else
8168 {
8169 tree prev = size_int (5);
8170 for (i = 0; i < 4; i++)
8171 {
8172 if (unused[i])
8173 continue;
8174 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8175 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8176 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8177 cnts[i], unshare_expr (prev)));
8178 }
8179 }
8180 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8181 build_call_expr_loc (first_loc,
8182 builtin_decl_explicit (BUILT_IN_TRAP),
8183 0), void_node);
8184 gimplify_and_add (tem, pre_p);
8185 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8186 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8187 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8188 OMP_CLAUSE_CHAIN (c) = *list_p;
8189 *list_p = c;
8190 return 1;
8191 }
8192
8193 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8194 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8195 the struct node to insert the new mapping after (when the struct node is
8196 initially created). PREV_NODE is the first of two or three mappings for a
8197 pointer, and is either:
8198 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8199 array section.
8200 - not the node before C. This is true when we have a reference-to-pointer
8201 type (with a mapping for the reference and for the pointer), or for
8202 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8203 If SCP is non-null, the new node is inserted before *SCP.
8204 If SCP is null, the new node is inserted before PREV_NODE.
8205 The return value is:
8206 - PREV_NODE, if SCP is non-null.
8207 - The newly-created ALLOC or RELEASE node, if SCP is null.
8208 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8209 reference to a pointer. */
8210
8211 static tree
8212 insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
8213 tree prev_node, tree *scp)
8214 {
8215 enum gomp_map_kind mkind
8216 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8217 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8218
8219 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8220 tree cl = scp ? prev_node : c2;
8221 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8222 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
8223 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
8224 if (OMP_CLAUSE_CHAIN (prev_node) != c
8225 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8226 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8227 == GOMP_MAP_TO_PSET))
8228 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
8229 else
8230 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8231 if (struct_node)
8232 OMP_CLAUSE_CHAIN (struct_node) = c2;
8233
8234 /* We might need to create an additional mapping if we have a reference to a
8235 pointer (in C++). Don't do this if we have something other than a
8236 GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET. */
8237 if (OMP_CLAUSE_CHAIN (prev_node) != c
8238 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8239 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8240 == GOMP_MAP_ALWAYS_POINTER)
8241 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8242 == GOMP_MAP_ATTACH_DETACH)))
8243 {
8244 tree c4 = OMP_CLAUSE_CHAIN (prev_node);
8245 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8246 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8247 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
8248 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8249 OMP_CLAUSE_CHAIN (c3) = prev_node;
8250 if (!scp)
8251 OMP_CLAUSE_CHAIN (c2) = c3;
8252 else
8253 cl = c3;
8254 }
8255
8256 if (scp)
8257 *scp = c2;
8258
8259 return cl;
8260 }
8261
8262 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8263 and set *BITPOSP to the bit position and *POFFSETP to the byte offset of the access.
8264 If BASE_REF is non-NULL and the containing object is a reference, set
8265 *BASE_REF to that reference before dereferencing the object.
8266 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8267 has array type, else return NULL. */
8268
8269 static tree
8270 extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
8271 poly_offset_int *poffsetp)
8272 {
8273 tree offset;
8274 poly_int64 bitsize, bitpos;
8275 machine_mode mode;
8276 int unsignedp, reversep, volatilep = 0;
8277 poly_offset_int poffset;
8278
8279 if (base_ref)
8280 {
8281 *base_ref = NULL_TREE;
8282
8283 while (TREE_CODE (base) == ARRAY_REF)
8284 base = TREE_OPERAND (base, 0);
8285
8286 if (TREE_CODE (base) == INDIRECT_REF)
8287 base = TREE_OPERAND (base, 0);
8288 }
8289 else
8290 {
8291 if (TREE_CODE (base) == ARRAY_REF)
8292 {
8293 while (TREE_CODE (base) == ARRAY_REF)
8294 base = TREE_OPERAND (base, 0);
8295 if (TREE_CODE (base) != COMPONENT_REF
8296 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
8297 return NULL_TREE;
8298 }
8299 else if (TREE_CODE (base) == INDIRECT_REF
8300 && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
8301 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8302 == REFERENCE_TYPE))
8303 base = TREE_OPERAND (base, 0);
8304 }
8305
8306 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8307 &unsignedp, &reversep, &volatilep);
8308
8309 tree orig_base = base;
8310
8311 if ((TREE_CODE (base) == INDIRECT_REF
8312 || (TREE_CODE (base) == MEM_REF
8313 && integer_zerop (TREE_OPERAND (base, 1))))
8314 && DECL_P (TREE_OPERAND (base, 0))
8315 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
8316 base = TREE_OPERAND (base, 0);
8317
8318 gcc_assert (offset == NULL_TREE || poly_int_tree_p (offset));
8319
8320 if (offset)
8321 poffset = wi::to_poly_offset (offset);
8322 else
8323 poffset = 0;
8324
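/* BITPOS is counted in bits; fold its whole-byte part into the byte
offset, while *BITPOSP below still reports the full bit position. */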
8325 if (maybe_ne (bitpos, 0))
8326 poffset += bits_to_bytes_round_down (bitpos);
8327
8328 *bitposp = bitpos;
8329 *poffsetp = poffset;
8330
8331 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8332 if (base_ref && orig_base != base)
8333 *base_ref = orig_base;
8334
8335 return base;
8336 }
8337
8338 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8339 omp context, and into enclosing omp contexts where needed. */
8340
8341 static void
8342 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8343 enum omp_region_type region_type,
8344 enum tree_code code)
8345 {
8346 struct gimplify_omp_ctx *ctx, *outer_ctx;
8347 tree c;
8348 hash_map<tree, tree> *struct_map_to_clause = NULL;
8349 hash_set<tree> *struct_deref_set = NULL;
8350 tree *prev_list_p = NULL, *orig_list_p = list_p;
8351 int handled_depend_iterators = -1;
8352 int nowait = -1;
8353
8354 ctx = new_omp_context (region_type);
8355 ctx->code = code;
8356 outer_ctx = ctx->outer_context;
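/* Set up the implicit defaultmap categories for target constructs:
scalars default to firstprivate and, except for Fortran, pointers to
zero-length array section maps. */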
8357 if (code == OMP_TARGET)
8358 {
8359 if (!lang_GNU_Fortran ())
8360 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8361 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8362 }
8363 if (!lang_GNU_Fortran ())
8364 switch (code)
8365 {
8366 case OMP_TARGET:
8367 case OMP_TARGET_DATA:
8368 case OMP_TARGET_ENTER_DATA:
8369 case OMP_TARGET_EXIT_DATA:
8370 case OACC_DECLARE:
8371 case OACC_HOST_DATA:
8372 case OACC_PARALLEL:
8373 case OACC_KERNELS:
8374 ctx->target_firstprivatize_array_bases = true;
8375 default:
8376 break;
8377 }
8378
8379 while ((c = *list_p) != NULL)
8380 {
8381 bool remove = false;
8382 bool notice_outer = true;
8383 const char *check_non_private = NULL;
8384 unsigned int flags;
8385 tree decl;
8386
8387 switch (OMP_CLAUSE_CODE (c))
8388 {
8389 case OMP_CLAUSE_PRIVATE:
8390 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8391 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8392 {
8393 flags |= GOVD_PRIVATE_OUTER_REF;
8394 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8395 }
8396 else
8397 notice_outer = false;
8398 goto do_add;
8399 case OMP_CLAUSE_SHARED:
8400 flags = GOVD_SHARED | GOVD_EXPLICIT;
8401 goto do_add;
8402 case OMP_CLAUSE_FIRSTPRIVATE:
8403 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8404 check_non_private = "firstprivate";
8405 goto do_add;
8406 case OMP_CLAUSE_LASTPRIVATE:
8407 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8408 switch (code)
8409 {
8410 case OMP_DISTRIBUTE:
8411 error_at (OMP_CLAUSE_LOCATION (c),
8412 "conditional %<lastprivate%> clause on "
8413 "%qs construct", "distribute");
8414 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8415 break;
8416 case OMP_TASKLOOP:
8417 error_at (OMP_CLAUSE_LOCATION (c),
8418 "conditional %<lastprivate%> clause on "
8419 "%qs construct", "taskloop");
8420 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8421 break;
8422 default:
8423 break;
8424 }
8425 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8426 if (code != OMP_LOOP)
8427 check_non_private = "lastprivate";
8428 decl = OMP_CLAUSE_DECL (c);
8429 if (error_operand_p (decl))
8430 goto do_add;
8431 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8432 && !lang_hooks.decls.omp_scalar_p (decl))
8433 {
8434 error_at (OMP_CLAUSE_LOCATION (c),
8435 "non-scalar variable %qD in conditional "
8436 "%<lastprivate%> clause", decl);
8437 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8438 }
8439 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8440 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
8441 if (outer_ctx
8442 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8443 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8444 == ORT_COMBINED_TEAMS))
8445 && splay_tree_lookup (outer_ctx->variables,
8446 (splay_tree_key) decl) == NULL)
8447 {
8448 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8449 if (outer_ctx->outer_context)
8450 omp_notice_variable (outer_ctx->outer_context, decl, true);
8451 }
8452 else if (outer_ctx
8453 && (outer_ctx->region_type & ORT_TASK) != 0
8454 && outer_ctx->combined_loop
8455 && splay_tree_lookup (outer_ctx->variables,
8456 (splay_tree_key) decl) == NULL)
8457 {
8458 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8459 if (outer_ctx->outer_context)
8460 omp_notice_variable (outer_ctx->outer_context, decl, true);
8461 }
8462 else if (outer_ctx
8463 && (outer_ctx->region_type == ORT_WORKSHARE
8464 || outer_ctx->region_type == ORT_ACC)
8465 && outer_ctx->combined_loop
8466 && splay_tree_lookup (outer_ctx->variables,
8467 (splay_tree_key) decl) == NULL
8468 && !omp_check_private (outer_ctx, decl, false))
8469 {
8470 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8471 if (outer_ctx->outer_context
8472 && (outer_ctx->outer_context->region_type
8473 == ORT_COMBINED_PARALLEL)
8474 && splay_tree_lookup (outer_ctx->outer_context->variables,
8475 (splay_tree_key) decl) == NULL)
8476 {
8477 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8478 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8479 if (octx->outer_context)
8480 {
8481 octx = octx->outer_context;
8482 if (octx->region_type == ORT_WORKSHARE
8483 && octx->combined_loop
8484 && splay_tree_lookup (octx->variables,
8485 (splay_tree_key) decl) == NULL
8486 && !omp_check_private (octx, decl, false))
8487 {
8488 omp_add_variable (octx, decl,
8489 GOVD_LASTPRIVATE | GOVD_SEEN);
8490 octx = octx->outer_context;
8491 if (octx
8492 && ((octx->region_type & ORT_COMBINED_TEAMS)
8493 == ORT_COMBINED_TEAMS)
8494 && (splay_tree_lookup (octx->variables,
8495 (splay_tree_key) decl)
8496 == NULL))
8497 {
8498 omp_add_variable (octx, decl,
8499 GOVD_SHARED | GOVD_SEEN);
8500 octx = octx->outer_context;
8501 }
8502 }
8503 if (octx)
8504 omp_notice_variable (octx, decl, true);
8505 }
8506 }
8507 else if (outer_ctx->outer_context)
8508 omp_notice_variable (outer_ctx->outer_context, decl, true);
8509 }
8510 goto do_add;
8511 case OMP_CLAUSE_REDUCTION:
8512 if (OMP_CLAUSE_REDUCTION_TASK (c))
8513 {
8514 if (region_type == ORT_WORKSHARE)
8515 {
8516 if (nowait == -1)
8517 nowait = omp_find_clause (*list_p,
8518 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8519 if (nowait
8520 && (outer_ctx == NULL
8521 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8522 {
8523 error_at (OMP_CLAUSE_LOCATION (c),
8524 "%<task%> reduction modifier on a construct "
8525 "with a %<nowait%> clause");
8526 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8527 }
8528 }
8529 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8530 {
8531 error_at (OMP_CLAUSE_LOCATION (c),
8532 "invalid %<task%> reduction modifier on construct "
8533 "other than %<parallel%>, %<for%> or %<sections%>");
8534 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8535 }
8536 }
8537 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8538 switch (code)
8539 {
8540 case OMP_SECTIONS:
8541 error_at (OMP_CLAUSE_LOCATION (c),
8542 "%<inscan%> %<reduction%> clause on "
8543 "%qs construct", "sections");
8544 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8545 break;
8546 case OMP_PARALLEL:
8547 error_at (OMP_CLAUSE_LOCATION (c),
8548 "%<inscan%> %<reduction%> clause on "
8549 "%qs construct", "parallel");
8550 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8551 break;
8552 case OMP_TEAMS:
8553 error_at (OMP_CLAUSE_LOCATION (c),
8554 "%<inscan%> %<reduction%> clause on "
8555 "%qs construct", "teams");
8556 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8557 break;
8558 case OMP_TASKLOOP:
8559 error_at (OMP_CLAUSE_LOCATION (c),
8560 "%<inscan%> %<reduction%> clause on "
8561 "%qs construct", "taskloop");
8562 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8563 break;
8564 default:
8565 break;
8566 }
8567 /* FALLTHRU */
8568 case OMP_CLAUSE_IN_REDUCTION:
8569 case OMP_CLAUSE_TASK_REDUCTION:
8570 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8571 /* OpenACC permits reductions on private variables. */
8572 if (!(region_type & ORT_ACC)
8573 /* taskgroup is actually not a worksharing region. */
8574 && code != OMP_TASKGROUP)
8575 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8576 decl = OMP_CLAUSE_DECL (c);
8577 if (TREE_CODE (decl) == MEM_REF)
8578 {
8579 tree type = TREE_TYPE (decl);
8580 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8581 NULL, is_gimple_val, fb_rvalue, false)
8582 == GS_ERROR)
8583 {
8584 remove = true;
8585 break;
8586 }
8587 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8588 if (DECL_P (v))
8589 {
8590 omp_firstprivatize_variable (ctx, v);
8591 omp_notice_variable (ctx, v, true);
8592 }
8593 decl = TREE_OPERAND (decl, 0);
8594 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8595 {
8596 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8597 NULL, is_gimple_val, fb_rvalue, false)
8598 == GS_ERROR)
8599 {
8600 remove = true;
8601 break;
8602 }
8603 v = TREE_OPERAND (decl, 1);
8604 if (DECL_P (v))
8605 {
8606 omp_firstprivatize_variable (ctx, v);
8607 omp_notice_variable (ctx, v, true);
8608 }
8609 decl = TREE_OPERAND (decl, 0);
8610 }
8611 if (TREE_CODE (decl) == ADDR_EXPR
8612 || TREE_CODE (decl) == INDIRECT_REF)
8613 decl = TREE_OPERAND (decl, 0);
8614 }
8615 goto do_add_decl;
8616 case OMP_CLAUSE_LINEAR:
8617 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8618 is_gimple_val, fb_rvalue) == GS_ERROR)
8619 {
8620 remove = true;
8621 break;
8622 }
8623 else
8624 {
8625 if (code == OMP_SIMD
8626 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8627 {
8628 struct gimplify_omp_ctx *octx = outer_ctx;
8629 if (octx
8630 && octx->region_type == ORT_WORKSHARE
8631 && octx->combined_loop
8632 && !octx->distribute)
8633 {
8634 if (octx->outer_context
8635 && (octx->outer_context->region_type
8636 == ORT_COMBINED_PARALLEL))
8637 octx = octx->outer_context->outer_context;
8638 else
8639 octx = octx->outer_context;
8640 }
8641 if (octx
8642 && octx->region_type == ORT_WORKSHARE
8643 && octx->combined_loop
8644 && octx->distribute)
8645 {
8646 error_at (OMP_CLAUSE_LOCATION (c),
8647 "%<linear%> clause for variable other than "
8648 "loop iterator specified on construct "
8649 "combined with %<distribute%>");
8650 remove = true;
8651 break;
8652 }
8653 }
8654 /* For combined #pragma omp parallel for simd, we need to put
8655 lastprivate and perhaps firstprivate too on the
8656 parallel. Similarly for #pragma omp for simd. */
8657 struct gimplify_omp_ctx *octx = outer_ctx;
8658 decl = NULL_TREE;
8659 do
8660 {
8661 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8662 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8663 break;
8664 decl = OMP_CLAUSE_DECL (c);
8665 if (error_operand_p (decl))
8666 {
8667 decl = NULL_TREE;
8668 break;
8669 }
8670 flags = GOVD_SEEN;
8671 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8672 flags |= GOVD_FIRSTPRIVATE;
8673 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8674 flags |= GOVD_LASTPRIVATE;
8675 if (octx
8676 && octx->region_type == ORT_WORKSHARE
8677 && octx->combined_loop)
8678 {
8679 if (octx->outer_context
8680 && (octx->outer_context->region_type
8681 == ORT_COMBINED_PARALLEL))
8682 octx = octx->outer_context;
8683 else if (omp_check_private (octx, decl, false))
8684 break;
8685 }
8686 else if (octx
8687 && (octx->region_type & ORT_TASK) != 0
8688 && octx->combined_loop)
8689 ;
8690 else if (octx
8691 && octx->region_type == ORT_COMBINED_PARALLEL
8692 && ctx->region_type == ORT_WORKSHARE
8693 && octx == outer_ctx)
8694 flags = GOVD_SEEN | GOVD_SHARED;
8695 else if (octx
8696 && ((octx->region_type & ORT_COMBINED_TEAMS)
8697 == ORT_COMBINED_TEAMS))
8698 flags = GOVD_SEEN | GOVD_SHARED;
8699 else if (octx
8700 && octx->region_type == ORT_COMBINED_TARGET)
8701 {
8702 flags &= ~GOVD_LASTPRIVATE;
8703 if (flags == GOVD_SEEN)
8704 break;
8705 }
8706 else
8707 break;
8708 splay_tree_node on
8709 = splay_tree_lookup (octx->variables,
8710 (splay_tree_key) decl);
8711 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8712 {
8713 octx = NULL;
8714 break;
8715 }
8716 omp_add_variable (octx, decl, flags);
8717 if (octx->outer_context == NULL)
8718 break;
8719 octx = octx->outer_context;
8720 }
8721 while (1);
8722 if (octx
8723 && decl
8724 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8725 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8726 omp_notice_variable (octx, decl, true);
8727 }
8728 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8729 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8730 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8731 {
8732 notice_outer = false;
8733 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8734 }
8735 goto do_add;
8736
8737 case OMP_CLAUSE_MAP:
8738 decl = OMP_CLAUSE_DECL (c);
8739 if (error_operand_p (decl))
8740 remove = true;
8741 switch (code)
8742 {
8743 case OMP_TARGET:
8744 break;
8745 case OACC_DATA:
8746 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8747 break;
8748 /* FALLTHRU */
8749 case OMP_TARGET_DATA:
8750 case OMP_TARGET_ENTER_DATA:
8751 case OMP_TARGET_EXIT_DATA:
8752 case OACC_HOST_DATA:
8753 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8754 || (OMP_CLAUSE_MAP_KIND (c)
8755 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8756 /* For target {,enter ,exit }data only the array slice is
8757 mapped, but not the pointer to it. */
8758 remove = true;
8759 break;
8760 case OACC_ENTER_DATA:
8761 case OACC_EXIT_DATA:
8762 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8763 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET
8764 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8765 || (OMP_CLAUSE_MAP_KIND (c)
8766 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8767 remove = true;
8768 break;
8769 default:
8770 break;
8771 }
8772 /* For Fortran, not only the pointer to the data is mapped but also
8773 the address of the pointer, the array descriptor etc.; for
8774 'exit data' - and in particular for 'delete:' - having an 'alloc:'
8775 does not make sense. Likewise, for 'update' only transferring the
8776 data itself is needed as the rest has been handled in previous
8777 directives. */
8778 if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
8779 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8780 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
8781 remove = true;
8782
8783 if (remove)
8784 break;
8785 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8786 {
8787 struct gimplify_omp_ctx *octx;
8788 for (octx = outer_ctx; octx; octx = octx->outer_context)
8789 {
8790 if (octx->region_type != ORT_ACC_HOST_DATA)
8791 break;
8792 splay_tree_node n2
8793 = splay_tree_lookup (octx->variables,
8794 (splay_tree_key) decl);
8795 if (n2)
8796 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8797 "declared in enclosing %<host_data%> region",
8798 DECL_NAME (decl));
8799 }
8800 }
8801 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8802 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8803 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8804 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8805 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8806 {
8807 remove = true;
8808 break;
8809 }
8810 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8811 || (OMP_CLAUSE_MAP_KIND (c)
8812 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8813 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8814 {
8815 OMP_CLAUSE_SIZE (c)
8816 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8817 false);
8818 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8819 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8820 }
8821 if (!DECL_P (decl))
8822 {
8823 tree d = decl, *pd;
8824 if (TREE_CODE (d) == ARRAY_REF)
8825 {
8826 while (TREE_CODE (d) == ARRAY_REF)
8827 d = TREE_OPERAND (d, 0);
8828 if (TREE_CODE (d) == COMPONENT_REF
8829 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8830 decl = d;
8831 }
8832 pd = &OMP_CLAUSE_DECL (c);
8833 if (d == decl
8834 && TREE_CODE (decl) == INDIRECT_REF
8835 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8836 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8837 == REFERENCE_TYPE))
8838 {
8839 pd = &TREE_OPERAND (decl, 0);
8840 decl = TREE_OPERAND (decl, 0);
8841 }
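	      /* Peel COMPONENT_REFs (and any pointer dereference) off the
		 clause decl to find the underlying base variable.  */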
8842 bool indir_p = false;
8843 tree orig_decl = decl;
8844 tree decl_ref = NULL_TREE;
8845 if ((region_type & ORT_ACC) != 0
8846 && TREE_CODE (*pd) == COMPONENT_REF
8847 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
8848 && code != OACC_UPDATE)
8849 {
8850 while (TREE_CODE (decl) == COMPONENT_REF)
8851 {
8852 decl = TREE_OPERAND (decl, 0);
8853 if ((TREE_CODE (decl) == MEM_REF
8854 && integer_zerop (TREE_OPERAND (decl, 1)))
8855 || INDIRECT_REF_P (decl))
8856 {
8857 indir_p = true;
8858 decl = TREE_OPERAND (decl, 0);
8859 }
8860 if (TREE_CODE (decl) == INDIRECT_REF
8861 && DECL_P (TREE_OPERAND (decl, 0))
8862 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8863 == REFERENCE_TYPE))
8864 {
8865 decl_ref = decl;
8866 decl = TREE_OPERAND (decl, 0);
8867 }
8868 }
8869 }
8870 else if (TREE_CODE (decl) == COMPONENT_REF)
8871 {
8872 while (TREE_CODE (decl) == COMPONENT_REF)
8873 decl = TREE_OPERAND (decl, 0);
8874 if (TREE_CODE (decl) == INDIRECT_REF
8875 && DECL_P (TREE_OPERAND (decl, 0))
8876 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8877 == REFERENCE_TYPE))
8878 decl = TREE_OPERAND (decl, 0);
8879 }
8880 if (decl != orig_decl && DECL_P (decl) && indir_p)
8881 {
8882 gomp_map_kind k = (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
8883 : GOMP_MAP_ATTACH;
8884 /* We have a dereference of a struct member. Make this an
8885 attach/detach operation, and ensure the base pointer is
8886 mapped as a FIRSTPRIVATE_POINTER. */
8887 OMP_CLAUSE_SET_MAP_KIND (c, k);
8888 flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
8889 tree next_clause = OMP_CLAUSE_CHAIN (c);
8890 if (k == GOMP_MAP_ATTACH
8891 && code != OACC_ENTER_DATA
8892 && (!next_clause
8893 || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
8894 || (OMP_CLAUSE_MAP_KIND (next_clause)
8895 != GOMP_MAP_POINTER)
8896 || OMP_CLAUSE_DECL (next_clause) != decl)
8897 && (!struct_deref_set
8898 || !struct_deref_set->contains (decl)))
8899 {
8900 if (!struct_deref_set)
8901 struct_deref_set = new hash_set<tree> ();
8902 /* As well as the attach, we also need a
8903 FIRSTPRIVATE_POINTER clause to properly map the
8904 pointer to the struct base. */
8905 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8906 OMP_CLAUSE_MAP);
8907 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
8908 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
8909 = 1;
8910 tree charptr_zero
8911 = build_int_cst (build_pointer_type (char_type_node),
8912 0);
8913 OMP_CLAUSE_DECL (c2)
8914 = build2 (MEM_REF, char_type_node,
8915 decl_ref ? decl_ref : decl, charptr_zero);
8916 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8917 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8918 OMP_CLAUSE_MAP);
8919 OMP_CLAUSE_SET_MAP_KIND (c3,
8920 GOMP_MAP_FIRSTPRIVATE_POINTER);
8921 OMP_CLAUSE_DECL (c3) = decl;
8922 OMP_CLAUSE_SIZE (c3) = size_zero_node;
8923 tree mapgrp = *prev_list_p;
8924 *prev_list_p = c2;
8925 OMP_CLAUSE_CHAIN (c3) = mapgrp;
8926 OMP_CLAUSE_CHAIN (c2) = c3;
8927
8928 struct_deref_set->add (decl);
8929 }
8930 goto do_add_decl;
8931 }
8932 /* An "attach/detach" operation on an update directive should
8933 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
8934 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
8935 depends on the previous mapping. */
8936 if (code == OACC_UPDATE
8937 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8938 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
8939 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
8940 == GS_ERROR)
8941 {
8942 remove = true;
8943 break;
8944 }
8945 if (DECL_P (decl)
8946 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
8947 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
8948 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
8949 && code != OACC_UPDATE)
8950 {
8951 if (error_operand_p (decl))
8952 {
8953 remove = true;
8954 break;
8955 }
8956
8957 tree stype = TREE_TYPE (decl);
8958 if (TREE_CODE (stype) == REFERENCE_TYPE)
8959 stype = TREE_TYPE (stype);
8960 if (TYPE_SIZE_UNIT (stype) == NULL
8961 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
8962 {
8963 error_at (OMP_CLAUSE_LOCATION (c),
8964 "mapping field %qE of variable length "
8965 "structure", OMP_CLAUSE_DECL (c));
8966 remove = true;
8967 break;
8968 }
8969
8970 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
8971 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8972 {
8973 /* Error recovery. */
8974 if (prev_list_p == NULL)
8975 {
8976 remove = true;
8977 break;
8978 }
8979 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8980 {
8981 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8982 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8983 {
8984 remove = true;
8985 break;
8986 }
8987 }
8988 }
8989
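	      /* Find the base variable and the component's position within
		 it; the offsets are used below to keep the component maps
		 sorted.  */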
8990 poly_offset_int offset1;
8991 poly_int64 bitpos1;
8992 tree base_ref;
8993
8994 tree base
8995 = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
8996 &bitpos1, &offset1);
8997
8998 gcc_assert (base == decl);
8999
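	      /* Check whether the base variable is already known in this
		 context.  */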
9000 splay_tree_node n
9001 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
9002 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
9003 == GOMP_MAP_ALWAYS_POINTER);
9004 bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
9005 == GOMP_MAP_ATTACH_DETACH);
9006 bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
9007 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
9008 bool has_attachments = false;
9009 /* For OpenACC, pointers in structs should trigger an
9010 attach action. */
9011 if (attach_detach && (region_type & ORT_ACC) != 0)
9012 {
9013 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9014 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9015 have detected a case that needs a GOMP_MAP_STRUCT
9016 mapping added. */
9017 gomp_map_kind k
9018 = (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
9019 : GOMP_MAP_ATTACH;
9020 OMP_CLAUSE_SET_MAP_KIND (c, k);
9021 has_attachments = true;
9022 }
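	      /* First mapping of this struct base: create the
		 GOMP_MAP_STRUCT (or, for attach/detach,
		 GOMP_MAP_FORCE_PRESENT) clause that heads the group of
		 component mappings.  */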
9023 if (n == NULL || (n->value & GOVD_MAP) == 0)
9024 {
9025 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9026 OMP_CLAUSE_MAP);
9027 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
9028 : GOMP_MAP_STRUCT;
9029
9030 OMP_CLAUSE_SET_MAP_KIND (l, k);
9031 if (base_ref)
9032 OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
9033 else
9034 OMP_CLAUSE_DECL (l) = decl;
9035 OMP_CLAUSE_SIZE (l)
9036 = (!attach
9037 ? size_int (1)
9038 : DECL_P (OMP_CLAUSE_DECL (l))
9039 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
9040 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
9041 if (struct_map_to_clause == NULL)
9042 struct_map_to_clause = new hash_map<tree, tree>;
9043 struct_map_to_clause->put (decl, l);
9044 if (ptr || attach_detach)
9045 {
9046 insert_struct_comp_map (code, c, l, *prev_list_p,
9047 NULL);
9048 *prev_list_p = l;
9049 prev_list_p = NULL;
9050 }
9051 else
9052 {
9053 OMP_CLAUSE_CHAIN (l) = c;
9054 *list_p = l;
9055 list_p = &OMP_CLAUSE_CHAIN (l);
9056 }
9057 if (base_ref && code == OMP_TARGET)
9058 {
9059 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9060 OMP_CLAUSE_MAP);
9061 enum gomp_map_kind mkind
9062 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9063 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9064 OMP_CLAUSE_DECL (c2) = decl;
9065 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9066 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
9067 OMP_CLAUSE_CHAIN (l) = c2;
9068 }
9069 flags = GOVD_MAP | GOVD_EXPLICIT;
9070 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9071 || ptr
9072 || attach_detach)
9073 flags |= GOVD_SEEN;
9074 if (has_attachments)
9075 flags |= GOVD_MAP_HAS_ATTACHMENTS;
9076 goto do_add_decl;
9077 }
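	      /* The struct base has been seen before: splice this component
		 into the existing group, keeping the components ordered by
		 their offset within the struct.  */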
9078 else if (struct_map_to_clause)
9079 {
9080 tree *osc = struct_map_to_clause->get (decl);
9081 tree *sc = NULL, *scp = NULL;
9082 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9083 || ptr
9084 || attach_detach)
9085 n->value |= GOVD_SEEN;
9086 sc = &OMP_CLAUSE_CHAIN (*osc);
9087 if (*sc != c
9088 && (OMP_CLAUSE_MAP_KIND (*sc)
9089 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9090 sc = &OMP_CLAUSE_CHAIN (*sc);
9091 /* Here "prev_list_p" is the end of the inserted
9092 alloc/release nodes after the struct node, OSC. */
9093 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
9094 if ((ptr || attach_detach) && sc == prev_list_p)
9095 break;
9096 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9097 != COMPONENT_REF
9098 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9099 != INDIRECT_REF)
9100 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9101 != ARRAY_REF))
9102 break;
9103 else
9104 {
9105 tree sc_decl = OMP_CLAUSE_DECL (*sc);
9106 poly_offset_int offsetn;
9107 poly_int64 bitposn;
9108 tree base
9109 = extract_base_bit_offset (sc_decl, NULL,
9110 &bitposn, &offsetn);
9111 if (base != decl)
9112 break;
9113 if (scp)
9114 continue;
9115 tree d1 = OMP_CLAUSE_DECL (*sc);
9116 tree d2 = OMP_CLAUSE_DECL (c);
9117 while (TREE_CODE (d1) == ARRAY_REF)
9118 d1 = TREE_OPERAND (d1, 0);
9119 while (TREE_CODE (d2) == ARRAY_REF)
9120 d2 = TREE_OPERAND (d2, 0);
9121 if (TREE_CODE (d1) == INDIRECT_REF)
9122 d1 = TREE_OPERAND (d1, 0);
9123 if (TREE_CODE (d2) == INDIRECT_REF)
9124 d2 = TREE_OPERAND (d2, 0);
9125 while (TREE_CODE (d1) == COMPONENT_REF)
9126 if (TREE_CODE (d2) == COMPONENT_REF
9127 && TREE_OPERAND (d1, 1)
9128 == TREE_OPERAND (d2, 1))
9129 {
9130 d1 = TREE_OPERAND (d1, 0);
9131 d2 = TREE_OPERAND (d2, 0);
9132 }
9133 else
9134 break;
9135 if (d1 == d2)
9136 {
9137 error_at (OMP_CLAUSE_LOCATION (c),
9138 "%qE appears more than once in map "
9139 "clauses", OMP_CLAUSE_DECL (c));
9140 remove = true;
9141 break;
9142 }
9143 if (maybe_lt (offset1, offsetn)
9144 || (known_eq (offset1, offsetn)
9145 && maybe_lt (bitpos1, bitposn)))
9146 {
9147 if (ptr || attach_detach)
9148 scp = sc;
9149 else
9150 break;
9151 }
9152 }
9153 if (remove)
9154 break;
9155 if (!attach)
9156 OMP_CLAUSE_SIZE (*osc)
9157 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
9158 size_one_node);
9159 if (ptr || attach_detach)
9160 {
9161 tree cl = insert_struct_comp_map (code, c, NULL,
9162 *prev_list_p, scp);
9163 if (sc == prev_list_p)
9164 {
9165 *sc = cl;
9166 prev_list_p = NULL;
9167 }
9168 else
9169 {
9170 *prev_list_p = OMP_CLAUSE_CHAIN (c);
9171 list_p = prev_list_p;
9172 prev_list_p = NULL;
9173 OMP_CLAUSE_CHAIN (c) = *sc;
9174 *sc = cl;
9175 continue;
9176 }
9177 }
9178 else if (*sc != c)
9179 {
9180 *list_p = OMP_CLAUSE_CHAIN (c);
9181 OMP_CLAUSE_CHAIN (c) = *sc;
9182 *sc = c;
9183 continue;
9184 }
9185 }
9186 }
9187 if (!remove
9188 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
9189 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
9190 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9191 && OMP_CLAUSE_CHAIN (c)
9192 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
9193 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9194 == GOMP_MAP_ALWAYS_POINTER)
9195 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9196 == GOMP_MAP_ATTACH_DETACH)
9197 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9198 == GOMP_MAP_TO_PSET)))
9199 prev_list_p = list_p;
9200
9201 break;
9202 }
9203 flags = GOVD_MAP | GOVD_EXPLICIT;
9204 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
9205 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
9206 flags |= GOVD_MAP_ALWAYS_TO;
9207 goto do_add;
9208
9209 case OMP_CLAUSE_DEPEND:
9210 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9211 {
9212 tree deps = OMP_CLAUSE_DECL (c);
9213 while (deps && TREE_CODE (deps) == TREE_LIST)
9214 {
9215 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
9216 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
9217 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
9218 pre_p, NULL, is_gimple_val, fb_rvalue);
9219 deps = TREE_CHAIN (deps);
9220 }
9221 break;
9222 }
9223 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9224 break;
9225 if (handled_depend_iterators == -1)
9226 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
9227 if (handled_depend_iterators)
9228 {
9229 if (handled_depend_iterators == 2)
9230 remove = true;
9231 break;
9232 }
9233 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9234 {
9235 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9236 NULL, is_gimple_val, fb_rvalue);
9237 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9238 }
9239 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9240 {
9241 remove = true;
9242 break;
9243 }
9244 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9245 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9246 is_gimple_val, fb_rvalue) == GS_ERROR)
9247 {
9248 remove = true;
9249 break;
9250 }
9251 break;
9252
9253 case OMP_CLAUSE_TO:
9254 case OMP_CLAUSE_FROM:
9255 case OMP_CLAUSE__CACHE_:
9256 decl = OMP_CLAUSE_DECL (c);
9257 if (error_operand_p (decl))
9258 {
9259 remove = true;
9260 break;
9261 }
9262 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9263 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9264 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9265 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9266 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9267 {
9268 remove = true;
9269 break;
9270 }
9271 if (!DECL_P (decl))
9272 {
9273 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
9274 NULL, is_gimple_lvalue, fb_lvalue)
9275 == GS_ERROR)
9276 {
9277 remove = true;
9278 break;
9279 }
9280 break;
9281 }
9282 goto do_notice;
9283
9284 case OMP_CLAUSE_USE_DEVICE_PTR:
9285 case OMP_CLAUSE_USE_DEVICE_ADDR:
9286 flags = GOVD_EXPLICIT;
9287 goto do_add;
9288
9289 case OMP_CLAUSE_IS_DEVICE_PTR:
9290 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9291 goto do_add;
9292
9293 do_add:
9294 decl = OMP_CLAUSE_DECL (c);
9295 do_add_decl:
9296 if (error_operand_p (decl))
9297 {
9298 remove = true;
9299 break;
9300 }
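      /* Artificial member-access variables have no name; borrow the name of
	 the underlying member so diagnostics stay readable.  */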
9301 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
9302 {
9303 tree t = omp_member_access_dummy_var (decl);
9304 if (t)
9305 {
9306 tree v = DECL_VALUE_EXPR (decl);
9307 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
9308 if (outer_ctx)
9309 omp_notice_variable (outer_ctx, t, true);
9310 }
9311 }
9312 if (code == OACC_DATA
9313 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9314 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9315 flags |= GOVD_MAP_0LEN_ARRAY;
9316 omp_add_variable (ctx, decl, flags);
9317 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9318 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9319 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9320 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9321 {
9322 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
9323 GOVD_LOCAL | GOVD_SEEN);
9324 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
9325 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
9326 find_decl_expr,
9327 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9328 NULL) == NULL_TREE)
9329 omp_add_variable (ctx,
9330 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9331 GOVD_LOCAL | GOVD_SEEN);
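	  /* Gimplify the INIT and MERGE expressions of the reduction in this
	     construct's context, stashing the resulting sequences on the
	     clause.  */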
9332 gimplify_omp_ctxp = ctx;
9333 push_gimplify_context ();
9334
9335 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9336 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9337
9338 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
9339 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
9340 pop_gimplify_context
9341 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
9342 push_gimplify_context ();
9343 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
9344 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9345 pop_gimplify_context
9346 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
9347 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
9348 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
9349
9350 gimplify_omp_ctxp = outer_ctx;
9351 }
9352 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9353 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
9354 {
9355 gimplify_omp_ctxp = ctx;
9356 push_gimplify_context ();
9357 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
9358 {
9359 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9360 NULL, NULL);
9361 TREE_SIDE_EFFECTS (bind) = 1;
9362 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
9363 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
9364 }
9365 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
9366 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
9367 pop_gimplify_context
9368 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
9369 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
9370
9371 gimplify_omp_ctxp = outer_ctx;
9372 }
9373 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9374 && OMP_CLAUSE_LINEAR_STMT (c))
9375 {
9376 gimplify_omp_ctxp = ctx;
9377 push_gimplify_context ();
9378 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9379 {
9380 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9381 NULL, NULL);
9382 TREE_SIDE_EFFECTS (bind) = 1;
9383 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9384 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9385 }
9386 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9387 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9388 pop_gimplify_context
9389 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9390 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9391
9392 gimplify_omp_ctxp = outer_ctx;
9393 }
9394 if (notice_outer)
9395 goto do_notice;
9396 break;
9397
9398 case OMP_CLAUSE_COPYIN:
9399 case OMP_CLAUSE_COPYPRIVATE:
9400 decl = OMP_CLAUSE_DECL (c);
9401 if (error_operand_p (decl))
9402 {
9403 remove = true;
9404 break;
9405 }
9406 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9407 && !remove
9408 && !omp_check_private (ctx, decl, true))
9409 {
9410 remove = true;
9411 if (is_global_var (decl))
9412 {
9413 if (DECL_THREAD_LOCAL_P (decl))
9414 remove = false;
9415 else if (DECL_HAS_VALUE_EXPR_P (decl))
9416 {
9417 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9418
9419 if (value
9420 && DECL_P (value)
9421 && DECL_THREAD_LOCAL_P (value))
9422 remove = false;
9423 }
9424 }
9425 if (remove)
9426 error_at (OMP_CLAUSE_LOCATION (c),
9427 "copyprivate variable %qE is not threadprivate"
9428 " or private in outer context", DECL_NAME (decl));
9429 }
9430 do_notice:
9431 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9432 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9433 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9434 && outer_ctx
9435 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9436 || (region_type == ORT_WORKSHARE
9437 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9438 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
9439 || code == OMP_LOOP)))
9440 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
9441 || (code == OMP_LOOP
9442 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9443 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
9444 == ORT_COMBINED_TEAMS))))
9445 {
9446 splay_tree_node on
9447 = splay_tree_lookup (outer_ctx->variables,
9448 (splay_tree_key)decl);
9449 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9450 {
9451 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9452 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9453 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9454 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9455 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9456 == POINTER_TYPE))))
9457 omp_firstprivatize_variable (outer_ctx, decl);
9458 else
9459 omp_add_variable (outer_ctx, decl,
9460 GOVD_SEEN | GOVD_SHARED);
9461 omp_notice_variable (outer_ctx, decl, true);
9462 }
9463 }
9464 if (outer_ctx)
9465 omp_notice_variable (outer_ctx, decl, true);
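      /* Certain clauses require that the variable not be private in the
	 enclosing context; diagnose violations.  */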
9466 if (check_non_private
9467 && region_type == ORT_WORKSHARE
9468 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9469 || decl == OMP_CLAUSE_DECL (c)
9470 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9471 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9472 == ADDR_EXPR
9473 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9474 == POINTER_PLUS_EXPR
9475 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9476 (OMP_CLAUSE_DECL (c), 0), 0))
9477 == ADDR_EXPR)))))
9478 && omp_check_private (ctx, decl, false))
9479 {
9480 error ("%s variable %qE is private in outer context",
9481 check_non_private, DECL_NAME (decl));
9482 remove = true;
9483 }
9484 break;
9485
9486 case OMP_CLAUSE_IF:
9487 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9488 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9489 {
9490 const char *p[2];
9491 for (int i = 0; i < 2; i++)
9492 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9493 {
9494 case VOID_CST: p[i] = "cancel"; break;
9495 case OMP_PARALLEL: p[i] = "parallel"; break;
9496 case OMP_SIMD: p[i] = "simd"; break;
9497 case OMP_TASK: p[i] = "task"; break;
9498 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9499 case OMP_TARGET_DATA: p[i] = "target data"; break;
9500 case OMP_TARGET: p[i] = "target"; break;
9501 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9502 case OMP_TARGET_ENTER_DATA:
9503 p[i] = "target enter data"; break;
9504 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9505 default: gcc_unreachable ();
9506 }
9507 error_at (OMP_CLAUSE_LOCATION (c),
9508 "expected %qs %<if%> clause modifier rather than %qs",
9509 p[0], p[1]);
9510 remove = true;
9511 }
9512 /* Fall through. */
9513
9514 case OMP_CLAUSE_FINAL:
9515 OMP_CLAUSE_OPERAND (c, 0)
9516 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9517 /* Fall through. */
9518
9519 case OMP_CLAUSE_SCHEDULE:
9520 case OMP_CLAUSE_NUM_THREADS:
9521 case OMP_CLAUSE_NUM_TEAMS:
9522 case OMP_CLAUSE_THREAD_LIMIT:
9523 case OMP_CLAUSE_DIST_SCHEDULE:
9524 case OMP_CLAUSE_DEVICE:
9525 case OMP_CLAUSE_PRIORITY:
9526 case OMP_CLAUSE_GRAINSIZE:
9527 case OMP_CLAUSE_NUM_TASKS:
9528 case OMP_CLAUSE_HINT:
9529 case OMP_CLAUSE_ASYNC:
9530 case OMP_CLAUSE_WAIT:
9531 case OMP_CLAUSE_NUM_GANGS:
9532 case OMP_CLAUSE_NUM_WORKERS:
9533 case OMP_CLAUSE_VECTOR_LENGTH:
9534 case OMP_CLAUSE_WORKER:
9535 case OMP_CLAUSE_VECTOR:
9536 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9537 is_gimple_val, fb_rvalue) == GS_ERROR)
9538 remove = true;
9539 break;
9540
9541 case OMP_CLAUSE_GANG:
9542 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9543 is_gimple_val, fb_rvalue) == GS_ERROR)
9544 remove = true;
9545 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9546 is_gimple_val, fb_rvalue) == GS_ERROR)
9547 remove = true;
9548 break;
9549
9550 case OMP_CLAUSE_NOWAIT:
9551 nowait = 1;
9552 break;
9553
9554 case OMP_CLAUSE_ORDERED:
9555 case OMP_CLAUSE_UNTIED:
9556 case OMP_CLAUSE_COLLAPSE:
9557 case OMP_CLAUSE_TILE:
9558 case OMP_CLAUSE_AUTO:
9559 case OMP_CLAUSE_SEQ:
9560 case OMP_CLAUSE_INDEPENDENT:
9561 case OMP_CLAUSE_MERGEABLE:
9562 case OMP_CLAUSE_PROC_BIND:
9563 case OMP_CLAUSE_SAFELEN:
9564 case OMP_CLAUSE_SIMDLEN:
9565 case OMP_CLAUSE_NOGROUP:
9566 case OMP_CLAUSE_THREADS:
9567 case OMP_CLAUSE_SIMD:
9568 case OMP_CLAUSE_BIND:
9569 case OMP_CLAUSE_IF_PRESENT:
9570 case OMP_CLAUSE_FINALIZE:
9571 break;
9572
9573 case OMP_CLAUSE_ORDER:
9574 ctx->order_concurrent = true;
9575 break;
9576
9577 case OMP_CLAUSE_DEFAULTMAP:
9578 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9579 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9580 {
9581 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9582 gdmkmin = GDMK_SCALAR;
9583 gdmkmax = GDMK_POINTER;
9584 break;
9585 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9586 gdmkmin = gdmkmax = GDMK_SCALAR;
9587 break;
9588 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9589 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9590 break;
9591 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9592 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9593 break;
9594 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9595 gdmkmin = gdmkmax = GDMK_POINTER;
9596 break;
9597 default:
9598 gcc_unreachable ();
9599 }
9600 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9601 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9602 {
9603 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9604 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9605 break;
9606 case OMP_CLAUSE_DEFAULTMAP_TO:
9607 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9608 break;
9609 case OMP_CLAUSE_DEFAULTMAP_FROM:
9610 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9611 break;
9612 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9613 ctx->defaultmap[gdmk] = GOVD_MAP;
9614 break;
9615 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9616 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9617 break;
9618 case OMP_CLAUSE_DEFAULTMAP_NONE:
9619 ctx->defaultmap[gdmk] = 0;
9620 break;
9621 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9622 switch (gdmk)
9623 {
9624 case GDMK_SCALAR:
9625 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9626 break;
9627 case GDMK_AGGREGATE:
9628 case GDMK_ALLOCATABLE:
9629 ctx->defaultmap[gdmk] = GOVD_MAP;
9630 break;
9631 case GDMK_POINTER:
9632 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9633 break;
9634 default:
9635 gcc_unreachable ();
9636 }
9637 break;
9638 default:
9639 gcc_unreachable ();
9640 }
9641 break;
9642
9643 case OMP_CLAUSE_ALIGNED:
9644 decl = OMP_CLAUSE_DECL (c);
9645 if (error_operand_p (decl))
9646 {
9647 remove = true;
9648 break;
9649 }
9650 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9651 is_gimple_val, fb_rvalue) == GS_ERROR)
9652 {
9653 remove = true;
9654 break;
9655 }
9656 if (!is_global_var (decl)
9657 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9658 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9659 break;
9660
9661 case OMP_CLAUSE_NONTEMPORAL:
9662 decl = OMP_CLAUSE_DECL (c);
9663 if (error_operand_p (decl))
9664 {
9665 remove = true;
9666 break;
9667 }
9668 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9669 break;
9670
9671 case OMP_CLAUSE_DEFAULT:
9672 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9673 break;
9674
9675 case OMP_CLAUSE_INCLUSIVE:
9676 case OMP_CLAUSE_EXCLUSIVE:
9677 decl = OMP_CLAUSE_DECL (c);
9678 {
9679 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
9680 (splay_tree_key) decl);
9681 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
9682 {
9683 error_at (OMP_CLAUSE_LOCATION (c),
9684 "%qD specified in %qs clause but not in %<inscan%> "
9685 "%<reduction%> clause on the containing construct",
9686 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
9687 remove = true;
9688 }
9689 else
9690 {
9691 n->value |= GOVD_REDUCTION_INSCAN;
9692 if (outer_ctx->region_type == ORT_SIMD
9693 && outer_ctx->outer_context
9694 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
9695 {
9696 n = splay_tree_lookup (outer_ctx->outer_context->variables,
9697 (splay_tree_key) decl);
9698 if (n && (n->value & GOVD_REDUCTION) != 0)
9699 n->value |= GOVD_REDUCTION_INSCAN;
9700 }
9701 }
9702 }
9703 break;
9704
9705 default:
9706 gcc_unreachable ();
9707 }
9708
9709 if (code == OACC_DATA
9710 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9711 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9712 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9713 remove = true;
9714 if (remove)
9715 *list_p = OMP_CLAUSE_CHAIN (c);
9716 else
9717 list_p = &OMP_CLAUSE_CHAIN (c);
9718 }
9719
9720 ctx->clauses = *orig_list_p;
9721 gimplify_omp_ctxp = ctx;
9722 if (struct_map_to_clause)
9723 delete struct_map_to_clause;
9724 if (struct_deref_set)
9725 delete struct_deref_set;
9726 }
9727
9728 /* Return true if DECL is a candidate for shared to firstprivate
9729 optimization. We only consider non-addressable scalars that are
9730 not too large and are not references. */
9731
9732 static bool
9733 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9734 {
9735 if (TREE_ADDRESSABLE (decl))
9736 return false;
9737 tree type = TREE_TYPE (decl);
9738 if (!is_gimple_reg_type (type)
9739 || TREE_CODE (type) == REFERENCE_TYPE
9740 || TREE_ADDRESSABLE (type))
9741 return false;
9742 /* Don't optimize overly large decls, since each thread/task will
9743 have its own copy. */
9744 HOST_WIDE_INT len = int_size_in_bytes (type);
9745 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9746 return false;
9747 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9748 return false;
9749 return true;
9750 }
9751
9752 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9753 For an omp_shared_to_firstprivate_optimizable_decl_p DECL, mark it as
9754 GOVD_WRITTEN in outer contexts. */
9755
9756 static void
9757 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9758 {
9759 for (; ctx; ctx = ctx->outer_context)
9760 {
9761 splay_tree_node n = splay_tree_lookup (ctx->variables,
9762 (splay_tree_key) decl);
9763 if (n == NULL)
9764 continue;
9765 else if (n->value & GOVD_SHARED)
9766 {
9767 n->value |= GOVD_WRITTEN;
9768 return;
9769 }
9770 else if (n->value & GOVD_DATA_SHARE_CLASS)
9771 return;
9772 }
9773 }
9774
9775 /* Helper callback for walk_gimple_seq to discover possible stores
9776 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9777 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
9778 context. */
9779
9780 static tree
9781 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9782 {
9783 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9784
9785 *walk_subtrees = 0;
9786 if (!wi->is_lhs)
9787 return NULL_TREE;
9788
9789 tree op = *tp;
9790 do
9791 {
9792 if (handled_component_p (op))
9793 op = TREE_OPERAND (op, 0);
9794 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9795 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9796 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9797 else
9798 break;
9799 }
9800 while (1);
9801 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9802 return NULL_TREE;
9803
9804 omp_mark_stores (gimplify_omp_ctxp, op);
9805 return NULL_TREE;
9806 }
9807
9808 /* Helper callback for walk_gimple_seq to discover possible stores
9809 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9810 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
9811 context. */
9812
9813 static tree
9814 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9815 bool *handled_ops_p,
9816 struct walk_stmt_info *wi)
9817 {
9818 gimple *stmt = gsi_stmt (*gsi_p);
9819 switch (gimple_code (stmt))
9820 {
9821 /* Don't recurse into OpenMP constructs whose bodies
9822 gimplify_adjust_omp_clauses has already handled; the one
9823 exception is gimple_omp_for_pre_body. */
9824 case GIMPLE_OMP_FOR:
9825 *handled_ops_p = true;
9826 if (gimple_omp_for_pre_body (stmt))
9827 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9828 omp_find_stores_stmt, omp_find_stores_op, wi);
9829 break;
9830 case GIMPLE_OMP_PARALLEL:
9831 case GIMPLE_OMP_TASK:
9832 case GIMPLE_OMP_SECTIONS:
9833 case GIMPLE_OMP_SINGLE:
9834 case GIMPLE_OMP_TARGET:
9835 case GIMPLE_OMP_TEAMS:
9836 case GIMPLE_OMP_CRITICAL:
9837 *handled_ops_p = true;
9838 break;
9839 default:
9840 break;
9841 }
9842 return NULL_TREE;
9843 }
9844
9845 struct gimplify_adjust_omp_clauses_data
9846 {
9847 tree *list_p;
9848 gimple_seq *pre_p;
9849 };
9850
9851 /* For all variables that were not actually used within the context,
9852 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
9853
9854 static int
9855 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
9856 {
9857 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
9858 gimple_seq *pre_p
9859 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
9860 tree decl = (tree) n->key;
9861 unsigned flags = n->value;
9862 enum omp_clause_code code;
9863 tree clause;
9864 bool private_debug;
9865
9866 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9867 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
9868 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
9869 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
9870 return 0;
9871 if ((flags & GOVD_SEEN) == 0)
9872 return 0;
9873 if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
9874 return 0;
9875 if (flags & GOVD_DEBUG_PRIVATE)
9876 {
9877 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
9878 private_debug = true;
9879 }
9880 else if (flags & GOVD_MAP)
9881 private_debug = false;
9882 else
9883 private_debug
9884 = lang_hooks.decls.omp_private_debug_clause (decl,
9885 !!(flags & GOVD_SHARED));
9886 if (private_debug)
9887 code = OMP_CLAUSE_PRIVATE;
9888 else if (flags & GOVD_MAP)
9889 {
9890 code = OMP_CLAUSE_MAP;
9891 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9892 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9893 {
9894 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
9895 return 0;
9896 }
9897 }
9898 else if (flags & GOVD_SHARED)
9899 {
9900 if (is_global_var (decl))
9901 {
9902 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9903 while (ctx != NULL)
9904 {
9905 splay_tree_node on
9906 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9907 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
9908 | GOVD_PRIVATE | GOVD_REDUCTION
9909 | GOVD_LINEAR | GOVD_MAP)) != 0)
9910 break;
9911 ctx = ctx->outer_context;
9912 }
9913 if (ctx == NULL)
9914 return 0;
9915 }
9916 code = OMP_CLAUSE_SHARED;
9917 }
9918 else if (flags & GOVD_PRIVATE)
9919 code = OMP_CLAUSE_PRIVATE;
9920 else if (flags & GOVD_FIRSTPRIVATE)
9921 {
9922 code = OMP_CLAUSE_FIRSTPRIVATE;
9923 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
9924 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9925 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9926 {
9927 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9928 "%<target%> construct", decl);
9929 return 0;
9930 }
9931 }
9932 else if (flags & GOVD_LASTPRIVATE)
9933 code = OMP_CLAUSE_LASTPRIVATE;
9934 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
9935 return 0;
9936 else if (flags & GOVD_CONDTEMP)
9937 {
9938 code = OMP_CLAUSE__CONDTEMP_;
9939 gimple_add_tmp_var (decl);
9940 }
9941 else
9942 gcc_unreachable ();
9943
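  /* A lastprivate variable, and a shared one that is written, behave as
     stores; propagate GOVD_WRITTEN to the enclosing contexts.  */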
9944 if (((flags & GOVD_LASTPRIVATE)
9945 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
9946 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9947 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9948
9949 tree chain = *list_p;
9950 clause = build_omp_clause (input_location, code);
9951 OMP_CLAUSE_DECL (clause) = decl;
9952 OMP_CLAUSE_CHAIN (clause) = chain;
9953 if (private_debug)
9954 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
9955 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
9956 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
9957 else if (code == OMP_CLAUSE_SHARED
9958 && (flags & GOVD_WRITTEN) == 0
9959 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9960 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
9961 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
9962 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
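  /* A zero-length array section: map a zero-sized alloc of the dereferenced
     pointer and pass the pointer itself as GOMP_MAP_FIRSTPRIVATE_POINTER.  */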
9963 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
9964 {
9965 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
9966 OMP_CLAUSE_DECL (nc) = decl;
9967 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9968 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9969 OMP_CLAUSE_DECL (clause)
9970 = build_simple_mem_ref_loc (input_location, decl);
9971 OMP_CLAUSE_DECL (clause)
9972 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
9973 build_int_cst (build_pointer_type (char_type_node), 0));
9974 OMP_CLAUSE_SIZE (clause) = size_zero_node;
9975 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9976 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
9977 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
9978 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9979 OMP_CLAUSE_CHAIN (nc) = chain;
9980 OMP_CLAUSE_CHAIN (clause) = nc;
9981 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9982 gimplify_omp_ctxp = ctx->outer_context;
9983 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
9984 pre_p, NULL, is_gimple_val, fb_rvalue);
9985 gimplify_omp_ctxp = ctx;
9986 }
9987 else if (code == OMP_CLAUSE_MAP)
9988 {
9989 int kind;
9990 /* Not all combinations of these GOVD_MAP flags are actually valid. */
9991 switch (flags & (GOVD_MAP_TO_ONLY
9992 | GOVD_MAP_FORCE
9993 | GOVD_MAP_FORCE_PRESENT
9994 | GOVD_MAP_ALLOC_ONLY
9995 | GOVD_MAP_FROM_ONLY))
9996 {
9997 case 0:
9998 kind = GOMP_MAP_TOFROM;
9999 break;
10000 case GOVD_MAP_FORCE:
10001 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
10002 break;
10003 case GOVD_MAP_TO_ONLY:
10004 kind = GOMP_MAP_TO;
10005 break;
10006 case GOVD_MAP_FROM_ONLY:
10007 kind = GOMP_MAP_FROM;
10008 break;
10009 case GOVD_MAP_ALLOC_ONLY:
10010 kind = GOMP_MAP_ALLOC;
10011 break;
10012 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
10013 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
10014 break;
10015 case GOVD_MAP_FORCE_PRESENT:
10016 kind = GOMP_MAP_FORCE_PRESENT;
10017 break;
10018 default:
10019 gcc_unreachable ();
10020 }
10021 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
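      /* Variable-sized decls live behind a pointer recorded in their
	 DECL_VALUE_EXPR; map the pointed-to storage and chain a pointer map
	 for the base.  */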
10022 if (DECL_SIZE (decl)
10023 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10024 {
10025 tree decl2 = DECL_VALUE_EXPR (decl);
10026 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10027 decl2 = TREE_OPERAND (decl2, 0);
10028 gcc_assert (DECL_P (decl2));
10029 tree mem = build_simple_mem_ref (decl2);
10030 OMP_CLAUSE_DECL (clause) = mem;
10031 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10032 if (gimplify_omp_ctxp->outer_context)
10033 {
10034 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10035 omp_notice_variable (ctx, decl2, true);
10036 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
10037 }
10038 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10039 OMP_CLAUSE_MAP);
10040 OMP_CLAUSE_DECL (nc) = decl;
10041 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10042 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
10043 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10044 else
10045 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10046 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10047 OMP_CLAUSE_CHAIN (clause) = nc;
10048 }
10049 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
10050 && lang_hooks.decls.omp_privatize_by_reference (decl))
10051 {
10052 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
10053 OMP_CLAUSE_SIZE (clause)
10054 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
10055 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10056 gimplify_omp_ctxp = ctx->outer_context;
10057 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
10058 pre_p, NULL, is_gimple_val, fb_rvalue);
10059 gimplify_omp_ctxp = ctx;
10060 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10061 OMP_CLAUSE_MAP);
10062 OMP_CLAUSE_DECL (nc) = decl;
10063 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10064 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
10065 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10066 OMP_CLAUSE_CHAIN (clause) = nc;
10067 }
10068 else
10069 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
10070 }
10071 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
10072 {
10073 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
10074 OMP_CLAUSE_DECL (nc) = decl;
10075 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
10076 OMP_CLAUSE_CHAIN (nc) = chain;
10077 OMP_CLAUSE_CHAIN (clause) = nc;
10078 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10079 gimplify_omp_ctxp = ctx->outer_context;
10080 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10081 gimplify_omp_ctxp = ctx;
10082 }
10083 *list_p = clause;
10084 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10085 gimplify_omp_ctxp = ctx->outer_context;
10086 lang_hooks.decls.omp_finish_clause (clause, pre_p);
10087 if (gimplify_omp_ctxp)
10088 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
10089 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
10090 && DECL_P (OMP_CLAUSE_SIZE (clause)))
10091 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
10092 true);
10093 gimplify_omp_ctxp = ctx;
10094 return 0;
10095 }
10096
10097 static void
10098 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
10099 enum tree_code code)
10100 {
10101 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10102 tree *orig_list_p = list_p;
10103 tree c, decl;
10104 bool has_inscan_reductions = false;
10105
10106 if (body)
10107 {
10108 struct gimplify_omp_ctx *octx;
10109 for (octx = ctx; octx; octx = octx->outer_context)
10110 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
10111 break;
10112 if (octx)
10113 {
10114 struct walk_stmt_info wi;
10115 memset (&wi, 0, sizeof (wi));
10116 walk_gimple_seq (body, omp_find_stores_stmt,
10117 omp_find_stores_op, &wi);
10118 }
10119 }
10120
10121 if (ctx->add_safelen1)
10122 {
10123 /* If there are VLAs in the body of the simd loop, prevent
10124 vectorization. */
10125 gcc_assert (ctx->region_type == ORT_SIMD);
10126 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
10127 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
10128 OMP_CLAUSE_CHAIN (c) = *list_p;
10129 *list_p = c;
10130 list_p = &OMP_CLAUSE_CHAIN (c);
10131 }
10132
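  /* For a worksharing construct inside a combined parallel, copy any
     conditional lastprivate clauses down from the parallel, adding a
     firstprivate clause when the outer context requires it.  */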
10133 if (ctx->region_type == ORT_WORKSHARE
10134 && ctx->outer_context
10135 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
10136 {
10137 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
10138 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10139 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10140 {
10141 decl = OMP_CLAUSE_DECL (c);
10142 splay_tree_node n
10143 = splay_tree_lookup (ctx->outer_context->variables,
10144 (splay_tree_key) decl);
10145 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
10146 (splay_tree_key) decl));
10147 omp_add_variable (ctx, decl, n->value);
10148 tree c2 = copy_node (c);
10149 OMP_CLAUSE_CHAIN (c2) = *list_p;
10150 *list_p = c2;
10151 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
10152 continue;
10153 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10154 OMP_CLAUSE_FIRSTPRIVATE);
10155 OMP_CLAUSE_DECL (c2) = decl;
10156 OMP_CLAUSE_CHAIN (c2) = *list_p;
10157 *list_p = c2;
10158 }
10159 }
10160 while ((c = *list_p) != NULL)
10161 {
10162 splay_tree_node n;
10163 bool remove = false;
10164
10165 switch (OMP_CLAUSE_CODE (c))
10166 {
10167 case OMP_CLAUSE_FIRSTPRIVATE:
10168 if ((ctx->region_type & ORT_TARGET)
10169 && (ctx->region_type & ORT_ACC) == 0
10170 && TYPE_ATOMIC (strip_array_types
10171 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
10172 {
10173 error_at (OMP_CLAUSE_LOCATION (c),
10174 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10175 "%<target%> construct", OMP_CLAUSE_DECL (c));
10176 remove = true;
10177 break;
10178 }
10179 /* FALLTHRU */
10180 case OMP_CLAUSE_PRIVATE:
10181 case OMP_CLAUSE_SHARED:
10182 case OMP_CLAUSE_LINEAR:
10183 decl = OMP_CLAUSE_DECL (c);
10184 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10185 remove = !(n->value & GOVD_SEEN);
10186 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
10187 && code == OMP_PARALLEL
10188 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10189 remove = true;
10190 if (! remove)
10191 {
10192 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
10193 if ((n->value & GOVD_DEBUG_PRIVATE)
10194 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
10195 {
10196 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
10197 || ((n->value & GOVD_DATA_SHARE_CLASS)
10198 == GOVD_SHARED));
10199 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
10200 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
10201 }
10202 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10203 && (n->value & GOVD_WRITTEN) == 0
10204 && DECL_P (decl)
10205 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10206 OMP_CLAUSE_SHARED_READONLY (c) = 1;
10207 else if (DECL_P (decl)
10208 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10209 && (n->value & GOVD_WRITTEN) != 0)
10210 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10211 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
10212 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10213 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10214 }
10215 break;
10216
10217 case OMP_CLAUSE_LASTPRIVATE:
10218 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10219 accurately reflect the presence of a FIRSTPRIVATE clause. */
10220 decl = OMP_CLAUSE_DECL (c);
10221 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10222 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
10223 = (n->value & GOVD_FIRSTPRIVATE) != 0;
10224 if (code == OMP_DISTRIBUTE
10225 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10226 {
10227 remove = true;
10228 error_at (OMP_CLAUSE_LOCATION (c),
10229 "same variable used in %<firstprivate%> and "
10230 "%<lastprivate%> clauses on %<distribute%> "
10231 "construct");
10232 }
10233 if (!remove
10234 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10235 && DECL_P (decl)
10236 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10237 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10238 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
10239 remove = true;
10240 break;
10241
10242 case OMP_CLAUSE_ALIGNED:
10243 decl = OMP_CLAUSE_DECL (c);
10244 if (!is_global_var (decl))
10245 {
10246 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10247 remove = n == NULL || !(n->value & GOVD_SEEN);
10248 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10249 {
10250 struct gimplify_omp_ctx *octx;
10251 if (n != NULL
10252 && (n->value & (GOVD_DATA_SHARE_CLASS
10253 & ~GOVD_FIRSTPRIVATE)))
10254 remove = true;
10255 else
10256 for (octx = ctx->outer_context; octx;
10257 octx = octx->outer_context)
10258 {
10259 n = splay_tree_lookup (octx->variables,
10260 (splay_tree_key) decl);
10261 if (n == NULL)
10262 continue;
10263 if (n->value & GOVD_LOCAL)
10264 break;
10265 /* We have to avoid assigning a shared variable
10266 to itself when trying to add
10267 __builtin_assume_aligned. */
10268 if (n->value & GOVD_SHARED)
10269 {
10270 remove = true;
10271 break;
10272 }
10273 }
10274 }
10275 }
10276 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
10277 {
10278 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10279 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10280 remove = true;
10281 }
10282 break;
10283
10284 case OMP_CLAUSE_NONTEMPORAL:
10285 decl = OMP_CLAUSE_DECL (c);
10286 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10287 remove = n == NULL || !(n->value & GOVD_SEEN);
10288 break;
10289
10290 case OMP_CLAUSE_MAP:
10291 if (code == OMP_TARGET_EXIT_DATA
10292 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
10293 {
10294 remove = true;
10295 break;
10296 }
10297 decl = OMP_CLAUSE_DECL (c);
10298 /* Data clauses associated with reductions must be
10299 compatible with present_or_copy. Warn and adjust the clause
10300 if that is not the case. */
10301 if (ctx->region_type == ORT_ACC_PARALLEL
10302 || ctx->region_type == ORT_ACC_SERIAL)
10303 {
10304 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
10305 n = NULL;
10306
10307 if (DECL_P (t))
10308 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
10309
10310 if (n && (n->value & GOVD_REDUCTION))
10311 {
10312 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
10313
10314 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
10315 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
10316 && kind != GOMP_MAP_FORCE_PRESENT
10317 && kind != GOMP_MAP_POINTER)
10318 {
10319 warning_at (OMP_CLAUSE_LOCATION (c), 0,
10320 "incompatible data clause with reduction "
10321 "on %qE; promoting to %<present_or_copy%>",
10322 DECL_NAME (t));
10323 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
10324 }
10325 }
10326 }
10327 if (!DECL_P (decl))
10328 {
10329 if ((ctx->region_type & ORT_TARGET) != 0
10330 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10331 {
10332 if (TREE_CODE (decl) == INDIRECT_REF
10333 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10334 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10335 == REFERENCE_TYPE))
10336 decl = TREE_OPERAND (decl, 0);
10337 if (TREE_CODE (decl) == COMPONENT_REF)
10338 {
10339 while (TREE_CODE (decl) == COMPONENT_REF)
10340 decl = TREE_OPERAND (decl, 0);
10341 if (DECL_P (decl))
10342 {
10343 n = splay_tree_lookup (ctx->variables,
10344 (splay_tree_key) decl);
10345 if (!(n->value & GOVD_SEEN))
10346 remove = true;
10347 }
10348 }
10349 }
10350 break;
10351 }
10352 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10353 if ((ctx->region_type & ORT_TARGET) != 0
10354 && !(n->value & GOVD_SEEN)
10355 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
10356 && (!is_global_var (decl)
10357 || !lookup_attribute ("omp declare target link",
10358 DECL_ATTRIBUTES (decl))))
10359 {
10360 remove = true;
10361 /* For struct element mapping, if the struct is never referenced
10362 in the target block and none of the mappings has an always
10363 modifier, remove all the struct element mappings, which
10364 immediately follow the GOMP_MAP_STRUCT map clause. */
10365 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
10366 {
10367 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
10368 while (cnt--)
10369 OMP_CLAUSE_CHAIN (c)
10370 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
10371 }
10372 }
10373 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
10374 && code == OMP_TARGET_EXIT_DATA)
10375 remove = true;
10376 else if (DECL_SIZE (decl)
10377 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
10378 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
10379 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
10380 && (OMP_CLAUSE_MAP_KIND (c)
10381 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10382 {
10383 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10384 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10385 INTEGER_CST. */
10386 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
10387
10388 tree decl2 = DECL_VALUE_EXPR (decl);
10389 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10390 decl2 = TREE_OPERAND (decl2, 0);
10391 gcc_assert (DECL_P (decl2));
10392 tree mem = build_simple_mem_ref (decl2);
10393 OMP_CLAUSE_DECL (c) = mem;
10394 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10395 if (ctx->outer_context)
10396 {
10397 omp_notice_variable (ctx->outer_context, decl2, true);
10398 omp_notice_variable (ctx->outer_context,
10399 OMP_CLAUSE_SIZE (c), true);
10400 }
10401 if (((ctx->region_type & ORT_TARGET) != 0
10402 || !ctx->target_firstprivatize_array_bases)
10403 && ((n->value & GOVD_SEEN) == 0
10404 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
10405 {
10406 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10407 OMP_CLAUSE_MAP);
10408 OMP_CLAUSE_DECL (nc) = decl;
10409 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10410 if (ctx->target_firstprivatize_array_bases)
10411 OMP_CLAUSE_SET_MAP_KIND (nc,
10412 GOMP_MAP_FIRSTPRIVATE_POINTER);
10413 else
10414 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10415 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
10416 OMP_CLAUSE_CHAIN (c) = nc;
10417 c = nc;
10418 }
10419 }
10420 else
10421 {
10422 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10423 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10424 gcc_assert ((n->value & GOVD_SEEN) == 0
10425 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10426 == 0));
10427 }
10428 break;
10429
10430 case OMP_CLAUSE_TO:
10431 case OMP_CLAUSE_FROM:
10432 case OMP_CLAUSE__CACHE_:
10433 decl = OMP_CLAUSE_DECL (c);
10434 if (!DECL_P (decl))
10435 break;
10436 if (DECL_SIZE (decl)
10437 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10438 {
10439 tree decl2 = DECL_VALUE_EXPR (decl);
10440 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10441 decl2 = TREE_OPERAND (decl2, 0);
10442 gcc_assert (DECL_P (decl2));
10443 tree mem = build_simple_mem_ref (decl2);
10444 OMP_CLAUSE_DECL (c) = mem;
10445 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10446 if (ctx->outer_context)
10447 {
10448 omp_notice_variable (ctx->outer_context, decl2, true);
10449 omp_notice_variable (ctx->outer_context,
10450 OMP_CLAUSE_SIZE (c), true);
10451 }
10452 }
10453 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10454 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10455 break;
10456
10457 case OMP_CLAUSE_REDUCTION:
10458 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10459 {
10460 decl = OMP_CLAUSE_DECL (c);
10461 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10462 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
10463 {
10464 remove = true;
10465 error_at (OMP_CLAUSE_LOCATION (c),
10466 "%qD specified in %<inscan%> %<reduction%> clause "
10467 "but not in %<scan%> directive clause", decl);
10468 break;
10469 }
10470 has_inscan_reductions = true;
10471 }
10472 /* FALLTHRU */
10473 case OMP_CLAUSE_IN_REDUCTION:
10474 case OMP_CLAUSE_TASK_REDUCTION:
10475 decl = OMP_CLAUSE_DECL (c);
10476 /* OpenACC reductions need a present_or_copy data clause.
10477 Add one if necessary. Emit an error when the reduction is private. */
10478 if (ctx->region_type == ORT_ACC_PARALLEL
10479 || ctx->region_type == ORT_ACC_SERIAL)
10480 {
10481 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10482 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10483 {
10484 remove = true;
10485 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
10486 "reduction on %qE", DECL_NAME (decl));
10487 }
10488 else if ((n->value & GOVD_MAP) == 0)
10489 {
10490 tree next = OMP_CLAUSE_CHAIN (c);
10491 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
10492 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
10493 OMP_CLAUSE_DECL (nc) = decl;
10494 OMP_CLAUSE_CHAIN (c) = nc;
10495 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10496 while (1)
10497 {
10498 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
10499 if (OMP_CLAUSE_CHAIN (nc) == NULL)
10500 break;
10501 nc = OMP_CLAUSE_CHAIN (nc);
10502 }
10503 OMP_CLAUSE_CHAIN (nc) = next;
10504 n->value |= GOVD_MAP;
10505 }
10506 }
10507 if (DECL_P (decl)
10508 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10509 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10510 break;
10511 case OMP_CLAUSE_COPYIN:
10512 case OMP_CLAUSE_COPYPRIVATE:
10513 case OMP_CLAUSE_IF:
10514 case OMP_CLAUSE_NUM_THREADS:
10515 case OMP_CLAUSE_NUM_TEAMS:
10516 case OMP_CLAUSE_THREAD_LIMIT:
10517 case OMP_CLAUSE_DIST_SCHEDULE:
10518 case OMP_CLAUSE_DEVICE:
10519 case OMP_CLAUSE_SCHEDULE:
10520 case OMP_CLAUSE_NOWAIT:
10521 case OMP_CLAUSE_ORDERED:
10522 case OMP_CLAUSE_DEFAULT:
10523 case OMP_CLAUSE_UNTIED:
10524 case OMP_CLAUSE_COLLAPSE:
10525 case OMP_CLAUSE_FINAL:
10526 case OMP_CLAUSE_MERGEABLE:
10527 case OMP_CLAUSE_PROC_BIND:
10528 case OMP_CLAUSE_SAFELEN:
10529 case OMP_CLAUSE_SIMDLEN:
10530 case OMP_CLAUSE_DEPEND:
10531 case OMP_CLAUSE_PRIORITY:
10532 case OMP_CLAUSE_GRAINSIZE:
10533 case OMP_CLAUSE_NUM_TASKS:
10534 case OMP_CLAUSE_NOGROUP:
10535 case OMP_CLAUSE_THREADS:
10536 case OMP_CLAUSE_SIMD:
10537 case OMP_CLAUSE_HINT:
10538 case OMP_CLAUSE_DEFAULTMAP:
10539 case OMP_CLAUSE_ORDER:
10540 case OMP_CLAUSE_BIND:
10541 case OMP_CLAUSE_USE_DEVICE_PTR:
10542 case OMP_CLAUSE_USE_DEVICE_ADDR:
10543 case OMP_CLAUSE_IS_DEVICE_PTR:
10544 case OMP_CLAUSE_ASYNC:
10545 case OMP_CLAUSE_WAIT:
10546 case OMP_CLAUSE_INDEPENDENT:
10547 case OMP_CLAUSE_NUM_GANGS:
10548 case OMP_CLAUSE_NUM_WORKERS:
10549 case OMP_CLAUSE_VECTOR_LENGTH:
10550 case OMP_CLAUSE_GANG:
10551 case OMP_CLAUSE_WORKER:
10552 case OMP_CLAUSE_VECTOR:
10553 case OMP_CLAUSE_AUTO:
10554 case OMP_CLAUSE_SEQ:
10555 case OMP_CLAUSE_TILE:
10556 case OMP_CLAUSE_IF_PRESENT:
10557 case OMP_CLAUSE_FINALIZE:
10558 case OMP_CLAUSE_INCLUSIVE:
10559 case OMP_CLAUSE_EXCLUSIVE:
10560 break;
10561
10562 default:
10563 gcc_unreachable ();
10564 }
10565
10566 if (remove)
10567 *list_p = OMP_CLAUSE_CHAIN (c);
10568 else
10569 list_p = &OMP_CLAUSE_CHAIN (c);
10570 }
10571
10572 /* Add in any implicit data sharing. */
10573 struct gimplify_adjust_omp_clauses_data data;
10574 data.list_p = list_p;
10575 data.pre_p = pre_p;
10576 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
10577
10578 if (has_inscan_reductions)
10579 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
10580 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10581 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10582 {
10583 error_at (OMP_CLAUSE_LOCATION (c),
10584 "%<inscan%> %<reduction%> clause used together with "
10585 "%<linear%> clause for a variable other than loop "
10586 "iterator");
10587 break;
10588 }
10589
10590 gimplify_omp_ctxp = ctx->outer_context;
10591 delete_omp_context (ctx);
10592 }
10593
10594 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
10595 -1 if it is not known yet (a simd construct is involved and the answer
10596 won't be known until vectorization), and 1 if they do match. If SCORES
10597 is non-NULL, it should point to an array of at least 2*NCONSTRUCTS+2
10598 ints, and will be filled with the positions of the CONSTRUCTS (position
10599 -1 if one will never match) followed by the number of constructs in the
10600 OpenMP context construct trait. If the score depends on whether the
10601 code will be in a declare simd clone or not, the function returns 2 and
10602 there will be two sets of scores, the first for the case that it is not
10603 in a declare simd clone, the second for the case that it is. */
10604
10605 int
10606 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
10607 int *scores)
10608 {
10609 int matched = 0, cnt = 0;
10610 bool simd_seen = false;
10611 bool target_seen = false;
10612 int declare_simd_cnt = -1;
10613 auto_vec<enum tree_code, 16> codes;
10614 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
10615 {
10616 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
10617 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
10618 == ORT_TARGET && ctx->code == OMP_TARGET)
10619 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
10620 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
10621 || (ctx->region_type == ORT_SIMD
10622 && ctx->code == OMP_SIMD
10623 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
10624 {
10625 ++cnt;
10626 if (scores)
10627 codes.safe_push (ctx->code);
10628 else if (matched < nconstructs && ctx->code == constructs[matched])
10629 {
10630 if (ctx->code == OMP_SIMD)
10631 {
10632 if (matched)
10633 return 0;
10634 simd_seen = true;
10635 }
10636 ++matched;
10637 }
10638 if (ctx->code == OMP_TARGET)
10639 {
10640 if (scores == NULL)
10641 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
10642 target_seen = true;
10643 break;
10644 }
10645 }
10646 else if (ctx->region_type == ORT_WORKSHARE
10647 && ctx->code == OMP_LOOP
10648 && ctx->outer_context
10649 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
10650 && ctx->outer_context->outer_context
10651 && ctx->outer_context->outer_context->code == OMP_LOOP
10652 && ctx->outer_context->outer_context->distribute)
10653 ctx = ctx->outer_context->outer_context;
10654 ctx = ctx->outer_context;
10655 }
10656 if (!target_seen
10657 && lookup_attribute ("omp declare simd",
10658 DECL_ATTRIBUTES (current_function_decl)))
10659 {
10660 /* Declare simd is a "maybe" case: it is supposed to be added only to
10661    the clones created by omp-simd-clone.c, not to the base function. */
10662 declare_simd_cnt = cnt++;
10663 if (scores)
10664 codes.safe_push (OMP_SIMD);
10665 else if (cnt == 0
10666 && constructs[0] == OMP_SIMD)
10667 {
10668 gcc_assert (matched == 0);
10669 simd_seen = true;
10670 if (++matched == nconstructs)
10671 return -1;
10672 }
10673 }
10674 if (tree attr = lookup_attribute ("omp declare variant variant",
10675 DECL_ATTRIBUTES (current_function_decl)))
10676 {
10677 enum tree_code variant_constructs[5];
10678 int variant_nconstructs = 0;
10679 if (!target_seen)
10680 variant_nconstructs
10681 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
10682 variant_constructs);
10683 for (int i = 0; i < variant_nconstructs; i++)
10684 {
10685 ++cnt;
10686 if (scores)
10687 codes.safe_push (variant_constructs[i]);
10688 else if (matched < nconstructs
10689 && variant_constructs[i] == constructs[matched])
10690 {
10691 if (variant_constructs[i] == OMP_SIMD)
10692 {
10693 if (matched)
10694 return 0;
10695 simd_seen = true;
10696 }
10697 ++matched;
10698 }
10699 }
10700 }
10701 if (!target_seen
10702 && lookup_attribute ("omp declare target block",
10703 DECL_ATTRIBUTES (current_function_decl)))
10704 {
10705 if (scores)
10706 codes.safe_push (OMP_TARGET);
10707 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
10708 ++matched;
10709 }
10710 if (scores)
10711 {
10712 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
10713 {
10714 int j = codes.length () - 1;
10715 for (int i = nconstructs - 1; i >= 0; i--)
10716 {
10717 while (j >= 0
10718 && (pass != 0 || declare_simd_cnt != j)
10719 && constructs[i] != codes[j])
10720 --j;
10721 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
10722 *scores++ = j - 1;
10723 else
10724 *scores++ = j;
10725 }
10726 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
10727 ? codes.length () - 1 : codes.length ());
10728 }
10729 return declare_simd_cnt == -1 ? 1 : 2;
10730 }
10731 if (matched == nconstructs)
10732 return simd_seen ? -1 : 1;
10733 return 0;
10734 }
10735
10736 /* Gimplify OACC_CACHE. */
10737
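/* For example (a sketch), a directive such as

     #pragma acc cache (a[0:n])

   has its clauses scanned and adjusted like any other OpenACC construct,
   but the construct itself is then dropped (see the TODO below).  */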
10738 static void
10739 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10740 {
10741 tree expr = *expr_p;
10742
10743 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
10744 OACC_CACHE);
10745 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10746 OACC_CACHE);
10747
10748 /* TODO: Do something sensible with this information. */
10749
10750 *expr_p = NULL_TREE;
10751 }
10752
10753 /* Helper function of gimplify_oacc_declare. Its purpose is to translate,
10754    if required, the 'kind' in CLAUSE into an 'entry' kind and an 'exit'
10755    kind. The entry kind replaces the one in CLAUSE, while the exit kind
10756    is used in a new omp_clause that is returned to the caller. */
10757
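/* For instance, a GOMP_MAP_TOFROM mapping (e.g. from a 'copy' clause, a
   sketch) is split here: the entry kind stored back into CLAUSE becomes
   GOMP_MAP_TO and the returned exit clause gets GOMP_MAP_FROM, so the
   data is copied in on entry and copied back out on exit.  */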
10758 static tree
10759 gimplify_oacc_declare_1 (tree clause)
10760 {
10761 HOST_WIDE_INT kind, new_op;
10762 bool ret = false;
10763 tree c = NULL;
10764
10765 kind = OMP_CLAUSE_MAP_KIND (clause);
10766
10767 switch (kind)
10768 {
10769 case GOMP_MAP_ALLOC:
10770 new_op = GOMP_MAP_RELEASE;
10771 ret = true;
10772 break;
10773
10774 case GOMP_MAP_FROM:
10775 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10776 new_op = GOMP_MAP_FROM;
10777 ret = true;
10778 break;
10779
10780 case GOMP_MAP_TOFROM:
10781 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10782 new_op = GOMP_MAP_FROM;
10783 ret = true;
10784 break;
10785
10786 case GOMP_MAP_DEVICE_RESIDENT:
10787 case GOMP_MAP_FORCE_DEVICEPTR:
10788 case GOMP_MAP_FORCE_PRESENT:
10789 case GOMP_MAP_LINK:
10790 case GOMP_MAP_POINTER:
10791 case GOMP_MAP_TO:
10792 break;
10793
10794 default:
10795 gcc_unreachable ();
10796 break;
10797 }
10798
10799 if (ret)
10800 {
10801 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10802 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10803 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10804 }
10805
10806 return c;
10807 }
10808
10809 /* Gimplify OACC_DECLARE. */
10810
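/* A sketch of the effect: for a local declaration like

     float x[128];
   #pragma acc declare copy(x)

   the clauses are gimplified, non-global function-local decls are
   recorded in oacc_declare_returns so that the matching 'exit' clause
   can be emitted later, and a GIMPLE_OMP_TARGET statement with kind
   GF_OMP_TARGET_KIND_OACC_DECLARE is queued on PRE_P.  */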
10811 static void
10812 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10813 {
10814 tree expr = *expr_p;
10815 gomp_target *stmt;
10816 tree clauses, t, decl;
10817
10818 clauses = OACC_DECLARE_CLAUSES (expr);
10819
10820 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
10821 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
10822
10823 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10824 {
10825 decl = OMP_CLAUSE_DECL (t);
10826
10827 if (TREE_CODE (decl) == MEM_REF)
10828 decl = TREE_OPERAND (decl, 0);
10829
10830 if (VAR_P (decl) && !is_oacc_declared (decl))
10831 {
10832 tree attr = get_identifier ("oacc declare target");
10833 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10834 DECL_ATTRIBUTES (decl));
10835 }
10836
10837 if (VAR_P (decl)
10838 && !is_global_var (decl)
10839 && DECL_CONTEXT (decl) == current_function_decl)
10840 {
10841 tree c = gimplify_oacc_declare_1 (t);
10842 if (c)
10843 {
10844 if (oacc_declare_returns == NULL)
10845 oacc_declare_returns = new hash_map<tree, tree>;
10846
10847 oacc_declare_returns->put (decl, c);
10848 }
10849 }
10850
10851 if (gimplify_omp_ctxp)
10852 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
10853 }
10854
10855 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10856 clauses);
10857
10858 gimplify_seq_add_stmt (pre_p, stmt);
10859
10860 *expr_p = NULL_TREE;
10861 }
10862
10863 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10864 gimplification of the body, as well as scanning the body for used
10865 variables. We need to do this scan now, because variable-sized
10866 decls will be decomposed during gimplification. */
10867
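/* E.g. (a sketch, assuming default data sharing)

     #pragma omp parallel num_threads (4)
       x++;

   becomes a GIMPLE_OMP_PARALLEL wrapping the gimplified body, with the
   num_threads clause kept and the implicit data sharing for x added by
   gimplify_adjust_omp_clauses.  */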
10868 static void
10869 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
10870 {
10871 tree expr = *expr_p;
10872 gimple *g;
10873 gimple_seq body = NULL;
10874
10875 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10876 OMP_PARALLEL_COMBINED (expr)
10877 ? ORT_COMBINED_PARALLEL
10878 : ORT_PARALLEL, OMP_PARALLEL);
10879
10880 push_gimplify_context ();
10881
10882 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10883 if (gimple_code (g) == GIMPLE_BIND)
10884 pop_gimplify_context (g);
10885 else
10886 pop_gimplify_context (NULL);
10887
10888 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
10889 OMP_PARALLEL);
10890
10891 g = gimple_build_omp_parallel (body,
10892 OMP_PARALLEL_CLAUSES (expr),
10893 NULL_TREE, NULL_TREE);
10894 if (OMP_PARALLEL_COMBINED (expr))
10895 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10896 gimplify_seq_add_stmt (pre_p, g);
10897 *expr_p = NULL_TREE;
10898 }
10899
10900 /* Gimplify the contents of an OMP_TASK statement. This involves
10901 gimplification of the body, as well as scanning the body for used
10902 variables. We need to do this scan now, because variable-sized
10903 decls will be decomposed during gimplification. */
10904
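/* Note that an OMP_TASK with a NULL body represents '#pragma omp taskwait'
   with depend clause(s), e.g. (sketch)

     #pragma omp taskwait depend(in: a)

   For that case no body is gimplified and the resulting GIMPLE_OMP_TASK
   has its taskwait_p flag set.  */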
10905 static void
10906 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
10907 {
10908 tree expr = *expr_p;
10909 gimple *g;
10910 gimple_seq body = NULL;
10911
10912 if (OMP_TASK_BODY (expr) == NULL_TREE)
10913 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10914 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10915 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10916 {
10917 error_at (OMP_CLAUSE_LOCATION (c),
10918 "%<mutexinoutset%> kind in %<depend%> clause on a "
10919 "%<taskwait%> construct");
10920 break;
10921 }
10922
10923 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
10924 omp_find_clause (OMP_TASK_CLAUSES (expr),
10925 OMP_CLAUSE_UNTIED)
10926 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
10927
10928 if (OMP_TASK_BODY (expr))
10929 {
10930 push_gimplify_context ();
10931
10932 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10933 if (gimple_code (g) == GIMPLE_BIND)
10934 pop_gimplify_context (g);
10935 else
10936 pop_gimplify_context (NULL);
10937 }
10938
10939 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10940 OMP_TASK);
10941
10942 g = gimple_build_omp_task (body,
10943 OMP_TASK_CLAUSES (expr),
10944 NULL_TREE, NULL_TREE,
10945 NULL_TREE, NULL_TREE, NULL_TREE);
10946 if (OMP_TASK_BODY (expr) == NULL_TREE)
10947 gimple_omp_task_set_taskwait_p (g, true);
10948 gimplify_seq_add_stmt (pre_p, g);
10949 *expr_p = NULL_TREE;
10950 }
10951
10952 /* Helper function of gimplify_omp_for: find the OMP_FOR or OMP_SIMD
10953    with non-NULL OMP_FOR_INIT. Also fill in the pdata array:
10954    pdata[0] is non-NULL if there is anything non-trivial in between,
10955    pdata[1] is the address of the OMP_PARALLEL in between if any,
10956    pdata[2] is the address of the OMP_FOR in between if any, and
10957    pdata[3] is the address of the inner OMP_FOR/OMP_SIMD. */
10958
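/* For a combined construct such as (sketch)

     #pragma omp distribute parallel for
     for (i = 0; i < n; i++) ...

   the outer OMP_DISTRIBUTE has a NULL OMP_FOR_INIT; this walker locates
   the inner OMP_FOR that carries the actual loop, recording the
   intervening OMP_PARALLEL in pdata[1] along the way.  */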
10959 static tree
10960 find_combined_omp_for (tree *tp, int *walk_subtrees, void *data)
10961 {
10962 tree **pdata = (tree **) data;
10963 *walk_subtrees = 0;
10964 switch (TREE_CODE (*tp))
10965 {
10966 case OMP_FOR:
10967 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10968 {
10969 pdata[3] = tp;
10970 return *tp;
10971 }
10972 pdata[2] = tp;
10973 *walk_subtrees = 1;
10974 break;
10975 case OMP_SIMD:
10976 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10977 {
10978 pdata[3] = tp;
10979 return *tp;
10980 }
10981 break;
10982 case BIND_EXPR:
10983 if (BIND_EXPR_VARS (*tp)
10984 || (BIND_EXPR_BLOCK (*tp)
10985 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp))))
10986 pdata[0] = tp;
10987 *walk_subtrees = 1;
10988 break;
10989 case STATEMENT_LIST:
10990 if (!tsi_one_before_end_p (tsi_start (*tp)))
10991 pdata[0] = tp;
10992 *walk_subtrees = 1;
10993 break;
10994 case TRY_FINALLY_EXPR:
10995 pdata[0] = tp;
10996 *walk_subtrees = 1;
10997 break;
10998 case OMP_PARALLEL:
10999 pdata[1] = tp;
11000 *walk_subtrees = 1;
11001 break;
11002 default:
11003 break;
11004 }
11005 return NULL_TREE;
11006 }
11007
11008 /* Gimplify the gross structure of an OMP_FOR statement. */
11009
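/* The loop is expected in canonical form: OMP_FOR_INIT, OMP_FOR_COND and
   OMP_FOR_INCR are TREE_VECs with one element per collapsed loop.  E.g.
   (sketch)

     #pragma omp for collapse(2)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++) ...

   has two entries in each vector, and each iteration variable is made
   private (or lastprivate/linear, as appropriate) below.  */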
11010 static enum gimplify_status
11011 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
11012 {
11013 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
11014 enum gimplify_status ret = GS_ALL_DONE;
11015 enum gimplify_status tret;
11016 gomp_for *gfor;
11017 gimple_seq for_body, for_pre_body;
11018 int i;
11019 bitmap has_decl_expr = NULL;
11020 enum omp_region_type ort = ORT_WORKSHARE;
11021
11022 orig_for_stmt = for_stmt = *expr_p;
11023
11024 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
11025 != NULL_TREE);
11026 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11027 {
11028 tree *data[4] = { NULL, NULL, NULL, NULL };
11029 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
11030 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
11031 find_combined_omp_for, data, NULL);
11032 if (inner_for_stmt == NULL_TREE)
11033 {
11034 gcc_assert (seen_error ());
11035 *expr_p = NULL_TREE;
11036 return GS_ERROR;
11037 }
11038 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
11039 {
11040 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
11041 &OMP_FOR_PRE_BODY (for_stmt));
11042 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
11043 }
11044 if (OMP_FOR_PRE_BODY (inner_for_stmt))
11045 {
11046 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
11047 &OMP_FOR_PRE_BODY (for_stmt));
11048 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
11049 }
11050
11051 if (data[0])
11052 {
11053 /* We have some statements or variable declarations in between
11054 the composite construct directives. Move them around the
11055 inner_for_stmt. */
11056 data[0] = expr_p;
11057 for (i = 0; i < 3; i++)
11058 if (data[i])
11059 {
11060 tree t = *data[i];
11061 if (i < 2 && data[i + 1] == &OMP_BODY (t))
11062 data[i + 1] = data[i];
11063 *data[i] = OMP_BODY (t);
11064 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
11065 NULL_TREE, make_node (BLOCK));
11066 OMP_BODY (t) = body;
11067 append_to_statement_list_force (inner_for_stmt,
11068 &BIND_EXPR_BODY (body));
11069 *data[3] = t;
11070 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
11071 gcc_assert (*data[3] == inner_for_stmt);
11072 }
11073 return GS_OK;
11074 }
11075
11076 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11077 if (!loop_p
11078 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
11079 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11080 i)) == TREE_LIST
11081 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11082 i)))
11083 {
11084 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11085 /* Class iterators aren't allowed on OMP_SIMD, so the only
11086 case we need to solve is distribute parallel for. They are
11087 allowed on the loop construct, but that is already handled
11088 in gimplify_omp_loop. */
11089 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
11090 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
11091 && data[1]);
11092 tree orig_decl = TREE_PURPOSE (orig);
11093 tree last = TREE_VALUE (orig);
11094 tree *pc;
11095 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
11096 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
11097 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
11098 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
11099 && OMP_CLAUSE_DECL (*pc) == orig_decl)
11100 break;
11101 if (*pc == NULL_TREE)
11102 {
11103 tree *spc;
11104 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
11105 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
11106 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
11107 && OMP_CLAUSE_DECL (*spc) == orig_decl)
11108 break;
11109 if (*spc)
11110 {
11111 tree c = *spc;
11112 *spc = OMP_CLAUSE_CHAIN (c);
11113 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
11114 *pc = c;
11115 }
11116 }
11117 if (*pc == NULL_TREE)
11118 ;
11119 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
11120 {
11121 /* A private clause will appear only on inner_for_stmt.
11122    Change it into firstprivate, and add a private clause
11123    on for_stmt. */
11124 tree c = copy_node (*pc);
11125 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11126 OMP_FOR_CLAUSES (for_stmt) = c;
11127 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
11128 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11129 }
11130 else
11131 {
11132 /* A lastprivate clause will appear on both inner_for_stmt
11133    and for_stmt. Add a firstprivate clause to
11134    inner_for_stmt. */
11135 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
11136 OMP_CLAUSE_FIRSTPRIVATE);
11137 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
11138 OMP_CLAUSE_CHAIN (c) = *pc;
11139 *pc = c;
11140 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11141 }
11142 tree c = build_omp_clause (UNKNOWN_LOCATION,
11143 OMP_CLAUSE_FIRSTPRIVATE);
11144 OMP_CLAUSE_DECL (c) = last;
11145 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11146 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11147 c = build_omp_clause (UNKNOWN_LOCATION,
11148 *pc ? OMP_CLAUSE_SHARED
11149 : OMP_CLAUSE_FIRSTPRIVATE);
11150 OMP_CLAUSE_DECL (c) = orig_decl;
11151 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11152 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11153 }
11154 /* Similarly, take care of C++ range-for temporaries; those should
11155    be firstprivate on the OMP_PARALLEL, if there is one. */
11156 if (data[1])
11157 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11158 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
11159 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11160 i)) == TREE_LIST
11161 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11162 i)))
11163 {
11164 tree orig
11165 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11166 tree v = TREE_CHAIN (orig);
11167 tree c = build_omp_clause (UNKNOWN_LOCATION,
11168 OMP_CLAUSE_FIRSTPRIVATE);
11169 /* First add firstprivate clause for the __for_end artificial
11170 decl. */
11171 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
11172 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11173 == REFERENCE_TYPE)
11174 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11175 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11176 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11177 if (TREE_VEC_ELT (v, 0))
11178 {
11179 /* And now the same for __for_range artificial decl if it
11180 exists. */
11181 c = build_omp_clause (UNKNOWN_LOCATION,
11182 OMP_CLAUSE_FIRSTPRIVATE);
11183 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
11184 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11185 == REFERENCE_TYPE)
11186 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11187 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11188 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11189 }
11190 }
11191 }
11192
11193 switch (TREE_CODE (for_stmt))
11194 {
11195 case OMP_FOR:
11196 case OMP_DISTRIBUTE:
11197 break;
11198 case OACC_LOOP:
11199 ort = ORT_ACC;
11200 break;
11201 case OMP_TASKLOOP:
11202 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
11203 ort = ORT_UNTIED_TASKLOOP;
11204 else
11205 ort = ORT_TASKLOOP;
11206 break;
11207 case OMP_SIMD:
11208 ort = ORT_SIMD;
11209 break;
11210 default:
11211 gcc_unreachable ();
11212 }
11213
11214 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11215 clause for the IV. */
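/* E.g. for '#pragma omp simd linear(i)' where i is the single loop
   iterator (a sketch), copying the value in on entry would be pointless:
   the IV is assigned by the loop initialization itself.  */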
11216 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11217 {
11218 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
11219 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11220 decl = TREE_OPERAND (t, 0);
11221 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11222 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11223 && OMP_CLAUSE_DECL (c) == decl)
11224 {
11225 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11226 break;
11227 }
11228 }
11229
11230 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
11231 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
11232 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
11233 ? OMP_LOOP : TREE_CODE (for_stmt));
11234
11235 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
11236 gimplify_omp_ctxp->distribute = true;
11237
11238 /* Handle OMP_FOR_INIT. */
11239 for_pre_body = NULL;
11240 if ((ort == ORT_SIMD
11241 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
11242 && OMP_FOR_PRE_BODY (for_stmt))
11243 {
11244 has_decl_expr = BITMAP_ALLOC (NULL);
11245 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
11246 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
11247 == VAR_DECL)
11248 {
11249 t = OMP_FOR_PRE_BODY (for_stmt);
11250 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11251 }
11252 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
11253 {
11254 tree_stmt_iterator si;
11255 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
11256 tsi_next (&si))
11257 {
11258 t = tsi_stmt (si);
11259 if (TREE_CODE (t) == DECL_EXPR
11260 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
11261 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11262 }
11263 }
11264 }
11265 if (OMP_FOR_PRE_BODY (for_stmt))
11266 {
11267 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
11268 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11269 else
11270 {
11271 struct gimplify_omp_ctx ctx;
11272 memset (&ctx, 0, sizeof (ctx));
11273 ctx.region_type = ORT_NONE;
11274 gimplify_omp_ctxp = &ctx;
11275 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11276 gimplify_omp_ctxp = NULL;
11277 }
11278 }
11279 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
11280
11281 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11282 for_stmt = inner_for_stmt;
11283
11284 /* For a taskloop, we need to gimplify the start, end and step before
11285    the taskloop, outside of the taskloop omp context. */
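/* E.g. (sketch)

     #pragma omp taskloop
     for (i = f1 (); i < f2 (); i += f3 ())
       ...

   evaluates the three calls into temporaries here and makes those
   temporaries firstprivate on the taskloop.  */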
11286 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11287 {
11288 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11289 {
11290 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11291 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11292 {
11293 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11294 TREE_OPERAND (t, 1)
11295 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
11296 gimple_seq_empty_p (for_pre_body)
11297 ? pre_p : &for_pre_body, NULL,
11298 false);
11299 /* A reference-to-pointer conversion is considered useless,
11300    but it is significant for the firstprivate clause. Force
11301    it here. */
11302 if (TREE_CODE (type) == POINTER_TYPE
11303 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11304 == REFERENCE_TYPE))
11305 {
11306 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11307 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11308 TREE_OPERAND (t, 1));
11309 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11310 ? pre_p : &for_pre_body);
11311 TREE_OPERAND (t, 1) = v;
11312 }
11313 tree c = build_omp_clause (input_location,
11314 OMP_CLAUSE_FIRSTPRIVATE);
11315 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11316 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11317 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11318 }
11319
11320 /* Handle OMP_FOR_COND. */
11321 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11322 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11323 {
11324 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11325 TREE_OPERAND (t, 1)
11326 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
11327 gimple_seq_empty_p (for_pre_body)
11328 ? pre_p : &for_pre_body, NULL,
11329 false);
11330 /* A reference-to-pointer conversion is considered useless,
11331    but it is significant for the firstprivate clause. Force
11332    it here. */
11333 if (TREE_CODE (type) == POINTER_TYPE
11334 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11335 == REFERENCE_TYPE))
11336 {
11337 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11338 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11339 TREE_OPERAND (t, 1));
11340 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11341 ? pre_p : &for_pre_body);
11342 TREE_OPERAND (t, 1) = v;
11343 }
11344 tree c = build_omp_clause (input_location,
11345 OMP_CLAUSE_FIRSTPRIVATE);
11346 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11347 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11348 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11349 }
11350
11351 /* Handle OMP_FOR_INCR. */
11352 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11353 if (TREE_CODE (t) == MODIFY_EXPR)
11354 {
11355 decl = TREE_OPERAND (t, 0);
11356 t = TREE_OPERAND (t, 1);
11357 tree *tp = &TREE_OPERAND (t, 1);
11358 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
11359 tp = &TREE_OPERAND (t, 0);
11360
11361 if (!is_gimple_constant (*tp))
11362 {
11363 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
11364 ? pre_p : &for_pre_body;
11365 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
11366 tree c = build_omp_clause (input_location,
11367 OMP_CLAUSE_FIRSTPRIVATE);
11368 OMP_CLAUSE_DECL (c) = *tp;
11369 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11370 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11371 }
11372 }
11373 }
11374
11375 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
11376 OMP_TASKLOOP);
11377 }
11378
11379 if (orig_for_stmt != for_stmt)
11380 gimplify_omp_ctxp->combined_loop = true;
11381
11382 for_body = NULL;
11383 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11384 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
11385 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11386 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
11387
11388 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
11389 bool is_doacross = false;
11390 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
11391 {
11392 is_doacross = true;
11393 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
11394 (OMP_FOR_INIT (for_stmt))
11395 * 2);
11396 }
11397 int collapse = 1, tile = 0;
11398 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
11399 if (c)
11400 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
11401 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
11402 if (c)
11403 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
11404 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11405 {
11406 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11407 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11408 decl = TREE_OPERAND (t, 0);
11409 gcc_assert (DECL_P (decl));
11410 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
11411 || POINTER_TYPE_P (TREE_TYPE (decl)));
11412 if (is_doacross)
11413 {
11414 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
11415 {
11416 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11417 if (TREE_CODE (orig_decl) == TREE_LIST)
11418 {
11419 orig_decl = TREE_PURPOSE (orig_decl);
11420 if (!orig_decl)
11421 orig_decl = decl;
11422 }
11423 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
11424 }
11425 else
11426 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11427 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11428 }
11429
11430 /* Make sure the iteration variable is private. */
11431 tree c = NULL_TREE;
11432 tree c2 = NULL_TREE;
11433 if (orig_for_stmt != for_stmt)
11434 {
11435 /* Preserve this information until we gimplify the inner simd. */
11436 if (has_decl_expr
11437 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11438 TREE_PRIVATE (t) = 1;
11439 }
11440 else if (ort == ORT_SIMD)
11441 {
11442 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11443 (splay_tree_key) decl);
11444 omp_is_private (gimplify_omp_ctxp, decl,
11445 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11446 != 1));
11447 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
11448 {
11449 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11450 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
11451 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11452 OMP_CLAUSE_LASTPRIVATE);
11453 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11454 OMP_CLAUSE_LASTPRIVATE))
11455 if (OMP_CLAUSE_DECL (c3) == decl)
11456 {
11457 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11458 "conditional %<lastprivate%> on loop "
11459 "iterator %qD ignored", decl);
11460 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11461 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11462 }
11463 }
11464 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
11465 {
11466 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11467 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11468 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
11469 if ((has_decl_expr
11470 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11471 || TREE_PRIVATE (t))
11472 {
11473 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11474 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11475 }
11476 struct gimplify_omp_ctx *outer
11477 = gimplify_omp_ctxp->outer_context;
11478 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11479 {
11480 if (outer->region_type == ORT_WORKSHARE
11481 && outer->combined_loop)
11482 {
11483 n = splay_tree_lookup (outer->variables,
11484 (splay_tree_key)decl);
11485 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11486 {
11487 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11488 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11489 }
11490 else
11491 {
11492 struct gimplify_omp_ctx *octx = outer->outer_context;
11493 if (octx
11494 && octx->region_type == ORT_COMBINED_PARALLEL
11495 && octx->outer_context
11496 && (octx->outer_context->region_type
11497 == ORT_WORKSHARE)
11498 && octx->outer_context->combined_loop)
11499 {
11500 octx = octx->outer_context;
11501 n = splay_tree_lookup (octx->variables,
11502 (splay_tree_key)decl);
11503 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11504 {
11505 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11506 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11507 }
11508 }
11509 }
11510 }
11511 }
11512
11513 OMP_CLAUSE_DECL (c) = decl;
11514 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11515 OMP_FOR_CLAUSES (for_stmt) = c;
11516 omp_add_variable (gimplify_omp_ctxp, decl, flags);
11517 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11518 {
11519 if (outer->region_type == ORT_WORKSHARE
11520 && outer->combined_loop)
11521 {
11522 if (outer->outer_context
11523 && (outer->outer_context->region_type
11524 == ORT_COMBINED_PARALLEL))
11525 outer = outer->outer_context;
11526 else if (omp_check_private (outer, decl, false))
11527 outer = NULL;
11528 }
11529 else if (((outer->region_type & ORT_TASKLOOP)
11530 == ORT_TASKLOOP)
11531 && outer->combined_loop
11532 && !omp_check_private (gimplify_omp_ctxp,
11533 decl, false))
11534 ;
11535 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11536 {
11537 omp_notice_variable (outer, decl, true);
11538 outer = NULL;
11539 }
11540 if (outer)
11541 {
11542 n = splay_tree_lookup (outer->variables,
11543 (splay_tree_key)decl);
11544 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11545 {
11546 omp_add_variable (outer, decl,
11547 GOVD_LASTPRIVATE | GOVD_SEEN);
11548 if (outer->region_type == ORT_COMBINED_PARALLEL
11549 && outer->outer_context
11550 && (outer->outer_context->region_type
11551 == ORT_WORKSHARE)
11552 && outer->outer_context->combined_loop)
11553 {
11554 outer = outer->outer_context;
11555 n = splay_tree_lookup (outer->variables,
11556 (splay_tree_key)decl);
11557 if (omp_check_private (outer, decl, false))
11558 outer = NULL;
11559 else if (n == NULL
11560 || ((n->value & GOVD_DATA_SHARE_CLASS)
11561 == 0))
11562 omp_add_variable (outer, decl,
11563 GOVD_LASTPRIVATE
11564 | GOVD_SEEN);
11565 else
11566 outer = NULL;
11567 }
11568 if (outer && outer->outer_context
11569 && ((outer->outer_context->region_type
11570 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11571 || (((outer->region_type & ORT_TASKLOOP)
11572 == ORT_TASKLOOP)
11573 && (outer->outer_context->region_type
11574 == ORT_COMBINED_PARALLEL))))
11575 {
11576 outer = outer->outer_context;
11577 n = splay_tree_lookup (outer->variables,
11578 (splay_tree_key)decl);
11579 if (n == NULL
11580 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11581 omp_add_variable (outer, decl,
11582 GOVD_SHARED | GOVD_SEEN);
11583 else
11584 outer = NULL;
11585 }
11586 if (outer && outer->outer_context)
11587 omp_notice_variable (outer->outer_context, decl,
11588 true);
11589 }
11590 }
11591 }
11592 }
11593 else
11594 {
11595 bool lastprivate
11596 = (!has_decl_expr
11597 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
11598 if (TREE_PRIVATE (t))
11599 lastprivate = false;
11600 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
11601 {
11602 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11603 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
11604 lastprivate = false;
11605 }
11606
11607 struct gimplify_omp_ctx *outer
11608 = gimplify_omp_ctxp->outer_context;
11609 if (outer && lastprivate)
11610 {
11611 if (outer->region_type == ORT_WORKSHARE
11612 && outer->combined_loop)
11613 {
11614 n = splay_tree_lookup (outer->variables,
11615 (splay_tree_key)decl);
11616 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11617 {
11618 lastprivate = false;
11619 outer = NULL;
11620 }
11621 else if (outer->outer_context
11622 && (outer->outer_context->region_type
11623 == ORT_COMBINED_PARALLEL))
11624 outer = outer->outer_context;
11625 else if (omp_check_private (outer, decl, false))
11626 outer = NULL;
11627 }
11628 else if (((outer->region_type & ORT_TASKLOOP)
11629 == ORT_TASKLOOP)
11630 && outer->combined_loop
11631 && !omp_check_private (gimplify_omp_ctxp,
11632 decl, false))
11633 ;
11634 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11635 {
11636 omp_notice_variable (outer, decl, true);
11637 outer = NULL;
11638 }
11639 if (outer)
11640 {
11641 n = splay_tree_lookup (outer->variables,
11642 (splay_tree_key)decl);
11643 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11644 {
11645 omp_add_variable (outer, decl,
11646 GOVD_LASTPRIVATE | GOVD_SEEN);
11647 if (outer->region_type == ORT_COMBINED_PARALLEL
11648 && outer->outer_context
11649 && (outer->outer_context->region_type
11650 == ORT_WORKSHARE)
11651 && outer->outer_context->combined_loop)
11652 {
11653 outer = outer->outer_context;
11654 n = splay_tree_lookup (outer->variables,
11655 (splay_tree_key)decl);
11656 if (omp_check_private (outer, decl, false))
11657 outer = NULL;
11658 else if (n == NULL
11659 || ((n->value & GOVD_DATA_SHARE_CLASS)
11660 == 0))
11661 omp_add_variable (outer, decl,
11662 GOVD_LASTPRIVATE
11663 | GOVD_SEEN);
11664 else
11665 outer = NULL;
11666 }
11667 if (outer && outer->outer_context
11668 && ((outer->outer_context->region_type
11669 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11670 || (((outer->region_type & ORT_TASKLOOP)
11671 == ORT_TASKLOOP)
11672 && (outer->outer_context->region_type
11673 == ORT_COMBINED_PARALLEL))))
11674 {
11675 outer = outer->outer_context;
11676 n = splay_tree_lookup (outer->variables,
11677 (splay_tree_key)decl);
11678 if (n == NULL
11679 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11680 omp_add_variable (outer, decl,
11681 GOVD_SHARED | GOVD_SEEN);
11682 else
11683 outer = NULL;
11684 }
11685 if (outer && outer->outer_context)
11686 omp_notice_variable (outer->outer_context, decl,
11687 true);
11688 }
11689 }
11690 }
11691
11692 c = build_omp_clause (input_location,
11693 lastprivate ? OMP_CLAUSE_LASTPRIVATE
11694 : OMP_CLAUSE_PRIVATE);
11695 OMP_CLAUSE_DECL (c) = decl;
11696 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11697 OMP_FOR_CLAUSES (for_stmt) = c;
11698 omp_add_variable (gimplify_omp_ctxp, decl,
11699 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
11700 | GOVD_EXPLICIT | GOVD_SEEN);
11701 c = NULL_TREE;
11702 }
11703 }
11704 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
11705 {
11706 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11707 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11708 (splay_tree_key) decl);
11709 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
11710 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11711 OMP_CLAUSE_LASTPRIVATE);
11712 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11713 OMP_CLAUSE_LASTPRIVATE))
11714 if (OMP_CLAUSE_DECL (c3) == decl)
11715 {
11716 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11717 "conditional %<lastprivate%> on loop "
11718 "iterator %qD ignored", decl);
11719 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11720 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11721 }
11722 }
11723 else
11724 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
11725
11726 /* If DECL is not a gimple register, create a temporary variable to act
11727    as an iteration counter. This is valid, since DECL cannot be
11728    modified in the body of the loop. Do the same for any iteration
11729    variables of a simd with collapse > 1, where the iterator variables
11730    must be lastprivate. */
11731 if (orig_for_stmt != for_stmt)
11732 var = decl;
11733 else if (!is_gimple_reg (decl)
11734 || (ort == ORT_SIMD
11735 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
11736 {
11737 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11738 /* Make sure omp_add_variable is not called on it prematurely.
11739 We call it ourselves a few lines later. */
11740 gimplify_omp_ctxp = NULL;
11741 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11742 gimplify_omp_ctxp = ctx;
11743 TREE_OPERAND (t, 0) = var;
11744
11745 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
11746
11747 if (ort == ORT_SIMD
11748 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11749 {
11750 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11751 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
11752 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
11753 OMP_CLAUSE_DECL (c2) = var;
11754 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
11755 OMP_FOR_CLAUSES (for_stmt) = c2;
11756 omp_add_variable (gimplify_omp_ctxp, var,
11757 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
11758 if (c == NULL_TREE)
11759 {
11760 c = c2;
11761 c2 = NULL_TREE;
11762 }
11763 }
11764 else
11765 omp_add_variable (gimplify_omp_ctxp, var,
11766 GOVD_PRIVATE | GOVD_SEEN);
11767 }
11768 else
11769 var = decl;
11770
11771 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11772 is_gimple_val, fb_rvalue, false);
11773 ret = MIN (ret, tret);
11774 if (ret == GS_ERROR)
11775 return ret;
11776
11777 /* Handle OMP_FOR_COND. */
11778 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11779 gcc_assert (COMPARISON_CLASS_P (t));
11780 gcc_assert (TREE_OPERAND (t, 0) == decl);
11781
11782 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11783 is_gimple_val, fb_rvalue, false);
11784 ret = MIN (ret, tret);
11785
11786 /* Handle OMP_FOR_INCR. */
11787 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11788 switch (TREE_CODE (t))
11789 {
11790 case PREINCREMENT_EXPR:
11791 case POSTINCREMENT_EXPR:
11792 {
11793 tree decl = TREE_OPERAND (t, 0);
11794 /* c_omp_for_incr_canonicalize_ptr() should have been
11795 called to massage things appropriately. */
11796 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11797
11798 if (orig_for_stmt != for_stmt)
11799 break;
11800 t = build_int_cst (TREE_TYPE (decl), 1);
11801 if (c)
11802 OMP_CLAUSE_LINEAR_STEP (c) = t;
11803 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11804 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11805 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11806 break;
11807 }
11808
11809 case PREDECREMENT_EXPR:
11810 case POSTDECREMENT_EXPR:
11811 /* c_omp_for_incr_canonicalize_ptr() should have been
11812 called to massage things appropriately. */
11813 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11814 if (orig_for_stmt != for_stmt)
11815 break;
11816 t = build_int_cst (TREE_TYPE (decl), -1);
11817 if (c)
11818 OMP_CLAUSE_LINEAR_STEP (c) = t;
11819 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11820 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11821 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11822 break;
11823
11824 case MODIFY_EXPR:
11825 gcc_assert (TREE_OPERAND (t, 0) == decl);
11826 TREE_OPERAND (t, 0) = var;
11827
11828 t = TREE_OPERAND (t, 1);
11829 switch (TREE_CODE (t))
11830 {
11831 case PLUS_EXPR:
11832 if (TREE_OPERAND (t, 1) == decl)
11833 {
11834 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11835 TREE_OPERAND (t, 0) = var;
11836 break;
11837 }
11838
11839 /* Fallthru. */
11840 case MINUS_EXPR:
11841 case POINTER_PLUS_EXPR:
11842 gcc_assert (TREE_OPERAND (t, 0) == decl);
11843 TREE_OPERAND (t, 0) = var;
11844 break;
11845 default:
11846 gcc_unreachable ();
11847 }
11848
11849 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11850 is_gimple_val, fb_rvalue, false);
11851 ret = MIN (ret, tret);
11852 if (c)
11853 {
11854 tree step = TREE_OPERAND (t, 1);
11855 tree stept = TREE_TYPE (decl);
11856 if (POINTER_TYPE_P (stept))
11857 stept = sizetype;
11858 step = fold_convert (stept, step);
11859 if (TREE_CODE (t) == MINUS_EXPR)
11860 step = fold_build1 (NEGATE_EXPR, stept, step);
11861 OMP_CLAUSE_LINEAR_STEP (c) = step;
11862 if (step != TREE_OPERAND (t, 1))
11863 {
11864 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11865 &for_pre_body, NULL,
11866 is_gimple_val, fb_rvalue, false);
11867 ret = MIN (ret, tret);
11868 }
11869 }
11870 break;
11871
11872 default:
11873 gcc_unreachable ();
11874 }
11875
11876 if (c2)
11877 {
11878 gcc_assert (c);
11879 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11880 }
11881
11882 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
11883 {
11884 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
11885 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11886 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11887 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11888 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11889 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11890 && OMP_CLAUSE_DECL (c) == decl)
11891 {
11892 if (is_doacross && (collapse == 1 || i >= collapse))
11893 t = var;
11894 else
11895 {
11896 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11897 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11898 gcc_assert (TREE_OPERAND (t, 0) == var);
11899 t = TREE_OPERAND (t, 1);
11900 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11901 || TREE_CODE (t) == MINUS_EXPR
11902 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11903 gcc_assert (TREE_OPERAND (t, 0) == var);
11904 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11905 is_doacross ? var : decl,
11906 TREE_OPERAND (t, 1));
11907 }
11908 gimple_seq *seq;
11909 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11910 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11911 else
11912 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
11913 push_gimplify_context ();
11914 gimplify_assign (decl, t, seq);
11915 gimple *bind = NULL;
11916 if (gimplify_ctxp->temps)
11917 {
11918 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
11919 *seq = NULL;
11920 gimplify_seq_add_stmt (seq, bind);
11921 }
11922 pop_gimplify_context (bind);
11923 }
11924 }
11925 }
11926
11927 BITMAP_FREE (has_decl_expr);
11928
11929 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11930 || (loop_p && orig_for_stmt == for_stmt))
11931 {
11932 push_gimplify_context ();
11933 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11934 {
11935 OMP_FOR_BODY (orig_for_stmt)
11936 = build3 (BIND_EXPR, void_type_node, NULL,
11937 OMP_FOR_BODY (orig_for_stmt), NULL);
11938 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11939 }
11940 }
11941
11942 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11943 &for_body);
11944
11945 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11946 || (loop_p && orig_for_stmt == for_stmt))
11947 {
11948 if (gimple_code (g) == GIMPLE_BIND)
11949 pop_gimplify_context (g);
11950 else
11951 pop_gimplify_context (NULL);
11952 }
11953
11954 if (orig_for_stmt != for_stmt)
11955 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11956 {
11957 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11958 decl = TREE_OPERAND (t, 0);
11959 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11960 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11961 gimplify_omp_ctxp = ctx->outer_context;
11962 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11963 gimplify_omp_ctxp = ctx;
11964 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11965 TREE_OPERAND (t, 0) = var;
11966 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11967 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
11968 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
11969 }
11970
11971 gimplify_adjust_omp_clauses (pre_p, for_body,
11972 &OMP_FOR_CLAUSES (orig_for_stmt),
11973 TREE_CODE (orig_for_stmt));
11974
11975 int kind;
11976 switch (TREE_CODE (orig_for_stmt))
11977 {
11978 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
11979 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
11980 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
11981 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
11982 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
11983 default:
11984 gcc_unreachable ();
11985 }
11986 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
11987 {
11988 gimplify_seq_add_seq (pre_p, for_pre_body);
11989 for_pre_body = NULL;
11990 }
11991 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
11992 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
11993 for_pre_body);
11994 if (orig_for_stmt != for_stmt)
11995 gimple_omp_for_set_combined_p (gfor, true);
11996 if (gimplify_omp_ctxp
11997 && (gimplify_omp_ctxp->combined_loop
11998 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
11999 && gimplify_omp_ctxp->outer_context
12000 && gimplify_omp_ctxp->outer_context->combined_loop)))
12001 {
12002 gimple_omp_for_set_combined_into_p (gfor, true);
12003 if (gimplify_omp_ctxp->combined_loop)
12004 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
12005 else
12006 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
12007 }
12008
12009 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12010 {
12011 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12012 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
12013 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
12014 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12015 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
12016 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
12017 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12018 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
12019 }
12020
12021 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
12022 constructs with GIMPLE_OMP_TASK sandwiched in between them.
12023 The outer taskloop stands for computing the number of iterations,
12024 counts for collapsed loops and holding taskloop specific clauses.
12025 The task construct stands for the effect of data sharing on the
12026 explicit task it creates and the inner taskloop stands for expansion
12027 of the static loop inside of the explicit task construct. */
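/* Schematically, the generated nesting is (a sketch):

     GIMPLE_OMP_FOR (taskloop, outer: iteration counts, taskloop clauses)
       GIMPLE_OMP_TASK (data-sharing clauses)
         GIMPLE_OMP_FOR (taskloop, inner: the actual loop)  */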
12028 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12029 {
12030 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
12031 tree task_clauses = NULL_TREE;
12032 tree c = *gfor_clauses_ptr;
12033 tree *gtask_clauses_ptr = &task_clauses;
12034 tree outer_for_clauses = NULL_TREE;
12035 tree *gforo_clauses_ptr = &outer_for_clauses;
12036 for (; c; c = OMP_CLAUSE_CHAIN (c))
12037 switch (OMP_CLAUSE_CODE (c))
12038 {
12039 /* These clauses are allowed on the task; move them there. */
12040 case OMP_CLAUSE_SHARED:
12041 case OMP_CLAUSE_FIRSTPRIVATE:
12042 case OMP_CLAUSE_DEFAULT:
12043 case OMP_CLAUSE_IF:
12044 case OMP_CLAUSE_UNTIED:
12045 case OMP_CLAUSE_FINAL:
12046 case OMP_CLAUSE_MERGEABLE:
12047 case OMP_CLAUSE_PRIORITY:
12048 case OMP_CLAUSE_REDUCTION:
12049 case OMP_CLAUSE_IN_REDUCTION:
12050 *gtask_clauses_ptr = c;
12051 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12052 break;
12053 case OMP_CLAUSE_PRIVATE:
12054 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
12055 {
12056 /* We want private on outer for and firstprivate
12057 on task. */
12058 *gtask_clauses_ptr
12059 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12060 OMP_CLAUSE_FIRSTPRIVATE);
12061 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12062 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12063 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12064 *gforo_clauses_ptr = c;
12065 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12066 }
12067 else
12068 {
12069 *gtask_clauses_ptr = c;
12070 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12071 }
12072 break;
12073 /* These clauses go into outer taskloop clauses. */
12074 case OMP_CLAUSE_GRAINSIZE:
12075 case OMP_CLAUSE_NUM_TASKS:
12076 case OMP_CLAUSE_NOGROUP:
12077 *gforo_clauses_ptr = c;
12078 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12079 break;
12080 /* Taskloop clause we duplicate on both taskloops. */
12081 case OMP_CLAUSE_COLLAPSE:
12082 *gfor_clauses_ptr = c;
12083 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12084 *gforo_clauses_ptr = copy_node (c);
12085 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12086 break;
12087 /* For lastprivate, keep the clause on the inner taskloop, and add
12088    a shared clause on the task. If the same decl is also firstprivate,
12089    also add a firstprivate clause on the inner taskloop. */
12090 case OMP_CLAUSE_LASTPRIVATE:
12091 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12092 {
12093 /* For taskloop C++ lastprivate IVs, we want:
12094 1) private on outer taskloop
12095 2) firstprivate and shared on task
12096 3) lastprivate on inner taskloop */
12097 *gtask_clauses_ptr
12098 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12099 OMP_CLAUSE_FIRSTPRIVATE);
12100 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12101 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12102 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12103 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
12104 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12105 OMP_CLAUSE_PRIVATE);
12106 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
12107 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
12108 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
12109 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12110 }
12111 *gfor_clauses_ptr = c;
12112 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12113 *gtask_clauses_ptr
12114 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
12115 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12116 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12117 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
12118 gtask_clauses_ptr
12119 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12120 break;
12121 default:
12122 gcc_unreachable ();
12123 }
12124 *gfor_clauses_ptr = NULL_TREE;
12125 *gtask_clauses_ptr = NULL_TREE;
12126 *gforo_clauses_ptr = NULL_TREE;
12127 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
12128 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
12129 NULL_TREE, NULL_TREE, NULL_TREE);
12130 gimple_omp_task_set_taskloop_p (g, true);
12131 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
12132 gomp_for *gforo
12133 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
12134 gimple_omp_for_collapse (gfor),
12135 gimple_omp_for_pre_body (gfor));
12136 gimple_omp_for_set_pre_body (gfor, NULL);
12137 gimple_omp_for_set_combined_p (gforo, true);
12138 gimple_omp_for_set_combined_into_p (gfor, true);
12139 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
12140 {
12141 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
12142 tree v = create_tmp_var (type);
12143 gimple_omp_for_set_index (gforo, i, v);
12144 t = unshare_expr (gimple_omp_for_initial (gfor, i));
12145 gimple_omp_for_set_initial (gforo, i, t);
12146 gimple_omp_for_set_cond (gforo, i,
12147 gimple_omp_for_cond (gfor, i));
12148 t = unshare_expr (gimple_omp_for_final (gfor, i));
12149 gimple_omp_for_set_final (gforo, i, t);
12150 t = unshare_expr (gimple_omp_for_incr (gfor, i));
12151 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
12152 TREE_OPERAND (t, 0) = v;
12153 gimple_omp_for_set_incr (gforo, i, t);
12154 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
12155 OMP_CLAUSE_DECL (t) = v;
12156 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
12157 gimple_omp_for_set_clauses (gforo, t);
12158 }
12159 gimplify_seq_add_stmt (pre_p, gforo);
12160 }
12161 else
12162 gimplify_seq_add_stmt (pre_p, gfor);
12163
12164 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
12165 {
12166 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12167 unsigned lastprivate_conditional = 0;
12168 while (ctx
12169 && (ctx->region_type == ORT_TARGET_DATA
12170 || ctx->region_type == ORT_TASKGROUP))
12171 ctx = ctx->outer_context;
12172 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
12173 for (tree c = gimple_omp_for_clauses (gfor);
12174 c; c = OMP_CLAUSE_CHAIN (c))
12175 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12176 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12177 ++lastprivate_conditional;
12178 if (lastprivate_conditional)
12179 {
12180 struct omp_for_data fd;
12181 omp_extract_for_data (gfor, &fd, NULL);
12182 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
12183 lastprivate_conditional);
12184 tree var = create_tmp_var_raw (type);
12185 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
12186 OMP_CLAUSE_DECL (c) = var;
12187 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12188 gimple_omp_for_set_clauses (gfor, c);
12189 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
12190 }
12191 }
12192 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
12193 {
12194 unsigned lastprivate_conditional = 0;
12195 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
12196 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12197 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12198 ++lastprivate_conditional;
12199 if (lastprivate_conditional)
12200 {
12201 struct omp_for_data fd;
12202 omp_extract_for_data (gfor, &fd, NULL);
12203 tree type = unsigned_type_for (fd.iter_type);
12204 while (lastprivate_conditional--)
12205 {
12206 tree c = build_omp_clause (UNKNOWN_LOCATION,
12207 OMP_CLAUSE__CONDTEMP_);
12208 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
12209 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12210 gimple_omp_for_set_clauses (gfor, c);
12211 }
12212 }
12213 }
12214
12215 if (ret != GS_ALL_DONE)
12216 return GS_ERROR;
12217 *expr_p = NULL_TREE;
12218 return GS_ALL_DONE;
12219 }
12220
12221 /* Helper for gimplify_omp_loop, called through walk_tree. */
12222
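/* It rewrites occurrences of the reduction placeholder decls of clause
   data[0] into the corresponding placeholders of clause data[1], so that
   a duplicated reduction clause refers to its own placeholders.  */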
12223 static tree
12224 replace_reduction_placeholders (tree *tp, int *walk_subtrees, void *data)
12225 {
12226 if (DECL_P (*tp))
12227 {
12228 tree *d = (tree *) data;
12229 if (*tp == OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[0]))
12230 {
12231 *tp = OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[1]);
12232 *walk_subtrees = 0;
12233 }
12234 else if (*tp == OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[0]))
12235 {
12236 *tp = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[1]);
12237 *walk_subtrees = 0;
12238 }
12239 }
12240 return NULL_TREE;
12241 }
12242
12243 /* Gimplify the gross structure of an OMP_LOOP statement. */
12244
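/* A sketch of the lowering: the loop itself is turned into an OMP_SIMD
   (see the TREE_SET_CODE below) and, depending on whether the effective
   bind is parallel or teams, one or two extra passes wrap the result in
   enclosing constructs such as the OMP_PARALLEL built for pass 2 below.  */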
12245 static enum gimplify_status
12246 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
12247 {
12248 tree for_stmt = *expr_p;
12249 tree clauses = OMP_FOR_CLAUSES (for_stmt);
12250 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
12251 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
12252 int i;
12253
12254 /* If order is not present, the behavior is as if order(concurrent)
12255 appeared. */
12256 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
12257 if (order == NULL_TREE)
12258 {
12259 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
12260 OMP_CLAUSE_CHAIN (order) = clauses;
12261 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
12262 }
12263
12264 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
12265 if (bind == NULL_TREE)
12266 {
12267 if (!flag_openmp) /* flag_openmp_simd */
12268 ;
12269 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
12270 kind = OMP_CLAUSE_BIND_TEAMS;
12271 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
12272 kind = OMP_CLAUSE_BIND_PARALLEL;
12273 else
12274 {
12275 for (; octx; octx = octx->outer_context)
12276 {
12277 if ((octx->region_type & ORT_ACC) != 0
12278 || octx->region_type == ORT_NONE
12279 || octx->region_type == ORT_IMPLICIT_TARGET)
12280 continue;
12281 break;
12282 }
12283 if (octx == NULL && !in_omp_construct)
12284 error_at (EXPR_LOCATION (for_stmt),
12285 "%<bind%> clause not specified on a %<loop%> "
12286 "construct not nested inside another OpenMP construct");
12287 }
12288 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
12289 OMP_CLAUSE_CHAIN (bind) = clauses;
12290 OMP_CLAUSE_BIND_KIND (bind) = kind;
12291 OMP_FOR_CLAUSES (for_stmt) = bind;
12292 }
12293 else
12294 switch (OMP_CLAUSE_BIND_KIND (bind))
12295 {
12296 case OMP_CLAUSE_BIND_THREAD:
12297 break;
12298 case OMP_CLAUSE_BIND_PARALLEL:
12299 if (!flag_openmp) /* flag_openmp_simd */
12300 {
12301 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12302 break;
12303 }
12304 for (; octx; octx = octx->outer_context)
12305 if (octx->region_type == ORT_SIMD
12306 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
12307 {
12308 error_at (EXPR_LOCATION (for_stmt),
12309 "%<bind(parallel)%> on a %<loop%> construct nested "
12310 "inside %<simd%> construct");
12311 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12312 break;
12313 }
12314 kind = OMP_CLAUSE_BIND_PARALLEL;
12315 break;
12316 case OMP_CLAUSE_BIND_TEAMS:
12317 if (!flag_openmp) /* flag_openmp_simd */
12318 {
12319 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12320 break;
12321 }
12322 if ((octx
12323 && octx->region_type != ORT_IMPLICIT_TARGET
12324 && octx->region_type != ORT_NONE
12325 && (octx->region_type & ORT_TEAMS) == 0)
12326 || in_omp_construct)
12327 {
12328 error_at (EXPR_LOCATION (for_stmt),
12329 "%<bind(teams)%> on a %<loop%> region not strictly "
12330 "nested inside of a %<teams%> region");
12331 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12332 break;
12333 }
12334 kind = OMP_CLAUSE_BIND_TEAMS;
12335 break;
12336 default:
12337 gcc_unreachable ();
12338 }
12339
12340 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
12341 switch (OMP_CLAUSE_CODE (*pc))
12342 {
12343 case OMP_CLAUSE_REDUCTION:
12344 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
12345 {
12346 error_at (OMP_CLAUSE_LOCATION (*pc),
12347 "%<inscan%> %<reduction%> clause on "
12348 "%qs construct", "loop");
12349 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
12350 }
12351 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
12352 {
12353 error_at (OMP_CLAUSE_LOCATION (*pc),
12354 "invalid %<task%> reduction modifier on construct "
12355 "other than %<parallel%>, %<for%> or %<sections%>");
12356 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
12357 }
12358 pc = &OMP_CLAUSE_CHAIN (*pc);
12359 break;
12360 case OMP_CLAUSE_LASTPRIVATE:
12361 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12362 {
12363 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12364 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12365 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
12366 break;
12367 if (OMP_FOR_ORIG_DECLS (for_stmt)
12368 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12369 i)) == TREE_LIST
12370 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12371 i)))
12372 {
12373 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12374 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
12375 break;
12376 }
12377 }
12378 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
12379 {
12380 error_at (OMP_CLAUSE_LOCATION (*pc),
12381 "%<lastprivate%> clause on a %<loop%> construct refers "
12382 "to a variable %qD which is not the loop iterator",
12383 OMP_CLAUSE_DECL (*pc));
12384 *pc = OMP_CLAUSE_CHAIN (*pc);
12385 break;
12386 }
12387 pc = &OMP_CLAUSE_CHAIN (*pc);
12388 break;
12389 default:
12390 pc = &OMP_CLAUSE_CHAIN (*pc);
12391 break;
12392 }
12393
12394 TREE_SET_CODE (for_stmt, OMP_SIMD);
12395
12396 int last;
12397 switch (kind)
12398 {
12399 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
12400 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
12401 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
12402 }
12403 for (int pass = 1; pass <= last; pass++)
12404 {
12405 if (pass == 2)
12406 {
12407 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12408 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
12409 *expr_p = make_node (OMP_PARALLEL);
12410 TREE_TYPE (*expr_p) = void_type_node;
12411 OMP_PARALLEL_BODY (*expr_p) = bind;
12412 OMP_PARALLEL_COMBINED (*expr_p) = 1;
12413 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
12414 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
12415 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12416 if (OMP_FOR_ORIG_DECLS (for_stmt)
12417 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
12418 == TREE_LIST))
12419 {
12420 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12421 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
12422 {
12423 *pc = build_omp_clause (UNKNOWN_LOCATION,
12424 OMP_CLAUSE_FIRSTPRIVATE);
12425 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
12426 pc = &OMP_CLAUSE_CHAIN (*pc);
12427 }
12428 }
12429 }
12430 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
12431 tree *pc = &OMP_FOR_CLAUSES (t);
12432 TREE_TYPE (t) = void_type_node;
12433 OMP_FOR_BODY (t) = *expr_p;
12434 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
12435 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12436 switch (OMP_CLAUSE_CODE (c))
12437 {
12438 case OMP_CLAUSE_BIND:
12439 case OMP_CLAUSE_ORDER:
12440 case OMP_CLAUSE_COLLAPSE:
12441 *pc = copy_node (c);
12442 pc = &OMP_CLAUSE_CHAIN (*pc);
12443 break;
12444 case OMP_CLAUSE_PRIVATE:
12445 case OMP_CLAUSE_FIRSTPRIVATE:
12446 /* Only needed on innermost. */
12447 break;
12448 case OMP_CLAUSE_LASTPRIVATE:
12449 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
12450 {
12451 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12452 OMP_CLAUSE_FIRSTPRIVATE);
12453 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
12454 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12455 pc = &OMP_CLAUSE_CHAIN (*pc);
12456 }
12457 *pc = copy_node (c);
12458 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
12459 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12460 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12461 {
12462 if (pass != last)
12463 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
12464 else
12465 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12466 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
12467 }
12468 pc = &OMP_CLAUSE_CHAIN (*pc);
12469 break;
12470 case OMP_CLAUSE_REDUCTION:
12471 *pc = copy_node (c);
12472 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
12473 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12474 OMP_CLAUSE_REDUCTION_INIT (*pc)
12475 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
12476 OMP_CLAUSE_REDUCTION_MERGE (*pc)
12477 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
12478 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
12479 {
12480 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
12481 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
12482 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
12483 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
12484 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
12485 tree nc = *pc;
12486 tree data[2] = { c, nc };
12487 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc),
12488 replace_reduction_placeholders,
12489 data);
12490 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc),
12491 replace_reduction_placeholders,
12492 data);
12493 }
12494 pc = &OMP_CLAUSE_CHAIN (*pc);
12495 break;
12496 default:
12497 gcc_unreachable ();
12498 }
12499 *pc = NULL_TREE;
12500 *expr_p = t;
12501 }
12502 return gimplify_omp_for (expr_p, pre_p);
12503 }
12504
12505
12506 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
12507 of OMP_TARGET's body. */
12508
12509 static tree
12510 find_omp_teams (tree *tp, int *walk_subtrees, void *)
12511 {
12512 *walk_subtrees = 0;
12513 switch (TREE_CODE (*tp))
12514 {
12515 case OMP_TEAMS:
12516 return *tp;
12517 case BIND_EXPR:
12518 case STATEMENT_LIST:
12519 *walk_subtrees = 1;
12520 break;
12521 default:
12522 break;
12523 }
12524 return NULL_TREE;
12525 }
12526
12527 /* Helper function of optimize_target_teams, determine if the expression
12528 can be computed safely before the target construct on the host. */
12529
12530 static tree
12531 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
12532 {
12533 splay_tree_node n;
12534
12535 if (TYPE_P (*tp))
12536 {
12537 *walk_subtrees = 0;
12538 return NULL_TREE;
12539 }
12540 switch (TREE_CODE (*tp))
12541 {
12542 case VAR_DECL:
12543 case PARM_DECL:
12544 case RESULT_DECL:
12545 *walk_subtrees = 0;
12546 if (error_operand_p (*tp)
12547 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
12548 || DECL_HAS_VALUE_EXPR_P (*tp)
12549 || DECL_THREAD_LOCAL_P (*tp)
12550 || TREE_SIDE_EFFECTS (*tp)
12551 || TREE_THIS_VOLATILE (*tp))
12552 return *tp;
12553 if (is_global_var (*tp)
12554 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
12555 || lookup_attribute ("omp declare target link",
12556 DECL_ATTRIBUTES (*tp))))
12557 return *tp;
12558 if (VAR_P (*tp)
12559 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
12560 && !is_global_var (*tp)
12561 && decl_function_context (*tp) == current_function_decl)
12562 return *tp;
12563 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12564 (splay_tree_key) *tp);
12565 if (n == NULL)
12566 {
12567 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
12568 return NULL_TREE;
12569 return *tp;
12570 }
12571 else if (n->value & GOVD_LOCAL)
12572 return *tp;
12573 else if (n->value & GOVD_FIRSTPRIVATE)
12574 return NULL_TREE;
12575 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12576 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12577 return NULL_TREE;
12578 return *tp;
12579 case INTEGER_CST:
12580 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12581 return *tp;
12582 return NULL_TREE;
12583 case TARGET_EXPR:
12584 if (TARGET_EXPR_INITIAL (*tp)
12585 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
12586 return *tp;
12587 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
12588 walk_subtrees, NULL);
12589 /* Allow some reasonable subset of integral arithmetic. */
12590 case PLUS_EXPR:
12591 case MINUS_EXPR:
12592 case MULT_EXPR:
12593 case TRUNC_DIV_EXPR:
12594 case CEIL_DIV_EXPR:
12595 case FLOOR_DIV_EXPR:
12596 case ROUND_DIV_EXPR:
12597 case TRUNC_MOD_EXPR:
12598 case CEIL_MOD_EXPR:
12599 case FLOOR_MOD_EXPR:
12600 case ROUND_MOD_EXPR:
12601 case RDIV_EXPR:
12602 case EXACT_DIV_EXPR:
12603 case MIN_EXPR:
12604 case MAX_EXPR:
12605 case LSHIFT_EXPR:
12606 case RSHIFT_EXPR:
12607 case BIT_IOR_EXPR:
12608 case BIT_XOR_EXPR:
12609 case BIT_AND_EXPR:
12610 case NEGATE_EXPR:
12611 case ABS_EXPR:
12612 case BIT_NOT_EXPR:
12613 case NON_LVALUE_EXPR:
12614 CASE_CONVERT:
12615 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12616 return *tp;
12617 return NULL_TREE;
12618 /* And disallow anything else, except for comparisons. */
12619 default:
12620 if (COMPARISON_CLASS_P (*tp))
12621 return NULL_TREE;
12622 return *tp;
12623 }
12624 }
12625
12626 /* Try to determine if the num_teams and/or thread_limit expressions
12627 can have their values determined already before entering the
12628 target construct.
12629 INTEGER_CSTs trivially can, and so can integral decls that are
12630 firstprivate (explicitly or implicitly) or explicitly mapped with
12631 map(always, to:) or map(always, tofrom:) on the target region,
12632 as well as expressions involving simple arithmetic on those;
12633 function calls are not OK, nor is dereferencing something, etc.
12634 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
12635 EXPR based on what we find:
12636 0 stands for clause not specified at all, use implementation default,
12637 -1 stands for value that can't be determined easily before entering
12638 the target construct.
12639 If a teams construct is not present at all, use 1 for num_teams
12640 and 0 for thread_limit (only one team is involved, and the thread
12641 limit is implementation defined). */
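/* For example (illustrative only), given

       #pragma omp target firstprivate(n)
       #pragma omp teams num_teams(4) thread_limit(n + 1)

   num_teams(4) and thread_limit(n + 1) clauses are added to the
   OMP_TARGET, with n + 1 pre-evaluated before the target construct
   since n is host-computable there; an expression involving e.g. a
   function call would instead yield the -1 "not easily determinable"
   value.  */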
12642
12643 static void
12644 optimize_target_teams (tree target, gimple_seq *pre_p)
12645 {
12646 tree body = OMP_BODY (target);
12647 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
12648 tree num_teams = integer_zero_node;
12649 tree thread_limit = integer_zero_node;
12650 location_t num_teams_loc = EXPR_LOCATION (target);
12651 location_t thread_limit_loc = EXPR_LOCATION (target);
12652 tree c, *p, expr;
12653 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
12654
12655 if (teams == NULL_TREE)
12656 num_teams = integer_one_node;
12657 else
12658 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
12659 {
12660 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
12661 {
12662 p = &num_teams;
12663 num_teams_loc = OMP_CLAUSE_LOCATION (c);
12664 }
12665 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
12666 {
12667 p = &thread_limit;
12668 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
12669 }
12670 else
12671 continue;
12672 expr = OMP_CLAUSE_OPERAND (c, 0);
12673 if (TREE_CODE (expr) == INTEGER_CST)
12674 {
12675 *p = expr;
12676 continue;
12677 }
12678 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
12679 {
12680 *p = integer_minus_one_node;
12681 continue;
12682 }
12683 *p = expr;
12684 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
12685 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
12686 == GS_ERROR)
12687 {
12688 gimplify_omp_ctxp = target_ctx;
12689 *p = integer_minus_one_node;
12690 continue;
12691 }
12692 gimplify_omp_ctxp = target_ctx;
12693 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
12694 OMP_CLAUSE_OPERAND (c, 0) = *p;
12695 }
12696 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
12697 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
12698 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
12699 OMP_TARGET_CLAUSES (target) = c;
12700 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
12701 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
12702 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
12703 OMP_TARGET_CLAUSES (target) = c;
12704 }
12705
12706 /* Gimplify the gross structure of several OMP constructs. */
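/* As an illustrative sketch of what is built below for OMP_TARGET_DATA:

       GIMPLE_OMP_TARGET <kind data, map clauses>
         try
           {
             <gimplified body>
           }
         finally
           {
             GOMP_target_end_data ();
           }

   i.e. the body is wrapped in a GIMPLE_TRY_FINALLY whose cleanup calls
   the matching end builtin (GOACC_data_end for the OpenACC data
   constructs), all inside the GIMPLE_OMP_TARGET statement.  */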
12707
12708 static void
12709 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
12710 {
12711 tree expr = *expr_p;
12712 gimple *stmt;
12713 gimple_seq body = NULL;
12714 enum omp_region_type ort;
12715
12716 switch (TREE_CODE (expr))
12717 {
12718 case OMP_SECTIONS:
12719 case OMP_SINGLE:
12720 ort = ORT_WORKSHARE;
12721 break;
12722 case OMP_TARGET:
12723 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
12724 break;
12725 case OACC_KERNELS:
12726 ort = ORT_ACC_KERNELS;
12727 break;
12728 case OACC_PARALLEL:
12729 ort = ORT_ACC_PARALLEL;
12730 break;
12731 case OACC_SERIAL:
12732 ort = ORT_ACC_SERIAL;
12733 break;
12734 case OACC_DATA:
12735 ort = ORT_ACC_DATA;
12736 break;
12737 case OMP_TARGET_DATA:
12738 ort = ORT_TARGET_DATA;
12739 break;
12740 case OMP_TEAMS:
12741 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
12742 if (gimplify_omp_ctxp == NULL
12743 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
12744 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
12745 break;
12746 case OACC_HOST_DATA:
12747 ort = ORT_ACC_HOST_DATA;
12748 break;
12749 default:
12750 gcc_unreachable ();
12751 }
12752
12753 bool save_in_omp_construct = in_omp_construct;
12754 if ((ort & ORT_ACC) == 0)
12755 in_omp_construct = false;
12756 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
12757 TREE_CODE (expr));
12758 if (TREE_CODE (expr) == OMP_TARGET)
12759 optimize_target_teams (expr, pre_p);
12760 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
12761 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
12762 {
12763 push_gimplify_context ();
12764 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
12765 if (gimple_code (g) == GIMPLE_BIND)
12766 pop_gimplify_context (g);
12767 else
12768 pop_gimplify_context (NULL);
12769 if ((ort & ORT_TARGET_DATA) != 0)
12770 {
12771 enum built_in_function end_ix;
12772 switch (TREE_CODE (expr))
12773 {
12774 case OACC_DATA:
12775 case OACC_HOST_DATA:
12776 end_ix = BUILT_IN_GOACC_DATA_END;
12777 break;
12778 case OMP_TARGET_DATA:
12779 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
12780 break;
12781 default:
12782 gcc_unreachable ();
12783 }
12784 tree fn = builtin_decl_explicit (end_ix);
12785 g = gimple_build_call (fn, 0);
12786 gimple_seq cleanup = NULL;
12787 gimple_seq_add_stmt (&cleanup, g);
12788 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
12789 body = NULL;
12790 gimple_seq_add_stmt (&body, g);
12791 }
12792 }
12793 else
12794 gimplify_and_add (OMP_BODY (expr), &body);
12795 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
12796 TREE_CODE (expr));
12797 in_omp_construct = save_in_omp_construct;
12798
12799 switch (TREE_CODE (expr))
12800 {
12801 case OACC_DATA:
12802 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
12803 OMP_CLAUSES (expr));
12804 break;
12805 case OACC_KERNELS:
12806 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
12807 OMP_CLAUSES (expr));
12808 break;
12809 case OACC_HOST_DATA:
12810 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
12811 OMP_CLAUSES (expr));
12812 break;
12813 case OACC_PARALLEL:
12814 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
12815 OMP_CLAUSES (expr));
12816 break;
12817 case OACC_SERIAL:
12818 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
12819 OMP_CLAUSES (expr));
12820 break;
12821 case OMP_SECTIONS:
12822 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
12823 break;
12824 case OMP_SINGLE:
12825 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
12826 break;
12827 case OMP_TARGET:
12828 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
12829 OMP_CLAUSES (expr));
12830 break;
12831 case OMP_TARGET_DATA:
12832 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
12833 to be evaluated before the use_device_{ptr,addr} clauses if they
12834 refer to the same variables. */
12835 {
12836 tree use_device_clauses;
12837 tree *pc, *uc = &use_device_clauses;
12838 for (pc = &OMP_CLAUSES (expr); *pc; )
12839 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
12840 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
12841 {
12842 *uc = *pc;
12843 *pc = OMP_CLAUSE_CHAIN (*pc);
12844 uc = &OMP_CLAUSE_CHAIN (*uc);
12845 }
12846 else
12847 pc = &OMP_CLAUSE_CHAIN (*pc);
12848 *uc = NULL_TREE;
12849 *pc = use_device_clauses;
12850 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
12851 OMP_CLAUSES (expr));
12852 }
12853 break;
12854 case OMP_TEAMS:
12855 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
12856 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
12857 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
12858 break;
12859 default:
12860 gcc_unreachable ();
12861 }
12862
12863 gimplify_seq_add_stmt (pre_p, stmt);
12864 *expr_p = NULL_TREE;
12865 }
12866
12867 /* Gimplify the gross structure of OpenACC enter/exit data, update, and of
12868 OpenMP target update, target enter data and target exit data constructs. */
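/* For instance (illustrative), on

       #pragma acc exit data finalize copyout(x) delete(y)

   the GOMP_MAP_FROM and GOMP_MAP_RELEASE map kinds produced for x and y
   are rewritten below to GOMP_MAP_FORCE_FROM and GOMP_MAP_DELETE to
   implement the finalize semantics.  */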
12869
12870 static void
12871 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
12872 {
12873 tree expr = *expr_p;
12874 int kind;
12875 gomp_target *stmt;
12876 enum omp_region_type ort = ORT_WORKSHARE;
12877
12878 switch (TREE_CODE (expr))
12879 {
12880 case OACC_ENTER_DATA:
12881 case OACC_EXIT_DATA:
12882 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
12883 ort = ORT_ACC;
12884 break;
12885 case OACC_UPDATE:
12886 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
12887 ort = ORT_ACC;
12888 break;
12889 case OMP_TARGET_UPDATE:
12890 kind = GF_OMP_TARGET_KIND_UPDATE;
12891 break;
12892 case OMP_TARGET_ENTER_DATA:
12893 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
12894 break;
12895 case OMP_TARGET_EXIT_DATA:
12896 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
12897 break;
12898 default:
12899 gcc_unreachable ();
12900 }
12901 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
12902 ort, TREE_CODE (expr));
12903 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
12904 TREE_CODE (expr));
12905 if (TREE_CODE (expr) == OACC_UPDATE
12906 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12907 OMP_CLAUSE_IF_PRESENT))
12908 {
12909 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
12910 clause. */
12911 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12912 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
12913 switch (OMP_CLAUSE_MAP_KIND (c))
12914 {
12915 case GOMP_MAP_FORCE_TO:
12916 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
12917 break;
12918 case GOMP_MAP_FORCE_FROM:
12919 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
12920 break;
12921 default:
12922 break;
12923 }
12924 }
12925 else if (TREE_CODE (expr) == OACC_EXIT_DATA
12926 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12927 OMP_CLAUSE_FINALIZE))
12928 {
12929 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
12930 semantics. */
12931 bool have_clause = false;
12932 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12933 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
12934 switch (OMP_CLAUSE_MAP_KIND (c))
12935 {
12936 case GOMP_MAP_FROM:
12937 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
12938 have_clause = true;
12939 break;
12940 case GOMP_MAP_RELEASE:
12941 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
12942 have_clause = true;
12943 break;
12944 case GOMP_MAP_POINTER:
12945 case GOMP_MAP_TO_PSET:
12946 /* TODO PR92929: we may see these here, but they'll always follow
12947 one of the clauses above, and will be handled by libgomp as
12948 one group, so no handling required here. */
12949 gcc_assert (have_clause);
12950 break;
12951 case GOMP_MAP_DETACH:
12952 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
12953 have_clause = false;
12954 break;
12955 case GOMP_MAP_STRUCT:
12956 have_clause = false;
12957 break;
12958 default:
12959 gcc_unreachable ();
12960 }
12961 }
12962 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
12963
12964 gimplify_seq_add_stmt (pre_p, stmt);
12965 *expr_p = NULL_TREE;
12966 }
12967
12968 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
12969 stabilized the lhs of the atomic operation as *ADDR. Return true if
12970 EXPR is this stabilized form. */
12971
12972 static bool
12973 goa_lhs_expr_p (tree expr, tree addr)
12974 {
12975 /* Also include casts to other type variants. The C front end is fond
12976 of adding these for e.g. volatile variables. This is like
12977 STRIP_TYPE_NOPS but includes the main variant lookup. */
12978 STRIP_USELESS_TYPE_CONVERSION (expr);
12979
12980 if (TREE_CODE (expr) == INDIRECT_REF)
12981 {
12982 expr = TREE_OPERAND (expr, 0);
12983 while (expr != addr
12984 && (CONVERT_EXPR_P (expr)
12985 || TREE_CODE (expr) == NON_LVALUE_EXPR)
12986 && TREE_CODE (expr) == TREE_CODE (addr)
12987 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
12988 {
12989 expr = TREE_OPERAND (expr, 0);
12990 addr = TREE_OPERAND (addr, 0);
12991 }
12992 if (expr == addr)
12993 return true;
12994 return (TREE_CODE (addr) == ADDR_EXPR
12995 && TREE_CODE (expr) == ADDR_EXPR
12996 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
12997 }
12998 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
12999 return true;
13000 return false;
13001 }
13002
13003 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
13004 expression does not involve the lhs, evaluate it into a temporary.
13005 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
13006 or -1 if an error was encountered. */
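/* For example (names illustrative): stabilizing the rhs 'x * 2 + f (y)'
   of an atomic update whose lhs is x replaces the occurrence of x with
   LHS_VAR (the temporary holding the atomic load), while f (y), which
   does not involve the lhs, is evaluated into a temporary in PRE_P,
   leaving something like 'lhs_var * 2 + t1'.  */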
13007
13008 static int
13009 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
13010 tree lhs_var)
13011 {
13012 tree expr = *expr_p;
13013 int saw_lhs;
13014
13015 if (goa_lhs_expr_p (expr, lhs_addr))
13016 {
13017 *expr_p = lhs_var;
13018 return 1;
13019 }
13020 if (is_gimple_val (expr))
13021 return 0;
13022
13023 saw_lhs = 0;
13024 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
13025 {
13026 case tcc_binary:
13027 case tcc_comparison:
13028 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
13029 lhs_var);
13030 /* FALLTHRU */
13031 case tcc_unary:
13032 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
13033 lhs_var);
13034 break;
13035 case tcc_expression:
13036 switch (TREE_CODE (expr))
13037 {
13038 case TRUTH_ANDIF_EXPR:
13039 case TRUTH_ORIF_EXPR:
13040 case TRUTH_AND_EXPR:
13041 case TRUTH_OR_EXPR:
13042 case TRUTH_XOR_EXPR:
13043 case BIT_INSERT_EXPR:
13044 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
13045 lhs_addr, lhs_var);
13046 /* FALLTHRU */
13047 case TRUTH_NOT_EXPR:
13048 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13049 lhs_addr, lhs_var);
13050 break;
13051 case COMPOUND_EXPR:
13052 /* Break out any preevaluations from cp_build_modify_expr. */
13053 for (; TREE_CODE (expr) == COMPOUND_EXPR;
13054 expr = TREE_OPERAND (expr, 1))
13055 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
13056 *expr_p = expr;
13057 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
13058 default:
13059 break;
13060 }
13061 break;
13062 case tcc_reference:
13063 if (TREE_CODE (expr) == BIT_FIELD_REF)
13064 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13065 lhs_addr, lhs_var);
13066 break;
13067 default:
13068 break;
13069 }
13070
13071 if (saw_lhs == 0)
13072 {
13073 enum gimplify_status gs;
13074 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
13075 if (gs != GS_ALL_DONE)
13076 saw_lhs = -1;
13077 }
13078
13079 return saw_lhs;
13080 }
13081
13082 /* Gimplify an OMP_ATOMIC statement. */
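/* A minimal sketch of the expansion (temporary names illustrative):

       #pragma omp atomic
       x = x + expr;

   becomes

       tmp_load = GIMPLE_OMP_ATOMIC_LOAD <&x>;
       t = tmp_load + expr;
       GIMPLE_OMP_ATOMIC_STORE <t>;

   with occurrences of x in the rhs replaced by tmp_load via
   goa_stabilize_expr.  For OMP_ATOMIC_READ and the two capture forms,
   the loaded or stored value additionally becomes the value of the
   whole expression.  */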
13083
13084 static enum gimplify_status
13085 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
13086 {
13087 tree addr = TREE_OPERAND (*expr_p, 0);
13088 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
13089 ? NULL : TREE_OPERAND (*expr_p, 1);
13090 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
13091 tree tmp_load;
13092 gomp_atomic_load *loadstmt;
13093 gomp_atomic_store *storestmt;
13094
13095 tmp_load = create_tmp_reg (type);
13096 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
13097 return GS_ERROR;
13098
13099 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
13100 != GS_ALL_DONE)
13101 return GS_ERROR;
13102
13103 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
13104 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13105 gimplify_seq_add_stmt (pre_p, loadstmt);
13106 if (rhs)
13107 {
13108 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13109 representatives. Use BIT_FIELD_REF on the lhs instead. */
13110 if (TREE_CODE (rhs) == BIT_INSERT_EXPR
13111 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
13112 {
13113 tree bitpos = TREE_OPERAND (rhs, 2);
13114 tree op1 = TREE_OPERAND (rhs, 1);
13115 tree bitsize;
13116 tree tmp_store = tmp_load;
13117 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
13118 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
13119 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
13120 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
13121 else
13122 bitsize = TYPE_SIZE (TREE_TYPE (op1));
13123 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
13124 tree t = build2_loc (EXPR_LOCATION (rhs),
13125 MODIFY_EXPR, void_type_node,
13126 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
13127 TREE_TYPE (op1), tmp_store, bitsize,
13128 bitpos), op1);
13129 gimplify_and_add (t, pre_p);
13130 rhs = tmp_store;
13131 }
13132 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
13133 != GS_ALL_DONE)
13134 return GS_ERROR;
13135 }
13136
13137 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
13138 rhs = tmp_load;
13139 storestmt
13140 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13141 gimplify_seq_add_stmt (pre_p, storestmt);
13142 switch (TREE_CODE (*expr_p))
13143 {
13144 case OMP_ATOMIC_READ:
13145 case OMP_ATOMIC_CAPTURE_OLD:
13146 *expr_p = tmp_load;
13147 gimple_omp_atomic_set_need_value (loadstmt);
13148 break;
13149 case OMP_ATOMIC_CAPTURE_NEW:
13150 *expr_p = rhs;
13151 gimple_omp_atomic_set_need_value (storestmt);
13152 break;
13153 default:
13154 *expr_p = NULL;
13155 break;
13156 }
13157
13158 return GS_ALL_DONE;
13159 }
13160
13161 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13162 body, and adding some EH bits. */
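/* E.g. (illustrative) '__transaction_atomic { body }' becomes a
   GIMPLE_TRANSACTION tuple containing the gimplified body, with the
   GTMA_IS_OUTER or GTMA_IS_RELAXED subcode set for
   '__transaction_atomic [[outer]]' or '__transaction_relaxed'.  */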
13163
13164 static enum gimplify_status
13165 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
13166 {
13167 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
13168 gimple *body_stmt;
13169 gtransaction *trans_stmt;
13170 gimple_seq body = NULL;
13171 int subcode = 0;
13172
13173 /* Wrap the transaction body in a BIND_EXPR so we have a context
13174 in which to put decls for OMP. */
13175 if (TREE_CODE (tbody) != BIND_EXPR)
13176 {
13177 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
13178 TREE_SIDE_EFFECTS (bind) = 1;
13179 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
13180 TRANSACTION_EXPR_BODY (expr) = bind;
13181 }
13182
13183 push_gimplify_context ();
13184 temp = voidify_wrapper_expr (*expr_p, NULL);
13185
13186 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
13187 pop_gimplify_context (body_stmt);
13188
13189 trans_stmt = gimple_build_transaction (body);
13190 if (TRANSACTION_EXPR_OUTER (expr))
13191 subcode = GTMA_IS_OUTER;
13192 else if (TRANSACTION_EXPR_RELAXED (expr))
13193 subcode = GTMA_IS_RELAXED;
13194 gimple_transaction_set_subcode (trans_stmt, subcode);
13195
13196 gimplify_seq_add_stmt (pre_p, trans_stmt);
13197
13198 if (temp)
13199 {
13200 *expr_p = temp;
13201 return GS_OK;
13202 }
13203
13204 *expr_p = NULL_TREE;
13205 return GS_ALL_DONE;
13206 }
13207
13208 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13209 is the OMP_BODY of the original EXPR (which has already been
13210 gimplified so it's not present in the EXPR).
13211
13212 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
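/* E.g. (illustrative) within a loop with '#pragma omp for ordered(2)'
   and iteration variables i and j, a nested

       #pragma omp ordered depend(sink: i - 1, j)

   must name exactly those two iteration variables, in order; otherwise
   one of the errors below is emitted and a GIMPLE_NOP is returned
   instead of the GIMPLE_OMP_ORDERED tuple.  */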
13213
13214 static gimple *
13215 gimplify_omp_ordered (tree expr, gimple_seq body)
13216 {
13217 tree c, decls;
13218 int failures = 0;
13219 unsigned int i;
13220 tree source_c = NULL_TREE;
13221 tree sink_c = NULL_TREE;
13222
13223 if (gimplify_omp_ctxp)
13224 {
13225 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13226 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13227 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
13228 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
13229 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
13230 {
13231 error_at (OMP_CLAUSE_LOCATION (c),
13232 "%<ordered%> construct with %<depend%> clause must be "
13233 "closely nested inside a loop with %<ordered%> clause "
13234 "with a parameter");
13235 failures++;
13236 }
13237 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13238 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
13239 {
13240 bool fail = false;
13241 for (decls = OMP_CLAUSE_DECL (c), i = 0;
13242 decls && TREE_CODE (decls) == TREE_LIST;
13243 decls = TREE_CHAIN (decls), ++i)
13244 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
13245 continue;
13246 else if (TREE_VALUE (decls)
13247 != gimplify_omp_ctxp->loop_iter_var[2 * i])
13248 {
13249 error_at (OMP_CLAUSE_LOCATION (c),
13250 "variable %qE is not an iteration "
13251 "of outermost loop %d, expected %qE",
13252 TREE_VALUE (decls), i + 1,
13253 gimplify_omp_ctxp->loop_iter_var[2 * i]);
13254 fail = true;
13255 failures++;
13256 }
13257 else
13258 TREE_VALUE (decls)
13259 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
13260 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
13261 {
13262 error_at (OMP_CLAUSE_LOCATION (c),
13263 "number of variables in %<depend%> clause with "
13264 "%<sink%> modifier does not match number of "
13265 "iteration variables");
13266 failures++;
13267 }
13268 sink_c = c;
13269 }
13270 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13271 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
13272 {
13273 if (source_c)
13274 {
13275 error_at (OMP_CLAUSE_LOCATION (c),
13276 "more than one %<depend%> clause with %<source%> "
13277 "modifier on an %<ordered%> construct");
13278 failures++;
13279 }
13280 else
13281 source_c = c;
13282 }
13283 }
13284 if (source_c && sink_c)
13285 {
13286 error_at (OMP_CLAUSE_LOCATION (source_c),
13287 "%<depend%> clause with %<source%> modifier specified "
13288 "together with %<depend%> clauses with %<sink%> modifier "
13289 "on the same construct");
13290 failures++;
13291 }
13292
13293 if (failures)
13294 return gimple_build_nop ();
13295 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
13296 }
13297
13298 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
13299 expression produces a value to be used as an operand inside a GIMPLE
13300 statement, the value will be stored back in *EXPR_P. This value will
13301 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13302 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13303 emitted in PRE_P and POST_P.
13304
13305 Additionally, this process may overwrite parts of the input
13306 expression during gimplification. Ideally, it should be
13307 possible to do non-destructive gimplification.
13308
13309 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13310 the expression needs to evaluate to a value to be used as
13311 an operand in a GIMPLE statement, this value will be stored in
13312 *EXPR_P on exit. This happens when the caller specifies one
13313 of fb_lvalue or fb_rvalue fallback flags.
13314
13315 PRE_P will contain the sequence of GIMPLE statements corresponding
13316 to the evaluation of EXPR and all the side-effects that must
13317 be executed before the main expression. On exit, the last
13318 statement of PRE_P is the core statement being gimplified. For
13319 instance, when gimplifying 'if (++a)' the last statement in
13320 PRE_P will be 'if (t.1)' where t.1 is the result of
13321 pre-incrementing 'a'.
13322
13323 POST_P will contain the sequence of GIMPLE statements corresponding
13324 to the evaluation of all the side-effects that must be executed
13325 after the main expression. If this is NULL, the post
13326 side-effects are stored at the end of PRE_P.
13327
13328 The reason why the output is split in two is to handle post
13329 side-effects explicitly. In some cases, an expression may have
13330 inner and outer post side-effects which need to be emitted in
13331 an order different from the one given by the recursive
13332 traversal. For instance, for the expression (*p--)++ the post
13333 side-effects of '--' must actually occur *after* the post
13334 side-effects of '++'. However, gimplification will first visit
13335 the inner expression, so if a separate POST sequence was not
13336 used, the resulting sequence would be:
13337
13338 1 t.1 = *p
13339 2 p = p - 1
13340 3 t.2 = t.1 + 1
13341 4 *p = t.2
13342
13343 However, the post-decrement operation in line #2 must not be
13344 evaluated until after the store to *p at line #4, so the
13345 correct sequence should be:
13346
13347 1 t.1 = *p
13348 2 t.2 = t.1 + 1
13349 3 *p = t.2
13350 4 p = p - 1
13351
13352 So, by specifying a separate post queue, it is possible
13353 to emit the post side-effects in the correct order.
13354 If POST_P is NULL, an internal queue will be used. Before
13355 returning to the caller, the sequence POST_P is appended to
13356 the main output sequence PRE_P.
13357
13358 GIMPLE_TEST_F points to a function that takes a tree T and
13359 returns nonzero if T is in the GIMPLE form requested by the
13360 caller. The GIMPLE predicates are in gimple.c.
13361
13362 FALLBACK tells the function what sort of a temporary we want if
13363 gimplification cannot produce an expression that complies with
13364 GIMPLE_TEST_F.
13365
13366 fb_none means that no temporary should be generated
13367 fb_rvalue means that an rvalue is OK to generate
13368 fb_lvalue means that an lvalue is OK to generate
13369 fb_either means that either is OK, but an lvalue is preferable.
13370 fb_mayfail means that gimplification may fail (in which case
13371 GS_ERROR will be returned)
13372
13373 The return value is either GS_ERROR or GS_ALL_DONE, since this
13374 function iterates until EXPR is completely gimplified or an error
13375 occurs. */
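/* A typical (illustrative) call from one of the gimplification helpers:

       enum gimplify_status gs
         = gimplify_expr (&op, pre_p, post_p, is_gimple_val, fb_rvalue);

   forces OP into a GIMPLE value, emitting any statements needed to
   compute it into PRE_P, and returns GS_ERROR or GS_ALL_DONE.  */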
13376
13377 enum gimplify_status
13378 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13379 bool (*gimple_test_f) (tree), fallback_t fallback)
13380 {
13381 tree tmp;
13382 gimple_seq internal_pre = NULL;
13383 gimple_seq internal_post = NULL;
13384 tree save_expr;
13385 bool is_statement;
13386 location_t saved_location;
13387 enum gimplify_status ret;
13388 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
13389 tree label;
13390
13391 save_expr = *expr_p;
13392 if (save_expr == NULL_TREE)
13393 return GS_ALL_DONE;
13394
13395 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
13396 is_statement = gimple_test_f == is_gimple_stmt;
13397 if (is_statement)
13398 gcc_assert (pre_p);
13399
13400 /* Consistency checks. */
13401 if (gimple_test_f == is_gimple_reg)
13402 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
13403 else if (gimple_test_f == is_gimple_val
13404 || gimple_test_f == is_gimple_call_addr
13405 || gimple_test_f == is_gimple_condexpr
13406 || gimple_test_f == is_gimple_condexpr_for_cond
13407 || gimple_test_f == is_gimple_mem_rhs
13408 || gimple_test_f == is_gimple_mem_rhs_or_call
13409 || gimple_test_f == is_gimple_reg_rhs
13410 || gimple_test_f == is_gimple_reg_rhs_or_call
13411 || gimple_test_f == is_gimple_asm_val
13412 || gimple_test_f == is_gimple_mem_ref_addr)
13413 gcc_assert (fallback & fb_rvalue);
13414 else if (gimple_test_f == is_gimple_min_lval
13415 || gimple_test_f == is_gimple_lvalue)
13416 gcc_assert (fallback & fb_lvalue);
13417 else if (gimple_test_f == is_gimple_addressable)
13418 gcc_assert (fallback & fb_either);
13419 else if (gimple_test_f == is_gimple_stmt)
13420 gcc_assert (fallback == fb_none);
13421 else
13422 {
13423 /* We should have recognized the GIMPLE_TEST_F predicate to
13424 know what kind of fallback to use in case a temporary is
13425 needed to hold the value or address of *EXPR_P. */
13426 gcc_unreachable ();
13427 }
13428
13429 /* We used to check the predicate here and return immediately if it
13430 succeeds. This is wrong; the design is for gimplification to be
13431 idempotent, and for the predicates to only test for valid forms, not
13432 whether they are fully simplified. */
13433 if (pre_p == NULL)
13434 pre_p = &internal_pre;
13435
13436 if (post_p == NULL)
13437 post_p = &internal_post;
13438
13439 /* Remember the last statements added to PRE_P and POST_P. Every
13440 new statement added by the gimplification helpers needs to be
13441 annotated with location information. To centralize the
13442 responsibility, we remember the last statement that had been
13443 added to both queues before gimplifying *EXPR_P. If
13444 gimplification produces new statements in PRE_P and POST_P, those
13445 statements will be annotated with the same location information
13446 as *EXPR_P. */
13447 pre_last_gsi = gsi_last (*pre_p);
13448 post_last_gsi = gsi_last (*post_p);
13449
13450 saved_location = input_location;
13451 if (save_expr != error_mark_node
13452 && EXPR_HAS_LOCATION (*expr_p))
13453 input_location = EXPR_LOCATION (*expr_p);
13454
13455 /* Loop over the specific gimplifiers until the toplevel node
13456 remains the same. */
13457 do
13458 {
13459 /* Strip away as many useless type conversions as possible
13460 at the toplevel. */
13461 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
13462
13463 /* Remember the expr. */
13464 save_expr = *expr_p;
13465
13466 /* Die, die, die, my darling. */
13467 if (error_operand_p (save_expr))
13468 {
13469 ret = GS_ERROR;
13470 break;
13471 }
13472
13473 /* Do any language-specific gimplification. */
13474 ret = ((enum gimplify_status)
13475 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
13476 if (ret == GS_OK)
13477 {
13478 if (*expr_p == NULL_TREE)
13479 break;
13480 if (*expr_p != save_expr)
13481 continue;
13482 }
13483 else if (ret != GS_UNHANDLED)
13484 break;
13485
13486 /* Make sure that all the cases set 'ret' appropriately. */
13487 ret = GS_UNHANDLED;
13488 switch (TREE_CODE (*expr_p))
13489 {
13490 /* First deal with the special cases. */
13491
13492 case POSTINCREMENT_EXPR:
13493 case POSTDECREMENT_EXPR:
13494 case PREINCREMENT_EXPR:
13495 case PREDECREMENT_EXPR:
13496 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
13497 fallback != fb_none,
13498 TREE_TYPE (*expr_p));
13499 break;
13500
13501 case VIEW_CONVERT_EXPR:
13502 if ((fallback & fb_rvalue)
13503 && is_gimple_reg_type (TREE_TYPE (*expr_p))
13504 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
13505 {
13506 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13507 post_p, is_gimple_val, fb_rvalue);
13508 recalculate_side_effects (*expr_p);
13509 break;
13510 }
13511 /* Fallthru. */
13512
13513 case ARRAY_REF:
13514 case ARRAY_RANGE_REF:
13515 case REALPART_EXPR:
13516 case IMAGPART_EXPR:
13517 case COMPONENT_REF:
13518 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
13519 fallback ? fallback : fb_rvalue);
13520 break;
13521
13522 case COND_EXPR:
13523 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
13524
13525 /* C99 code may assign to an array in a structure value of a
13526 conditional expression, and this has undefined behavior
13527 only on execution, so create a temporary if an lvalue is
13528 required. */
13529 if (fallback == fb_lvalue)
13530 {
13531 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13532 mark_addressable (*expr_p);
13533 ret = GS_OK;
13534 }
13535 break;
13536
13537 case CALL_EXPR:
13538 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
13539
13540 /* C99 code may assign to an array in a structure returned
13541 from a function, and this has undefined behavior only on
13542 execution, so create a temporary if an lvalue is
13543 required. */
13544 if (fallback == fb_lvalue)
13545 {
13546 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13547 mark_addressable (*expr_p);
13548 ret = GS_OK;
13549 }
13550 break;
13551
13552 case TREE_LIST:
13553 gcc_unreachable ();
13554
13555 case COMPOUND_EXPR:
13556 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
13557 break;
13558
13559 case COMPOUND_LITERAL_EXPR:
13560 ret = gimplify_compound_literal_expr (expr_p, pre_p,
13561 gimple_test_f, fallback);
13562 break;
13563
13564 case MODIFY_EXPR:
13565 case INIT_EXPR:
13566 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
13567 fallback != fb_none);
13568 break;
13569
13570 case TRUTH_ANDIF_EXPR:
13571 case TRUTH_ORIF_EXPR:
13572 {
13573 /* Preserve the original type of the expression and the
13574 source location of the outer expression. */
13575 tree org_type = TREE_TYPE (*expr_p);
13576 *expr_p = gimple_boolify (*expr_p);
13577 *expr_p = build3_loc (input_location, COND_EXPR,
13578 org_type, *expr_p,
13579 fold_convert_loc
13580 (input_location,
13581 org_type, boolean_true_node),
13582 fold_convert_loc
13583 (input_location,
13584 org_type, boolean_false_node));
13585 ret = GS_OK;
13586 break;
13587 }
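/* E.g. 'a && b' has just been rewritten (illustratively) as the
   COND_EXPR 'a && b ? 1 : 0' of the original type; the next
   iteration of the loop gimplifies it via gimplify_cond_expr,
   which expands the short-circuit evaluation.  */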
13588
13589 case TRUTH_NOT_EXPR:
13590 {
13591 tree type = TREE_TYPE (*expr_p);
13592 /* The parsers are careful to generate TRUTH_NOT_EXPR
13593 only with operands that are always zero or one.
13594 We do not fold here but handle the only interesting case
13595 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
13596 *expr_p = gimple_boolify (*expr_p);
13597 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
13598 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
13599 TREE_TYPE (*expr_p),
13600 TREE_OPERAND (*expr_p, 0));
13601 else
13602 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
13603 TREE_TYPE (*expr_p),
13604 TREE_OPERAND (*expr_p, 0),
13605 build_int_cst (TREE_TYPE (*expr_p), 1));
13606 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
13607 *expr_p = fold_convert_loc (input_location, type, *expr_p);
13608 ret = GS_OK;
13609 break;
13610 }
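/* I.e. (illustrative) '!b' becomes '~b' when b has a 1-bit boolean
   type after gimple_boolify, and 'b ^ 1' otherwise, avoiding
   re-introduction of TRUTH_NOT_EXPR by fold.  */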
13611
13612 case ADDR_EXPR:
13613 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
13614 break;
13615
13616 case ANNOTATE_EXPR:
13617 {
13618 tree cond = TREE_OPERAND (*expr_p, 0);
13619 tree kind = TREE_OPERAND (*expr_p, 1);
13620 tree data = TREE_OPERAND (*expr_p, 2);
13621 tree type = TREE_TYPE (cond);
13622 if (!INTEGRAL_TYPE_P (type))
13623 {
13624 *expr_p = cond;
13625 ret = GS_OK;
13626 break;
13627 }
13628 tree tmp = create_tmp_var (type);
13629 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
13630 gcall *call
13631 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
13632 gimple_call_set_lhs (call, tmp);
13633 gimplify_seq_add_stmt (pre_p, call);
13634 *expr_p = tmp;
13635 ret = GS_ALL_DONE;
13636 break;
13637 }
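/* The annotation (e.g. from '#pragma GCC ivdep' on a loop condition)
   has thus been lowered to an internal-function call (illustrative):

       tmp = .ANNOTATE (cond, kind, data);

   whose result replaces the expression.  */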
13638
13639 case VA_ARG_EXPR:
13640 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
13641 break;
13642
13643 CASE_CONVERT:
13644 if (IS_EMPTY_STMT (*expr_p))
13645 {
13646 ret = GS_ALL_DONE;
13647 break;
13648 }
13649
13650 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
13651 || fallback == fb_none)
13652 {
13653 /* Just strip a conversion to void (or in void context) and
13654 try again. */
13655 *expr_p = TREE_OPERAND (*expr_p, 0);
13656 ret = GS_OK;
13657 break;
13658 }
13659
13660 ret = gimplify_conversion (expr_p);
13661 if (ret == GS_ERROR)
13662 break;
13663 if (*expr_p != save_expr)
13664 break;
13665 /* FALLTHRU */
13666
13667 case FIX_TRUNC_EXPR:
13668 /* unary_expr: ... | '(' cast ')' val | ... */
13669 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13670 is_gimple_val, fb_rvalue);
13671 recalculate_side_effects (*expr_p);
13672 break;
13673
13674 case INDIRECT_REF:
13675 {
13676 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
13677 bool notrap = TREE_THIS_NOTRAP (*expr_p);
13678 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
13679
13680 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
13681 if (*expr_p != save_expr)
13682 {
13683 ret = GS_OK;
13684 break;
13685 }
13686
13687 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13688 is_gimple_reg, fb_rvalue);
13689 if (ret == GS_ERROR)
13690 break;
13691
13692 recalculate_side_effects (*expr_p);
13693 *expr_p = fold_build2_loc (input_location, MEM_REF,
13694 TREE_TYPE (*expr_p),
13695 TREE_OPERAND (*expr_p, 0),
13696 build_int_cst (saved_ptr_type, 0));
13697 TREE_THIS_VOLATILE (*expr_p) = volatilep;
13698 TREE_THIS_NOTRAP (*expr_p) = notrap;
13699 ret = GS_OK;
13700 break;
13701 }
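/* I.e. a remaining '*p' dereference is lowered (illustratively) to
   'MEM_REF <p, 0>' with p forced into a register, preserving the
   volatility and trap bits of the original reference.  */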
13702
13703 /* We arrive here through the various re-gimplification paths. */
13704 case MEM_REF:
13705 /* First try re-folding the whole thing. */
13706 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
13707 TREE_OPERAND (*expr_p, 0),
13708 TREE_OPERAND (*expr_p, 1));
13709 if (tmp)
13710 {
13711 REF_REVERSE_STORAGE_ORDER (tmp)
13712 = REF_REVERSE_STORAGE_ORDER (*expr_p);
13713 *expr_p = tmp;
13714 recalculate_side_effects (*expr_p);
13715 ret = GS_OK;
13716 break;
13717 }
13718 /* Avoid re-gimplifying the address operand if it is already
13719 in suitable form. Re-gimplifying would mark the address
13720 operand addressable. Always gimplify when not in SSA form
13721 as we still may have to gimplify decls with value-exprs. */
13722 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
13723 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
13724 {
13725 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13726 is_gimple_mem_ref_addr, fb_rvalue);
13727 if (ret == GS_ERROR)
13728 break;
13729 }
13730 recalculate_side_effects (*expr_p);
13731 ret = GS_ALL_DONE;
13732 break;
13733
13734 /* Constants need not be gimplified. */
13735 case INTEGER_CST:
13736 case REAL_CST:
13737 case FIXED_CST:
13738 case STRING_CST:
13739 case COMPLEX_CST:
13740 case VECTOR_CST:
13741 /* Drop the overflow flag on constants, we do not want
13742 that in the GIMPLE IL. */
13743 if (TREE_OVERFLOW_P (*expr_p))
13744 *expr_p = drop_tree_overflow (*expr_p);
13745 ret = GS_ALL_DONE;
13746 break;
13747
13748 case CONST_DECL:
13749 /* If we require an lvalue, such as for ADDR_EXPR, retain the
13750 CONST_DECL node. Otherwise the decl is replaceable by its
13751 value. */
13752 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
13753 if (fallback & fb_lvalue)
13754 ret = GS_ALL_DONE;
13755 else
13756 {
13757 *expr_p = DECL_INITIAL (*expr_p);
13758 ret = GS_OK;
13759 }
13760 break;
13761
13762 case DECL_EXPR:
13763 ret = gimplify_decl_expr (expr_p, pre_p);
13764 break;
13765
13766 case BIND_EXPR:
13767 ret = gimplify_bind_expr (expr_p, pre_p);
13768 break;
13769
13770 case LOOP_EXPR:
13771 ret = gimplify_loop_expr (expr_p, pre_p);
13772 break;
13773
13774 case SWITCH_EXPR:
13775 ret = gimplify_switch_expr (expr_p, pre_p);
13776 break;
13777
13778 case EXIT_EXPR:
13779 ret = gimplify_exit_expr (expr_p);
13780 break;
13781
13782 case GOTO_EXPR:
13783 /* If the target is not a LABEL_DECL, then it is a computed jump
13784 and the target needs to be gimplified. */
13785 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
13786 {
13787 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
13788 NULL, is_gimple_val, fb_rvalue);
13789 if (ret == GS_ERROR)
13790 break;
13791 }
13792 gimplify_seq_add_stmt (pre_p,
13793 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
13794 ret = GS_ALL_DONE;
13795 break;
13796
13797 case PREDICT_EXPR:
13798 gimplify_seq_add_stmt (pre_p,
13799 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
13800 PREDICT_EXPR_OUTCOME (*expr_p)));
13801 ret = GS_ALL_DONE;
13802 break;
13803
13804 case LABEL_EXPR:
13805 ret = gimplify_label_expr (expr_p, pre_p);
13806 label = LABEL_EXPR_LABEL (*expr_p);
13807 gcc_assert (decl_function_context (label) == current_function_decl);
13808
13809 /* If the label is used in a goto statement, or the address of the
13810 label is taken, we need to unpoison all variables that were seen
13811 so far. Doing so prevents us from reporting false positives. */
13812 if (asan_poisoned_variables
13813 && asan_used_labels != NULL
13814 && asan_used_labels->contains (label))
13815 asan_poison_variables (asan_poisoned_variables, false, pre_p);
13816 break;
13817
13818 case CASE_LABEL_EXPR:
13819 ret = gimplify_case_label_expr (expr_p, pre_p);
13820
13821 if (gimplify_ctxp->live_switch_vars)
13822 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
13823 pre_p);
13824 break;
13825
13826 case RETURN_EXPR:
13827 ret = gimplify_return_expr (*expr_p, pre_p);
13828 break;
13829
13830 case CONSTRUCTOR:
13831 /* Don't reduce this in place; let gimplify_init_constructor work its
13832 magic. But if we're just elaborating this for side effects, just
13833 gimplify any element that has side-effects. */
13834 if (fallback == fb_none)
13835 {
13836 unsigned HOST_WIDE_INT ix;
13837 tree val;
13838 tree temp = NULL_TREE;
13839 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
13840 if (TREE_SIDE_EFFECTS (val))
13841 append_to_statement_list (val, &temp);
13842
13843 *expr_p = temp;
13844 ret = temp ? GS_OK : GS_ALL_DONE;
13845 }
13846 /* C99 code may assign to an array in a constructed
13847 structure or union, and this has undefined behavior only
13848 on execution, so create a temporary if an lvalue is
13849 required. */
13850 else if (fallback == fb_lvalue)
13851 {
13852 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13853 mark_addressable (*expr_p);
13854 ret = GS_OK;
13855 }
13856 else
13857 ret = GS_ALL_DONE;
13858 break;
13859
13860 /* The following are special cases that are not handled by the
13861 original GIMPLE grammar. */
13862
13863 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
13864 eliminated. */
13865 case SAVE_EXPR:
13866 ret = gimplify_save_expr (expr_p, pre_p, post_p);
13867 break;
13868
13869 case BIT_FIELD_REF:
13870 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13871 post_p, is_gimple_lvalue, fb_either);
13872 recalculate_side_effects (*expr_p);
13873 break;
13874
13875 case TARGET_MEM_REF:
13876 {
13877 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
13878
13879 if (TMR_BASE (*expr_p))
13880 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
13881 post_p, is_gimple_mem_ref_addr, fb_either);
13882 if (TMR_INDEX (*expr_p))
13883 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
13884 post_p, is_gimple_val, fb_rvalue);
13885 if (TMR_INDEX2 (*expr_p))
13886 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
13887 post_p, is_gimple_val, fb_rvalue);
13888 /* TMR_STEP and TMR_OFFSET are always integer constants. */
13889 ret = MIN (r0, r1);
13890 }
13891 break;
13892
13893 case NON_LVALUE_EXPR:
13894 /* This should have been stripped above. */
13895 gcc_unreachable ();
13896
13897 case ASM_EXPR:
13898 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
13899 break;
13900
13901 case TRY_FINALLY_EXPR:
13902 case TRY_CATCH_EXPR:
13903 {
13904 gimple_seq eval, cleanup;
13905 gtry *try_;
13906
13907 /* Calls to destructors are generated automatically in FINALLY/CATCH
13908 blocks. They should have location UNKNOWN_LOCATION. However,
13909 gimplify_call_expr will reset these call stmts to input_location
13910 if it finds stmt's location is unknown. To prevent resetting for
13911 destructors, we set the input_location to unknown.
13912 Note that this only affects the destructor calls in FINALLY/CATCH
13913 block, and will automatically reset to its original value by the
13914 end of gimplify_expr. */
13915 input_location = UNKNOWN_LOCATION;
13916 eval = cleanup = NULL;
13917 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
13918 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13919 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
13920 {
13921 gimple_seq n = NULL, e = NULL;
13922 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13923 0), &n);
13924 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13925 1), &e);
13926 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
13927 {
13928 geh_else *stmt = gimple_build_eh_else (n, e);
13929 gimple_seq_add_stmt (&cleanup, stmt);
13930 }
13931 }
13932 else
13933 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
13934 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
13935 if (gimple_seq_empty_p (cleanup))
13936 {
13937 gimple_seq_add_seq (pre_p, eval);
13938 ret = GS_ALL_DONE;
13939 break;
13940 }
13941 try_ = gimple_build_try (eval, cleanup,
13942 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13943 ? GIMPLE_TRY_FINALLY
13944 : GIMPLE_TRY_CATCH);
13945 if (EXPR_HAS_LOCATION (save_expr))
13946 gimple_set_location (try_, EXPR_LOCATION (save_expr));
13947 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
13948 gimple_set_location (try_, saved_location);
13949 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
13950 gimple_try_set_catch_is_cleanup (try_,
13951 TRY_CATCH_IS_CLEANUP (*expr_p));
13952 gimplify_seq_add_stmt (pre_p, try_);
13953 ret = GS_ALL_DONE;
13954 break;
13955 }
13956
13957 case CLEANUP_POINT_EXPR:
13958 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
13959 break;
13960
13961 case TARGET_EXPR:
13962 ret = gimplify_target_expr (expr_p, pre_p, post_p);
13963 break;
13964
13965 case CATCH_EXPR:
13966 {
13967 gimple *c;
13968 gimple_seq handler = NULL;
13969 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
13970 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
13971 gimplify_seq_add_stmt (pre_p, c);
13972 ret = GS_ALL_DONE;
13973 break;
13974 }
13975
13976 case EH_FILTER_EXPR:
13977 {
13978 gimple *ehf;
13979 gimple_seq failure = NULL;
13980
13981 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
13982 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
13983 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
13984 gimplify_seq_add_stmt (pre_p, ehf);
13985 ret = GS_ALL_DONE;
13986 break;
13987 }
13988
13989 case OBJ_TYPE_REF:
13990 {
13991 enum gimplify_status r0, r1;
13992 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
13993 post_p, is_gimple_val, fb_rvalue);
13994 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
13995 post_p, is_gimple_val, fb_rvalue);
13996 TREE_SIDE_EFFECTS (*expr_p) = 0;
13997 ret = MIN (r0, r1);
13998 }
13999 break;
14000
14001 case LABEL_DECL:
14002 /* We get here when taking the address of a label. We mark
14003 the label as "forced", meaning it can never be removed and
14004 is a potential target for any computed goto. */
14005 FORCED_LABEL (*expr_p) = 1;
14006 ret = GS_ALL_DONE;
14007 break;
14008
14009 case STATEMENT_LIST:
14010 ret = gimplify_statement_list (expr_p, pre_p);
14011 break;
14012
14013 case WITH_SIZE_EXPR:
14014 {
14015 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14016 post_p == &internal_post ? NULL : post_p,
14017 gimple_test_f, fallback);
14018 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14019 is_gimple_val, fb_rvalue);
14020 ret = GS_ALL_DONE;
14021 }
14022 break;
14023
14024 case VAR_DECL:
14025 case PARM_DECL:
14026 ret = gimplify_var_or_parm_decl (expr_p);
14027 break;
14028
14029 case RESULT_DECL:
14030 /* When within an OMP context, notice uses of variables. */
14031 if (gimplify_omp_ctxp)
14032 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
14033 ret = GS_ALL_DONE;
14034 break;
14035
14036 case DEBUG_EXPR_DECL:
14037 gcc_unreachable ();
14038
14039 case DEBUG_BEGIN_STMT:
14040 gimplify_seq_add_stmt (pre_p,
14041 gimple_build_debug_begin_stmt
14042 (TREE_BLOCK (*expr_p),
14043 EXPR_LOCATION (*expr_p)));
14044 ret = GS_ALL_DONE;
14045 *expr_p = NULL;
14046 break;
14047
14048 case SSA_NAME:
14049 /* Allow callbacks into the gimplifier during optimization. */
14050 ret = GS_ALL_DONE;
14051 break;
14052
14053 case OMP_PARALLEL:
14054 gimplify_omp_parallel (expr_p, pre_p);
14055 ret = GS_ALL_DONE;
14056 break;
14057
14058 case OMP_TASK:
14059 gimplify_omp_task (expr_p, pre_p);
14060 ret = GS_ALL_DONE;
14061 break;
14062
14063 case OMP_FOR:
14064 case OMP_SIMD:
14065 case OMP_DISTRIBUTE:
14066 case OMP_TASKLOOP:
14067 case OACC_LOOP:
14068 ret = gimplify_omp_for (expr_p, pre_p);
14069 break;
14070
14071 case OMP_LOOP:
14072 ret = gimplify_omp_loop (expr_p, pre_p);
14073 break;
14074
14075 case OACC_CACHE:
14076 gimplify_oacc_cache (expr_p, pre_p);
14077 ret = GS_ALL_DONE;
14078 break;
14079
14080 case OACC_DECLARE:
14081 gimplify_oacc_declare (expr_p, pre_p);
14082 ret = GS_ALL_DONE;
14083 break;
14084
14085 case OACC_HOST_DATA:
14086 case OACC_DATA:
14087 case OACC_KERNELS:
14088 case OACC_PARALLEL:
14089 case OACC_SERIAL:
14090 case OMP_SECTIONS:
14091 case OMP_SINGLE:
14092 case OMP_TARGET:
14093 case OMP_TARGET_DATA:
14094 case OMP_TEAMS:
14095 gimplify_omp_workshare (expr_p, pre_p);
14096 ret = GS_ALL_DONE;
14097 break;
14098
14099 case OACC_ENTER_DATA:
14100 case OACC_EXIT_DATA:
14101 case OACC_UPDATE:
14102 case OMP_TARGET_UPDATE:
14103 case OMP_TARGET_ENTER_DATA:
14104 case OMP_TARGET_EXIT_DATA:
14105 gimplify_omp_target_update (expr_p, pre_p);
14106 ret = GS_ALL_DONE;
14107 break;
14108
14109 case OMP_SECTION:
14110 case OMP_MASTER:
14111 case OMP_ORDERED:
14112 case OMP_CRITICAL:
14113 case OMP_SCAN:
14114 {
14115 gimple_seq body = NULL;
14116 gimple *g;
14117 bool saved_in_omp_construct = in_omp_construct;
14118
14119 in_omp_construct = true;
14120 gimplify_and_add (OMP_BODY (*expr_p), &body);
14121 in_omp_construct = saved_in_omp_construct;
14122 switch (TREE_CODE (*expr_p))
14123 {
14124 case OMP_SECTION:
14125 g = gimple_build_omp_section (body);
14126 break;
14127 case OMP_MASTER:
14128 g = gimple_build_omp_master (body);
14129 break;
14130 case OMP_ORDERED:
14131 g = gimplify_omp_ordered (*expr_p, body);
14132 break;
14133 case OMP_CRITICAL:
14134 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
14135 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
14136 gimplify_adjust_omp_clauses (pre_p, body,
14137 &OMP_CRITICAL_CLAUSES (*expr_p),
14138 OMP_CRITICAL);
14139 g = gimple_build_omp_critical (body,
14140 OMP_CRITICAL_NAME (*expr_p),
14141 OMP_CRITICAL_CLAUSES (*expr_p));
14142 break;
14143 case OMP_SCAN:
14144 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
14145 pre_p, ORT_WORKSHARE, OMP_SCAN);
14146 gimplify_adjust_omp_clauses (pre_p, body,
14147 &OMP_SCAN_CLAUSES (*expr_p),
14148 OMP_SCAN);
14149 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
14150 break;
14151 default:
14152 gcc_unreachable ();
14153 }
14154 gimplify_seq_add_stmt (pre_p, g);
14155 ret = GS_ALL_DONE;
14156 break;
14157 }
14158
14159 case OMP_TASKGROUP:
14160 {
14161 gimple_seq body = NULL;
14162
14163 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
14164 bool saved_in_omp_construct = in_omp_construct;
14165 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
14166 OMP_TASKGROUP);
14167 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
14168
14169 in_omp_construct = true;
14170 gimplify_and_add (OMP_BODY (*expr_p), &body);
14171 in_omp_construct = saved_in_omp_construct;
14172 gimple_seq cleanup = NULL;
14173 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
14174 gimple *g = gimple_build_call (fn, 0);
14175 gimple_seq_add_stmt (&cleanup, g);
14176 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
14177 body = NULL;
14178 gimple_seq_add_stmt (&body, g);
14179 g = gimple_build_omp_taskgroup (body, *pclauses);
14180 gimplify_seq_add_stmt (pre_p, g);
14181 ret = GS_ALL_DONE;
14182 break;
14183 }
14184
14185 case OMP_ATOMIC:
14186 case OMP_ATOMIC_READ:
14187 case OMP_ATOMIC_CAPTURE_OLD:
14188 case OMP_ATOMIC_CAPTURE_NEW:
14189 ret = gimplify_omp_atomic (expr_p, pre_p);
14190 break;
14191
14192 case TRANSACTION_EXPR:
14193 ret = gimplify_transaction (expr_p, pre_p);
14194 break;
14195
14196 case TRUTH_AND_EXPR:
14197 case TRUTH_OR_EXPR:
14198 case TRUTH_XOR_EXPR:
14199 {
14200 tree orig_type = TREE_TYPE (*expr_p);
14201 tree new_type, xop0, xop1;
14202 *expr_p = gimple_boolify (*expr_p);
14203 new_type = TREE_TYPE (*expr_p);
14204 if (!useless_type_conversion_p (orig_type, new_type))
14205 {
14206 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
14207 ret = GS_OK;
14208 break;
14209 }
14210
14211 /* Boolified binary truth expressions are semantically equivalent
14212 to bitwise binary expressions. Canonicalize them to the
14213 bitwise variant. */
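/* For instance (illustrative): once boolified, TRUTH_AND_EXPR <a, b>
   has the same semantics as the non-short-circuit bitwise form
   a & b, i.e. BIT_AND_EXPR <a, b>. (The short-circuit variants
   TRUTH_ANDIF_EXPR/TRUTH_ORIF_EXPR are lowered elsewhere.) So we
   simply flip the tree code below. */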
14214 switch (TREE_CODE (*expr_p))
14215 {
14216 case TRUTH_AND_EXPR:
14217 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
14218 break;
14219 case TRUTH_OR_EXPR:
14220 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
14221 break;
14222 case TRUTH_XOR_EXPR:
14223 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
14224 break;
14225 default:
14226 break;
14227 }
14228 /* Now make sure that the operands have types compatible with the
14229 expression's new_type. */
14230 xop0 = TREE_OPERAND (*expr_p, 0);
14231 xop1 = TREE_OPERAND (*expr_p, 1);
14232 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
14233 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
14234 new_type,
14235 xop0);
14236 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
14237 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
14238 new_type,
14239 xop1);
14240 /* Continue classified as tcc_binary. */
14241 goto expr_2;
14242 }
14243
14244 case VEC_COND_EXPR:
14245 {
14246 enum gimplify_status r0, r1, r2;
14247
14248 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14249 post_p, is_gimple_condexpr, fb_rvalue);
14250 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14251 post_p, is_gimple_val, fb_rvalue);
14252 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14253 post_p, is_gimple_val, fb_rvalue);
14254
14255 ret = MIN (MIN (r0, r1), r2);
14256 recalculate_side_effects (*expr_p);
14257 }
14258 break;
14259
14260 case VEC_PERM_EXPR:
14261 /* Classified as tcc_expression. */
14262 goto expr_3;
14263
14264 case BIT_INSERT_EXPR:
14265 /* Argument 3 is a constant. */
14266 goto expr_2;
14267
14268 case POINTER_PLUS_EXPR:
14269 {
14270 enum gimplify_status r0, r1;
14271 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14272 post_p, is_gimple_val, fb_rvalue);
14273 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14274 post_p, is_gimple_val, fb_rvalue);
14275 recalculate_side_effects (*expr_p);
14276 ret = MIN (r0, r1);
14277 break;
14278 }
14279
14280 default:
14281 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
14282 {
14283 case tcc_comparison:
14284 /* Handle comparison of non-scalar-mode aggregate objects
14285 with a call to memcmp. It would be nice to only have to do
14286 this for variable-sized objects, but then we'd have to allow
14287 the same nest of reference nodes we allow for MODIFY_EXPR and
14288 that's too complex.
14289
14290 Compare scalar-mode aggregates as scalar-mode values. Using
14291 memcmp for them would be very inefficient at best, and is
14292 plain wrong if bitfields are involved. */
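/* Illustrative sketch (assuming a front end that emits aggregate
   equality, e.g. Fortran or Ada): a comparison a == b of BLKmode
   aggregates becomes roughly

     memcmp (&a, &b, sizeof (a)) == 0

   via gimplify_variable_sized_compare, whereas an aggregate whose
   TYPE_MODE is, say, SImode is compared as a scalar SImode value. */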
14293 {
14294 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
14295
14296 /* Vector comparisons need no boolification. */
14297 if (TREE_CODE (type) == VECTOR_TYPE)
14298 goto expr_2;
14299 else if (!AGGREGATE_TYPE_P (type))
14300 {
14301 tree org_type = TREE_TYPE (*expr_p);
14302 *expr_p = gimple_boolify (*expr_p);
14303 if (!useless_type_conversion_p (org_type,
14304 TREE_TYPE (*expr_p)))
14305 {
14306 *expr_p = fold_convert_loc (input_location,
14307 org_type, *expr_p);
14308 ret = GS_OK;
14309 }
14310 else
14311 goto expr_2;
14312 }
14313 else if (TYPE_MODE (type) != BLKmode)
14314 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
14315 else
14316 ret = gimplify_variable_sized_compare (expr_p);
14317
14318 break;
14319 }
14320
14321 /* If *EXPR_P does not need to be special-cased, handle it
14322 according to its class. */
14323 case tcc_unary:
14324 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14325 post_p, is_gimple_val, fb_rvalue);
14326 break;
14327
14328 case tcc_binary:
14329 expr_2:
14330 {
14331 enum gimplify_status r0, r1;
14332
14333 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14334 post_p, is_gimple_val, fb_rvalue);
14335 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14336 post_p, is_gimple_val, fb_rvalue);
14337
14338 ret = MIN (r0, r1);
14339 break;
14340 }
14341
14342 expr_3:
14343 {
14344 enum gimplify_status r0, r1, r2;
14345
14346 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14347 post_p, is_gimple_val, fb_rvalue);
14348 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14349 post_p, is_gimple_val, fb_rvalue);
14350 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14351 post_p, is_gimple_val, fb_rvalue);
14352
14353 ret = MIN (MIN (r0, r1), r2);
14354 break;
14355 }
14356
14357 case tcc_declaration:
14358 case tcc_constant:
14359 ret = GS_ALL_DONE;
14360 goto dont_recalculate;
14361
14362 default:
14363 gcc_unreachable ();
14364 }
14365
14366 recalculate_side_effects (*expr_p);
14367
14368 dont_recalculate:
14369 break;
14370 }
14371
14372 gcc_assert (*expr_p || ret != GS_OK);
14373 }
14374 while (ret == GS_OK);
14375
14376 /* If we encountered an error_mark somewhere nested inside, either
14377 stub out the statement or propagate the error back out. */
14378 if (ret == GS_ERROR)
14379 {
14380 if (is_statement)
14381 *expr_p = NULL;
14382 goto out;
14383 }
14384
14385 /* This was only valid as a return value from the langhook, which
14386 we handled. Make sure it doesn't escape from any other context. */
14387 gcc_assert (ret != GS_UNHANDLED);
14388
14389 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
14390 {
14391 /* We aren't looking for a value, and we don't have a valid
14392 statement. If it doesn't have side-effects, throw it away.
14393 We can also get here with code such as "*&&L;", where L is
14394 a LABEL_DECL that is marked as FORCED_LABEL. */
14395 if (TREE_CODE (*expr_p) == LABEL_DECL
14396 || !TREE_SIDE_EFFECTS (*expr_p))
14397 *expr_p = NULL;
14398 else if (!TREE_THIS_VOLATILE (*expr_p))
14399 {
14400 /* This is probably a _REF that contains something nested that
14401 has side effects. Recurse through the operands to find it. */
14402 enum tree_code code = TREE_CODE (*expr_p);
14403
14404 switch (code)
14405 {
14406 case COMPONENT_REF:
14407 case REALPART_EXPR:
14408 case IMAGPART_EXPR:
14409 case VIEW_CONVERT_EXPR:
14410 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14411 gimple_test_f, fallback);
14412 break;
14413
14414 case ARRAY_REF:
14415 case ARRAY_RANGE_REF:
14416 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14417 gimple_test_f, fallback);
14418 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14419 gimple_test_f, fallback);
14420 break;
14421
14422 default:
14423 /* Anything else with side-effects must be converted to
14424 a valid statement before we get here. */
14425 gcc_unreachable ();
14426 }
14427
14428 *expr_p = NULL;
14429 }
14430 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
14431 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
14432 {
14433 /* Historically, the compiler has treated a bare reference
14434 to a non-BLKmode volatile lvalue as forcing a load. */
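/* Illustrative user code (hypothetical):

     volatile int v;
     ...
     v;   // expression statement, value unused

   The bare read of v is preserved by assigning it into the
   temporary "vol" created below. */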
14435 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
14436
14437 /* Normally, we do not want to create a temporary for a
14438 TREE_ADDRESSABLE type because such a type should not be
14439 copied by bitwise-assignment. However, we make an
14440 exception here, as all we are doing is ensuring that
14441 we read the bytes that make up the type. We use
14442 create_tmp_var_raw because create_tmp_var will abort when
14443 given a TREE_ADDRESSABLE type. */
14444 tree tmp = create_tmp_var_raw (type, "vol");
14445 gimple_add_tmp_var (tmp);
14446 gimplify_assign (tmp, *expr_p, pre_p);
14447 *expr_p = NULL;
14448 }
14449 else
14450 /* We can't do anything useful with a volatile reference to
14451 an incomplete type, so just throw it away. Likewise for
14452 a BLKmode type, since any implicit inner load should
14453 already have been turned into an explicit one by the
14454 gimplification process. */
14455 *expr_p = NULL;
14456 }
14457
14458 /* If we are gimplifying at the statement level, we're done. Tack
14459 everything together and return. */
14460 if (fallback == fb_none || is_statement)
14461 {
14462 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14463 it out for GC to reclaim it. */
14464 *expr_p = NULL_TREE;
14465
14466 if (!gimple_seq_empty_p (internal_pre)
14467 || !gimple_seq_empty_p (internal_post))
14468 {
14469 gimplify_seq_add_seq (&internal_pre, internal_post);
14470 gimplify_seq_add_seq (pre_p, internal_pre);
14471 }
14472
14473 /* The result of gimplifying *EXPR_P is going to be the last few
14474 statements in *PRE_P and *POST_P. Add location information
14475 to all the statements that were added by the gimplification
14476 helpers. */
14477 if (!gimple_seq_empty_p (*pre_p))
14478 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
14479
14480 if (!gimple_seq_empty_p (*post_p))
14481 annotate_all_with_location_after (*post_p, post_last_gsi,
14482 input_location);
14483
14484 goto out;
14485 }
14486
14487 #ifdef ENABLE_GIMPLE_CHECKING
14488 if (*expr_p)
14489 {
14490 enum tree_code code = TREE_CODE (*expr_p);
14491 /* These expressions should already be in gimple IR form. */
14492 gcc_assert (code != MODIFY_EXPR
14493 && code != ASM_EXPR
14494 && code != BIND_EXPR
14495 && code != CATCH_EXPR
14496 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
14497 && code != EH_FILTER_EXPR
14498 && code != GOTO_EXPR
14499 && code != LABEL_EXPR
14500 && code != LOOP_EXPR
14501 && code != SWITCH_EXPR
14502 && code != TRY_FINALLY_EXPR
14503 && code != EH_ELSE_EXPR
14504 && code != OACC_PARALLEL
14505 && code != OACC_KERNELS
14506 && code != OACC_SERIAL
14507 && code != OACC_DATA
14508 && code != OACC_HOST_DATA
14509 && code != OACC_DECLARE
14510 && code != OACC_UPDATE
14511 && code != OACC_ENTER_DATA
14512 && code != OACC_EXIT_DATA
14513 && code != OACC_CACHE
14514 && code != OMP_CRITICAL
14515 && code != OMP_FOR
14516 && code != OACC_LOOP
14517 && code != OMP_MASTER
14518 && code != OMP_TASKGROUP
14519 && code != OMP_ORDERED
14520 && code != OMP_PARALLEL
14521 && code != OMP_SCAN
14522 && code != OMP_SECTIONS
14523 && code != OMP_SECTION
14524 && code != OMP_SINGLE);
14525 }
14526 #endif
14527
14528 /* Otherwise we're gimplifying a subexpression, so the resulting
14529 value is interesting. If it's a valid operand that matches
14530 GIMPLE_TEST_F, we're done. Unless we are handling some
14531 post-effects internally; if that's the case, we need to copy into
14532 a temporary before adding the post-effects to POST_P. */
14533 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
14534 goto out;
14535
14536 /* Otherwise, we need to create a new temporary for the gimplified
14537 expression. */
14538
14539 /* We can't return an lvalue if we have an internal postqueue. The
14540 object the lvalue refers to would (probably) be modified by the
14541 postqueue; we need to copy the value out first, which means an
14542 rvalue. */
14543 if ((fallback & fb_lvalue)
14544 && gimple_seq_empty_p (internal_post)
14545 && is_gimple_addressable (*expr_p))
14546 {
14547 /* An lvalue will do. Take the address of the expression, store it
14548 in a temporary, and replace the expression with a MEM_REF of
14549 that temporary. */
14550 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
14551 unsigned int ref_align = get_object_alignment (*expr_p);
14552 tree ref_type = TREE_TYPE (*expr_p);
14553 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
14554 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
14555 if (TYPE_ALIGN (ref_type) != ref_align)
14556 ref_type = build_aligned_type (ref_type, ref_align);
14557 *expr_p = build2 (MEM_REF, ref_type,
14558 tmp, build_zero_cst (ref_alias_type));
14559 }
14560 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
14561 {
14562 /* An rvalue will do. Assign the gimplified expression into a
14563 new temporary TMP and replace the original expression with
14564 TMP. First, make sure that the expression has a type so that
14565 it can be assigned into a temporary. */
14566 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
14567 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
14568 }
14569 else
14570 {
14571 #ifdef ENABLE_GIMPLE_CHECKING
14572 if (!(fallback & fb_mayfail))
14573 {
14574 fprintf (stderr, "gimplification failed:\n");
14575 print_generic_expr (stderr, *expr_p);
14576 debug_tree (*expr_p);
14577 internal_error ("gimplification failed");
14578 }
14579 #endif
14580 gcc_assert (fallback & fb_mayfail);
14581
14582 /* If this is an asm statement, and the user asked for the
14583 impossible, don't die. Fail and let gimplify_asm_expr
14584 issue an error. */
14585 ret = GS_ERROR;
14586 goto out;
14587 }
14588
14589 /* Make sure the temporary matches our predicate. */
14590 gcc_assert ((*gimple_test_f) (*expr_p));
14591
14592 if (!gimple_seq_empty_p (internal_post))
14593 {
14594 annotate_all_with_location (internal_post, input_location);
14595 gimplify_seq_add_seq (pre_p, internal_post);
14596 }
14597
14598 out:
14599 input_location = saved_location;
14600 return ret;
14601 }
14602
14603 /* Like gimplify_expr, but make sure the gimplified result is not itself
14604 an SSA name (a decl is used in its stead). Temporaries required by
14605 evaluating *EXPR_P may still be SSA names. */
14606
14607 static enum gimplify_status
14608 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
14609 bool (*gimple_test_f) (tree), fallback_t fallback,
14610 bool allow_ssa)
14611 {
14612 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
14613 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
14614 gimple_test_f, fallback);
14615 if (! allow_ssa
14616 && TREE_CODE (*expr_p) == SSA_NAME)
14617 {
14618 tree name = *expr_p;
14619 if (was_ssa_name_p)
14620 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
14621 else
14622 {
14623 /* Avoid the extra copy if possible. */
14624 *expr_p = create_tmp_reg (TREE_TYPE (name));
14625 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
14626 release_ssa_name (name);
14627 }
14628 }
14629 return ret;
14630 }
14631
14632 /* Look through TYPE for variable-sized objects and gimplify each such
14633 size that we find. Add to LIST_P any statements generated. */
14634
14635 void
14636 gimplify_type_sizes (tree type, gimple_seq *list_p)
14637 {
14638 tree field, t;
14639
14640 if (type == NULL || type == error_mark_node)
14641 return;
14642
14643 /* We first do the main variant, then copy into any other variants. */
14644 type = TYPE_MAIN_VARIANT (type);
14645
14646 /* Avoid infinite recursion. */
14647 if (TYPE_SIZES_GIMPLIFIED (type))
14648 return;
14649
14650 TYPE_SIZES_GIMPLIFIED (type) = 1;
14651
14652 switch (TREE_CODE (type))
14653 {
14654 case INTEGER_TYPE:
14655 case ENUMERAL_TYPE:
14656 case BOOLEAN_TYPE:
14657 case REAL_TYPE:
14658 case FIXED_POINT_TYPE:
14659 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
14660 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
14661
14662 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
14663 {
14664 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
14665 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
14666 }
14667 break;
14668
14669 case ARRAY_TYPE:
14670 /* These types may not have declarations, so handle them here. */
14671 gimplify_type_sizes (TREE_TYPE (type), list_p);
14672 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
14673 /* Ensure VLA bounds aren't removed: at -O0 they should be variables
14674 with assigned stack slots; at -O1+ with -g they should be tracked
14675 by VTA. */
14676 if (!(TYPE_NAME (type)
14677 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14678 && DECL_IGNORED_P (TYPE_NAME (type)))
14679 && TYPE_DOMAIN (type)
14680 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
14681 {
14682 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
14683 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
14684 DECL_IGNORED_P (t) = 0;
14685 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
14686 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
14687 DECL_IGNORED_P (t) = 0;
14688 }
14689 break;
14690
14691 case RECORD_TYPE:
14692 case UNION_TYPE:
14693 case QUAL_UNION_TYPE:
14694 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14695 if (TREE_CODE (field) == FIELD_DECL)
14696 {
14697 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
14698 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
14699 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
14700 gimplify_type_sizes (TREE_TYPE (field), list_p);
14701 }
14702 break;
14703
14704 case POINTER_TYPE:
14705 case REFERENCE_TYPE:
14706 /* We used to recurse on the pointed-to type here, which turned out to
14707 be incorrect because its definition might refer to variables not
14708 yet initialized at this point if a forward declaration is involved.
14709
14710 It was actually useful for anonymous pointed-to types to ensure
14711 that the sizes evaluation dominates every possible later use of the
14712 values. Restricting to such types here would be safe since there
14713 is no possible forward declaration around, but would introduce an
14714 undesirable middle-end semantic to anonymity. We then defer to
14715 front-ends the responsibility of ensuring that the sizes are
14716 evaluated both early and late enough, e.g. by attaching artificial
14717 type declarations to the tree. */
14718 break;
14719
14720 default:
14721 break;
14722 }
14723
14724 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
14725 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
14726
14727 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
14728 {
14729 TYPE_SIZE (t) = TYPE_SIZE (type);
14730 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
14731 TYPE_SIZES_GIMPLIFIED (t) = 1;
14732 }
14733 }
14734
14735 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
14736 a size or position, has had all of its SAVE_EXPRs evaluated.
14737 We add any required statements to *STMT_P. */
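/* Illustrative (hypothetical C input): for a VLA declared as int a[n],
   TYPE_SIZE is a SAVE_EXPR-wrapped expression along the lines of
   n * 32 (in bits); gimplifying it here evaluates the size once into
   a temporary so that every later use of the type shares that value. */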
14738
14739 void
14740 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
14741 {
14742 tree expr = *expr_p;
14743
14744 /* We don't do anything if the value isn't there, is constant, or contains
14745 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
14746 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
14747 will want to replace it with a new variable, but that would cause problems
14748 if this type is from outside the function; such a VAR_DECL is OK here. */
14749 if (expr == NULL_TREE
14750 || is_gimple_constant (expr)
14751 || TREE_CODE (expr) == VAR_DECL
14752 || CONTAINS_PLACEHOLDER_P (expr))
14753 return;
14754
14755 *expr_p = unshare_expr (expr);
14756
14757 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
14758 if the def vanishes. */
14759 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
14760
14761 /* If EXPR wasn't already is_gimple_sizepos or is_gimple_constant from the
14762 front end, ensure that it is a VAR_DECL; otherwise we might route some
14763 decls through gimplify_vla_decl even when all their sizes are INTEGER_CSTs. */
14764 if (is_gimple_constant (*expr_p))
14765 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
14766 }
14767
14768 /* Gimplify the body of FNDECL and return a GIMPLE_BIND node containing
14769 the corresponding sequence of GIMPLE statements. If DO_PARMS is true,
14770 also gimplify the parameters. */
14771
14772 gbind *
14773 gimplify_body (tree fndecl, bool do_parms)
14774 {
14775 location_t saved_location = input_location;
14776 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
14777 gimple *outer_stmt;
14778 gbind *outer_bind;
14779
14780 timevar_push (TV_TREE_GIMPLIFY);
14781
14782 init_tree_ssa (cfun);
14783
14784 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
14785 gimplification. */
14786 default_rtl_profile ();
14787
14788 gcc_assert (gimplify_ctxp == NULL);
14789 push_gimplify_context (true);
14790
14791 if (flag_openacc || flag_openmp)
14792 {
14793 gcc_assert (gimplify_omp_ctxp == NULL);
14794 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
14795 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
14796 }
14797
14798 /* Unshare most shared trees in the body and in that of any nested functions.
14799 It would seem we don't have to do this for nested functions because
14800 they are supposed to be output and then the outer function gimplified
14801 first, but the g++ front end doesn't always do it that way. */
14802 unshare_body (fndecl);
14803 unvisit_body (fndecl);
14804
14805 /* Make sure input_location isn't set to something weird. */
14806 input_location = DECL_SOURCE_LOCATION (fndecl);
14807
14808 /* Resolve callee-copies. This has to be done before processing
14809 the body so that DECL_VALUE_EXPR gets processed correctly. */
14810 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
14811
14812 /* Gimplify the function's body. */
14813 seq = NULL;
14814 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
14815 outer_stmt = gimple_seq_first_stmt (seq);
14816 if (!outer_stmt)
14817 {
14818 outer_stmt = gimple_build_nop ();
14819 gimplify_seq_add_stmt (&seq, outer_stmt);
14820 }
14821
14822 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
14823 not the case, wrap everything in a GIMPLE_BIND to make it so. */
14824 if (gimple_code (outer_stmt) == GIMPLE_BIND
14825 && gimple_seq_first (seq) == gimple_seq_last (seq))
14826 outer_bind = as_a <gbind *> (outer_stmt);
14827 else
14828 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
14829
14830 DECL_SAVED_TREE (fndecl) = NULL_TREE;
14831
14832 /* If we had callee-copies statements, insert them at the beginning
14833 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
14834 if (!gimple_seq_empty_p (parm_stmts))
14835 {
14836 tree parm;
14837
14838 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
14839 if (parm_cleanup)
14840 {
14841 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
14842 GIMPLE_TRY_FINALLY);
14843 parm_stmts = NULL;
14844 gimple_seq_add_stmt (&parm_stmts, g);
14845 }
14846 gimple_bind_set_body (outer_bind, parm_stmts);
14847
14848 for (parm = DECL_ARGUMENTS (current_function_decl);
14849 parm; parm = DECL_CHAIN (parm))
14850 if (DECL_HAS_VALUE_EXPR_P (parm))
14851 {
14852 DECL_HAS_VALUE_EXPR_P (parm) = 0;
14853 DECL_IGNORED_P (parm) = 0;
14854 }
14855 }
14856
14857 if ((flag_openacc || flag_openmp || flag_openmp_simd)
14858 && gimplify_omp_ctxp)
14859 {
14860 delete_omp_context (gimplify_omp_ctxp);
14861 gimplify_omp_ctxp = NULL;
14862 }
14863
14864 pop_gimplify_context (outer_bind);
14865 gcc_assert (gimplify_ctxp == NULL);
14866
14867 if (flag_checking && !seen_error ())
14868 verify_gimple_in_seq (gimple_bind_body (outer_bind));
14869
14870 timevar_pop (TV_TREE_GIMPLIFY);
14871 input_location = saved_location;
14872
14873 return outer_bind;
14874 }
14875
14876 typedef char *char_p; /* For DEF_VEC_P. */
14877
14878 /* Return whether we should exclude FNDECL from instrumentation. */
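/* For example (matching is by substring, via strstr below):
   -finstrument-functions-exclude-function-list=foo skips every
   function whose printable name contains "foo", and
   -finstrument-functions-exclude-file-list=include/ skips functions
   defined in files whose path contains "include/". */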
14879
14880 static bool
14881 flag_instrument_functions_exclude_p (tree fndecl)
14882 {
14883 vec<char_p> *v;
14884
14885 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
14886 if (v && v->length () > 0)
14887 {
14888 const char *name;
14889 int i;
14890 char *s;
14891
14892 name = lang_hooks.decl_printable_name (fndecl, 1);
14893 FOR_EACH_VEC_ELT (*v, i, s)
14894 if (strstr (name, s) != NULL)
14895 return true;
14896 }
14897
14898 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
14899 if (v && v->length () > 0)
14900 {
14901 const char *name;
14902 int i;
14903 char *s;
14904
14905 name = DECL_SOURCE_FILE (fndecl);
14906 FOR_EACH_VEC_ELT (*v, i, s)
14907 if (strstr (name, s) != NULL)
14908 return true;
14909 }
14910
14911 return false;
14912 }
14913
14914 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
14915 node for the function we want to gimplify.
14916
14917 The sequence of GIMPLE statements corresponding to the body of FNDECL
14918 is stored as its gimple body, and DECL_SAVED_TREE is cleared. */
14919
14920 void
14921 gimplify_function_tree (tree fndecl)
14922 {
14923 tree parm, ret;
14924 gimple_seq seq;
14925 gbind *bind;
14926
14927 gcc_assert (!gimple_body (fndecl));
14928
14929 if (DECL_STRUCT_FUNCTION (fndecl))
14930 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
14931 else
14932 push_struct_function (fndecl);
14933
14934 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
14935 if necessary. */
14936 cfun->curr_properties |= PROP_gimple_lva;
14937
14938 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
14939 {
14940 /* Preliminarily mark non-addressed complex variables as eligible
14941 for promotion to gimple registers. We'll transform their uses
14942 as we find them. */
14943 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
14944 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
14945 && !TREE_THIS_VOLATILE (parm)
14946 && !needs_to_live_in_memory (parm))
14947 DECL_GIMPLE_REG_P (parm) = 1;
14948 }
14949
14950 ret = DECL_RESULT (fndecl);
14951 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
14952 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
14953 && !needs_to_live_in_memory (ret))
14954 DECL_GIMPLE_REG_P (ret) = 1;
14955
14956 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
14957 asan_poisoned_variables = new hash_set<tree> ();
14958 bind = gimplify_body (fndecl, true);
14959 if (asan_poisoned_variables)
14960 {
14961 delete asan_poisoned_variables;
14962 asan_poisoned_variables = NULL;
14963 }
14964
14965 /* The tree body of the function is no longer needed, replace it
14966 with the new GIMPLE body. */
14967 seq = NULL;
14968 gimple_seq_add_stmt (&seq, bind);
14969 gimple_set_body (fndecl, seq);
14970
14971 /* If we're instrumenting function entry/exit, then prepend the call to
14972 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
14973 catch the exit hook. */
14974 /* ??? Add some way to ignore exceptions for this TFE. */
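/* The resulting shape is roughly (illustrative GIMPLE):

     return_addr = __builtin_return_address (0);
     __cyg_profile_func_enter (this_fn_addr, return_addr);
     try
       {
         <original body>
       }
     finally
       {
         return_addr = __builtin_return_address (0);
         __cyg_profile_func_exit (this_fn_addr, return_addr);
       }  */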
14975 if (flag_instrument_function_entry_exit
14976 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
14977 /* Do not instrument extern inline functions. */
14978 && !(DECL_DECLARED_INLINE_P (fndecl)
14979 && DECL_EXTERNAL (fndecl)
14980 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
14981 && !flag_instrument_functions_exclude_p (fndecl))
14982 {
14983 tree x;
14984 gbind *new_bind;
14985 gimple *tf;
14986 gimple_seq cleanup = NULL, body = NULL;
14987 tree tmp_var, this_fn_addr;
14988 gcall *call;
14989
14990 /* The instrumentation hooks aren't going to call the instrumented
14991 function and the address they receive is expected to be matchable
14992 against symbol addresses. Make sure we don't create a trampoline,
14993 in case the current function is nested. */
14994 this_fn_addr = build_fold_addr_expr (current_function_decl);
14995 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
14996
14997 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
14998 call = gimple_build_call (x, 1, integer_zero_node);
14999 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15000 gimple_call_set_lhs (call, tmp_var);
15001 gimplify_seq_add_stmt (&cleanup, call);
15002 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
15003 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15004 gimplify_seq_add_stmt (&cleanup, call);
15005 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
15006
15007 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15008 call = gimple_build_call (x, 1, integer_zero_node);
15009 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15010 gimple_call_set_lhs (call, tmp_var);
15011 gimplify_seq_add_stmt (&body, call);
15012 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
15013 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15014 gimplify_seq_add_stmt (&body, call);
15015 gimplify_seq_add_stmt (&body, tf);
15016 new_bind = gimple_build_bind (NULL, body, NULL);
15017
15018 /* Replace the current function body with the body
15019 wrapped in the try/finally TF. */
15020 seq = NULL;
15021 gimple_seq_add_stmt (&seq, new_bind);
15022 gimple_set_body (fndecl, seq);
15023 bind = new_bind;
15024 }
15025
15026 if (sanitize_flags_p (SANITIZE_THREAD))
15027 {
15028 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
15029 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
15030 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
15031 /* Replace the current function body with the body
15032 wrapped in the try/finally TF. */
15033 seq = NULL;
15034 gimple_seq_add_stmt (&seq, new_bind);
15035 gimple_set_body (fndecl, seq);
15036 }
15037
15038 DECL_SAVED_TREE (fndecl) = NULL_TREE;
15039 cfun->curr_properties |= PROP_gimple_any;
15040
15041 pop_cfun ();
15042
15043 dump_function (TDI_gimple, fndecl);
15044 }
15045
15046 /* Return a dummy expression of type TYPE in order to keep going after an
15047 error. */
15048
15049 static tree
15050 dummy_object (tree type)
15051 {
15052 tree t = build_int_cst (build_pointer_type (type), 0);
15053 return build2 (MEM_REF, type, t, t);
15054 }
15055
15056 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
15057 builtin function, but a very special sort of operator. */
15058
15059 enum gimplify_status
15060 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
15061 gimple_seq *post_p ATTRIBUTE_UNUSED)
15062 {
15063 tree promoted_type, have_va_type;
15064 tree valist = TREE_OPERAND (*expr_p, 0);
15065 tree type = TREE_TYPE (*expr_p);
15066 tree t, tag, aptag;
15067 location_t loc = EXPR_LOCATION (*expr_p);
15068
15069 /* Verify that valist is of the proper type. */
15070 have_va_type = TREE_TYPE (valist);
15071 if (have_va_type == error_mark_node)
15072 return GS_ERROR;
15073 have_va_type = targetm.canonical_va_list_type (have_va_type);
15074 if (have_va_type == NULL_TREE
15075 && POINTER_TYPE_P (TREE_TYPE (valist)))
15076 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
15077 have_va_type
15078 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
15079 gcc_assert (have_va_type != NULL_TREE);
15080
15081 /* Generate a diagnostic for requesting data of a type that cannot
15082 be passed through `...' due to type promotion at the call site. */
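/* Illustrative (hypothetical user code): in a variadic callee,

     char c = va_arg (ap, char);

   is diagnosed here, because a char argument arrives promoted to
   int, so the user should write va_arg (ap, int) instead. */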
15083 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
15084 != type)
15085 {
15086 static bool gave_help;
15087 bool warned;
15088 /* Use the expansion point to handle cases such as passing bool (defined
15089 in a system header) through `...'. */
15090 location_t xloc
15091 = expansion_point_location_if_in_system_header (loc);
15092
15093 /* Unfortunately, this is merely undefined, rather than a constraint
15094 violation, so we cannot make this an error. If this call is never
15095 executed, the program is still strictly conforming. */
15096 auto_diagnostic_group d;
15097 warned = warning_at (xloc, 0,
15098 "%qT is promoted to %qT when passed through %<...%>",
15099 type, promoted_type);
15100 if (!gave_help && warned)
15101 {
15102 gave_help = true;
15103 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
15104 promoted_type, type);
15105 }
15106
15107 /* We can, however, treat "undefined" any way we please.
15108 Call abort to encourage the user to fix the program. */
15109 if (warned)
15110 inform (xloc, "if this code is reached, the program will abort");
15111 /* Before the abort, allow the evaluation of the va_list
15112 expression to exit or longjmp. */
15113 gimplify_and_add (valist, pre_p);
15114 t = build_call_expr_loc (loc,
15115 builtin_decl_implicit (BUILT_IN_TRAP), 0);
15116 gimplify_and_add (t, pre_p);
15117
15118 /* This is dead code, but go ahead and finish so that the
15119 mode of the result comes out right. */
15120 *expr_p = dummy_object (type);
15121 return GS_ALL_DONE;
15122 }
15123
15124 tag = build_int_cst (build_pointer_type (type), 0);
15125 aptag = build_int_cst (TREE_TYPE (valist), 0);
15126
15127 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
15128 valist, tag, aptag);
15129
15130 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
15131 needs to be expanded. */
15132 cfun->curr_properties &= ~PROP_gimple_lva;
15133
15134 return GS_OK;
15135 }
15136
15137 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
15138
15139 DST/SRC are the destination and source respectively. You can pass
15140 ungimplified trees in DST or SRC, in which case they will be
15141 converted to a gimple operand if necessary.
15142
15143 This function returns the newly created GIMPLE_ASSIGN tuple. */
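/* Usage sketch (hypothetical caller; assumes trees x and y of
   compatible type and a gimple_seq seq):

     tree rhs = build2 (PLUS_EXPR, TREE_TYPE (y), y,
                        build_int_cst (TREE_TYPE (y), 1));
     gimple *g = gimplify_assign (x, rhs, &seq);

   The ungimplified RHS is flattened into seq and the final
   assignment statement is returned. */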
15144
15145 gimple *
15146 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
15147 {
15148 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
15149 gimplify_and_add (t, seq_p);
15150 ggc_free (t);
15151 return gimple_seq_last_stmt (*seq_p);
15152 }
15153
15154 inline hashval_t
15155 gimplify_hasher::hash (const elt_t *p)
15156 {
15157 tree t = p->val;
15158 return iterative_hash_expr (t, 0);
15159 }
15160
15161 inline bool
15162 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
15163 {
15164 tree t1 = p1->val;
15165 tree t2 = p2->val;
15166 enum tree_code code = TREE_CODE (t1);
15167
15168 if (TREE_CODE (t2) != code
15169 || TREE_TYPE (t1) != TREE_TYPE (t2))
15170 return false;
15171
15172 if (!operand_equal_p (t1, t2, 0))
15173 return false;
15174
15175 /* Only allow them to compare equal if they also hash equal; otherwise
15176 results are nondeterministic and we fail bootstrap comparison. */
15177 gcc_checking_assert (hash (p1) == hash (p2));
15178
15179 return true;
15180 }