/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2019 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is an "always, to" or "always, tofrom"
     mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 524288,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 1048576,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 2097152,

  GOVD_NONTEMPORAL = 4194304,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  int defaultmap[4];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);

/* Shorter alias name for the above function for use in gimplify.c
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}

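/* Illustrative sketch (not part of the original source): with optimization
   enabled, two formal-temporary requests for equal expressions share one
   temporary via temp_htab, so gimplifying "a + b" twice yields roughly

     D.1 = a + b;
     ... D.1 ... D.1 ...

   whereas at -O0, or when the value has side effects, each request
   creates a fresh temporary.  The name "D.1" is hypothetical.  */
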
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
			 bool allow_ssa)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}

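/* Usage sketch (illustrative, not from the original source): a caller
   that needs EXPR's value as a GIMPLE operand can write

     tree t = get_initialized_tmp_var (expr, pre_p, NULL, true);

   after which T is a register (possibly an SSA name) holding EXPR's
   value and the initialization statement has been appended to *PRE_P.
   get_formal_tmp_var additionally allows equal expressions to share one
   temporary, under the two conditions documented above.  */
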
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


\f
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.
   If there are only a few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */

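/* Sketch of the mark/copy/unmark scheme above (illustrative, not from
   the original source):

     unshare_body (fndecl);
       - copy_if_shared_r marks each node with TREE_VISITED on its first
	 visit and deep-copies it via mostly_copy_tree_r on a second visit
     unvisit_body (fndecl);
       - unmark_visited_r clears the TREE_VISITED marks again

   Types, decls and constants are allowed to remain shared, so they are
   skipped rather than copied.  */
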
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}

\f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}

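/* Example (illustrative, not from the original source): for a statement
   expression used as a value,

     x = ({ int i = f (); i + 1; });

   voidify_wrapper_expr gives the wrapper void type and pushes the value
   into a temporary, roughly

     ({ int i = f (); retval = i + 1; });  x = retval;

   returning "retval" (the name is hypothetical) to the caller.  */
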
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

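/* The generated pair looks roughly like (illustrative):

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);

   gimplify_bind_expr emits the restore inside a GIMPLE_TRY_FINALLY
   cleanup so it runs on every exit from the block; "saved_stack.1" is
   the temporary created here.  */
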
/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that, depending on the POISON flag,
   poisons or unpoisons the shadow memory of variable DECL.  The call is
   inserted at the position identified by iterator IT; the BEFORE flag
   selects whether it goes before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It is necessary to have all stack variables aligned to the ASAN
     shadow granularity.  */
  if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
   flag, either poisons or unpoisons DECL.  The created statement is
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

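/* The emitted statement looks roughly like (illustrative):

     .ASAN_MARK (POISON, &decl, size-in-bytes);

   i.e. an IFN_ASAN_MARK internal call taking the poison/unpoison flag,
   the variable's address and its size as DECL_SIZE_UNIT.  */
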
/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate an IFN_ASAN_MARK internal call for each variable in VARIABLES,
   poisoning or unpoisoning depending on the POISON flag.  The created
   statements are appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

1282 to prevent re-written into SSA. */
1283 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1284 DECL_ATTRIBUTES (var)))
1285 DECL_ATTRIBUTES (var)
1286 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1287 integer_one_node,
1288 DECL_ATTRIBUTES (var));
1289 }
1290 }
1291
1292 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1293
1294 static enum gimplify_status
1295 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1296 {
1297 tree bind_expr = *expr_p;
1298 bool old_keep_stack = gimplify_ctxp->keep_stack;
1299 bool old_save_stack = gimplify_ctxp->save_stack;
1300 tree t;
1301 gbind *bind_stmt;
1302 gimple_seq body, cleanup;
1303 gcall *stack_save;
1304 location_t start_locus = 0, end_locus = 0;
1305 tree ret_clauses = NULL;
1306
1307 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1308
1309 /* Mark variables seen in this bind expr. */
1310 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1311 {
1312 if (VAR_P (t))
1313 {
1314 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1315
1316 /* Mark variable as local. */
1317 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1318 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1319 || splay_tree_lookup (ctx->variables,
1320 (splay_tree_key) t) == NULL))
1321 {
1322 if (ctx->region_type == ORT_SIMD
1323 && TREE_ADDRESSABLE (t)
1324 && !TREE_STATIC (t))
1325 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1326 else
1327 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1328 }
1329
1330 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1331
1332 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1333 cfun->has_local_explicit_reg_vars = true;
1334 }
1335
1336 /* Preliminarily mark non-addressed complex variables as eligible
1337 for promotion to gimple registers. We'll transform their uses
1338 as we find them. */
1339 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1340 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1341 && !TREE_THIS_VOLATILE (t)
1342 && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1343 && !needs_to_live_in_memory (t))
1344 DECL_GIMPLE_REG_P (t) = 1;
1345 }
1346
1347 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1348 BIND_EXPR_BLOCK (bind_expr));
1349 gimple_push_bind_expr (bind_stmt);
1350
1351 gimplify_ctxp->keep_stack = false;
1352 gimplify_ctxp->save_stack = false;
1353
1354 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1355 body = NULL;
1356 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1357 gimple_bind_set_body (bind_stmt, body);
1358
1359 /* Source location wise, the cleanup code (stack_restore and clobbers)
1360 belongs to the end of the block, so propagate what we have. The
1361 stack_save operation belongs to the beginning of block, which we can
1362 infer from the bind_expr directly if the block has no explicit
1363 assignment. */
1364 if (BIND_EXPR_BLOCK (bind_expr))
1365 {
1366 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1367 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1368 }
1369 if (start_locus == 0)
1370 start_locus = EXPR_LOCATION (bind_expr);
1371
1372 cleanup = NULL;
1373 stack_save = NULL;
1374
1375 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1376 the stack space allocated to the VLAs. */
1377 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1378 {
1379 gcall *stack_restore;
1380
1381 /* Save stack on entry and restore it on exit. Add a try_finally
1382 block to achieve this. */
1383 build_stack_save_restore (&stack_save, &stack_restore);
1384
1385 gimple_set_location (stack_save, start_locus);
1386 gimple_set_location (stack_restore, end_locus);
1387
1388 gimplify_seq_add_stmt (&cleanup, stack_restore);
1389 }
1390
1391 /* Add clobbers for all variables that go out of scope. */
1392 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1393 {
1394 if (VAR_P (t)
1395 && !is_global_var (t)
1396 && DECL_CONTEXT (t) == current_function_decl)
1397 {
1398 if (!DECL_HARD_REGISTER (t)
1399 && !TREE_THIS_VOLATILE (t)
1400 && !DECL_HAS_VALUE_EXPR_P (t)
1401 /* Only care for variables that have to be in memory. Others
1402 will be rewritten into SSA names, hence moved to the
1403 top-level. */
1404 && !is_gimple_reg (t)
1405 && flag_stack_reuse != SR_NONE)
1406 {
1407 tree clobber = build_clobber (TREE_TYPE (t));
1408 gimple *clobber_stmt;
1409 clobber_stmt = gimple_build_assign (t, clobber);
1410 gimple_set_location (clobber_stmt, end_locus);
1411 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1412 }
1413
1414 if (flag_openacc && oacc_declare_returns != NULL)
1415 {
1416 tree *c = oacc_declare_returns->get (t);
1417 if (c != NULL)
1418 {
1419 if (ret_clauses)
1420 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1421
1422 ret_clauses = *c;
1423
1424 oacc_declare_returns->remove (t);
1425
1426 if (oacc_declare_returns->elements () == 0)
1427 {
1428 delete oacc_declare_returns;
1429 oacc_declare_returns = NULL;
1430 }
1431 }
1432 }
1433 }
1434
1435 if (asan_poisoned_variables != NULL
1436 && asan_poisoned_variables->contains (t))
1437 {
1438 asan_poisoned_variables->remove (t);
1439 asan_poison_variable (t, true, &cleanup);
1440 }
1441
1442 if (gimplify_ctxp->live_switch_vars != NULL
1443 && gimplify_ctxp->live_switch_vars->contains (t))
1444 gimplify_ctxp->live_switch_vars->remove (t);
1445 }
1446
1447 if (ret_clauses)
1448 {
1449 gomp_target *stmt;
1450 gimple_stmt_iterator si = gsi_start (cleanup);
1451
1452 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1453 ret_clauses);
1454 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1455 }
1456
1457 if (cleanup)
1458 {
1459 gtry *gs;
1460 gimple_seq new_body;
1461
1462 new_body = NULL;
1463 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1464 GIMPLE_TRY_FINALLY);
1465
1466 if (stack_save)
1467 gimplify_seq_add_stmt (&new_body, stack_save);
1468 gimplify_seq_add_stmt (&new_body, gs);
1469 gimple_bind_set_body (bind_stmt, new_body);
1470 }
1471
1472 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1473 if (!gimplify_ctxp->keep_stack)
1474 gimplify_ctxp->keep_stack = old_keep_stack;
1475 gimplify_ctxp->save_stack = old_save_stack;
1476
1477 gimple_pop_bind_expr ();
1478
1479 gimplify_seq_add_stmt (pre_p, bind_stmt);
1480
1481 if (temp)
1482 {
1483 *expr_p = temp;
1484 return GS_OK;
1485 }
1486
1487 *expr_p = NULL_TREE;
1488 return GS_ALL_DONE;
1489 }
1490
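/* Example of the overall transformation (illustrative, not from the
   original source): a block

     { struct S s; use (&s); }

   becomes roughly

     gimple_bind, vars: s
       try
	 use (&s);
       finally
	 s = {CLOBBER};

   with __builtin_stack_save/__builtin_stack_restore additionally
   bracketing the body when the block allocated variable-sized stack
   storage that can be reclaimed.  */
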
/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are not in a conditional context, add PREDICT statement.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}

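/* Example (illustrative, not from the original source): in a function
   returning int by value,

     return a + b;

   gimplifies to roughly

     retval.1 = a + b;
     return retval.1;

   where "retval.1" (a hypothetical name) is the shared return temporary
   cached in gimplify_ctxp->return_temp.  */
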
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}

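/* Example (illustrative, not from the original source): for "char a[n];"
   this emits, roughly,

     a.1 = alloca-style builtin call (n, DECL_ALIGN (a));

   marked with CALL_ALLOCA_FOR_VAR_P, and sets DECL_VALUE_EXPR (a) to the
   indirection "*a.1", so later uses of "a" are rewritten through the
   pointer ("a.1" is a hypothetical temporary name).  */
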
/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}

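/* Example (illustrative, not from the original source): LOOP_EXPR <body>
   lowers to

     start_label:
       ... body ...
       goto start_label;
     exit_label:

   where exit_label is emitted only if gimplifying the body (via an
   EXIT_EXPR) created one.  */
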
1776 /* Gimplify a statement list onto a sequence. These may be created either
1777 by an enlightened front-end, or by shortcut_cond_expr. */
1778
1779 static enum gimplify_status
1780 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1781 {
1782 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1783
1784 tree_stmt_iterator i = tsi_start (*expr_p);
1785
1786 while (!tsi_end_p (i))
1787 {
1788 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1789 tsi_delink (&i);
1790 }
1791
1792 if (temp)
1793 {
1794 *expr_p = temp;
1795 return GS_OK;
1796 }
1797
1798 return GS_ALL_DONE;
1799 }
1800
1801 /* Callback for walk_gimple_seq. */
1802
1803 static tree
1804 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1805 struct walk_stmt_info *wi)
1806 {
1807 gimple *stmt = gsi_stmt (*gsi_p);
1808
1809 *handled_ops_p = true;
1810 switch (gimple_code (stmt))
1811 {
1812 case GIMPLE_TRY:
1813 /* A compiler-generated cleanup or a user-written try block.
1814 If it's empty, don't dive into it--that would result in
1815 worse location info. */
1816 if (gimple_try_eval (stmt) == NULL)
1817 {
1818 wi->info = stmt;
1819 return integer_zero_node;
1820 }
1821 /* Fall through. */
1822 case GIMPLE_BIND:
1823 case GIMPLE_CATCH:
1824 case GIMPLE_EH_FILTER:
1825 case GIMPLE_TRANSACTION:
1826 /* Walk the sub-statements. */
1827 *handled_ops_p = false;
1828 break;
1829
1830 case GIMPLE_DEBUG:
1831 /* Ignore these. We may generate them before declarations that
1832 are never executed. If there's something to warn about,
1833 there will be non-debug stmts too, and we'll catch those. */
1834 break;
1835
1836 case GIMPLE_CALL:
1837 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1838 {
1839 *handled_ops_p = false;
1840 break;
1841 }
1842 /* Fall through. */
1843 default:
1844 /* Save the first "real" statement (not a decl/lexical scope/...). */
1845 wi->info = stmt;
1846 return integer_zero_node;
1847 }
1848 return NULL_TREE;
1849 }
1850
1851 /* Possibly warn about unreachable statements between switch's controlling
1852 expression and the first case. SEQ is the body of a switch expression. */
1853
1854 static void
1855 maybe_warn_switch_unreachable (gimple_seq seq)
1856 {
1857 if (!warn_switch_unreachable
1858 /* This warning doesn't play well with Fortran when optimizations
1859 are on. */
1860 || lang_GNU_Fortran ()
1861 || seq == NULL)
1862 return;
1863
1864 struct walk_stmt_info wi;
1865 memset (&wi, 0, sizeof (wi));
1866 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1867 gimple *stmt = (gimple *) wi.info;
1868
1869 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1870 {
1871 if (gimple_code (stmt) == GIMPLE_GOTO
1872 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1873 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1874 /* Don't warn for compiler-generated gotos. These occur
1875 in Duff's devices, for example. */;
1876 else
1877 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1878 "statement will never be executed");
1879 }
1880 }
1881
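/* Editorial example (hypothetical user code): the warning above fires for
   statements emitted between the controlling expression and the first
   case label, e.g.

     switch (i)
       {
         foo ();        <- "statement will never be executed"
       case 1:
         bar ();
         break;
       }

   Debug statements are skipped by warn_switch_unreachable_r, and
   compiler-generated gotos (as in Duff's device) are excluded above.  */
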
1882
1883 /* A label entry that pairs label and a location. */
1884 struct label_entry
1885 {
1886 tree label;
1887 location_t loc;
1888 };
1889
1890 /* Find LABEL in vector of label entries VEC. */
1891
1892 static struct label_entry *
1893 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1894 {
1895 unsigned int i;
1896 struct label_entry *l;
1897
1898 FOR_EACH_VEC_ELT (*vec, i, l)
1899 if (l->label == label)
1900 return l;
1901 return NULL;
1902 }
1903
1904 /* Return true if LABEL, a LABEL_DECL, represents a case label
1905 in a vector of labels CASES. */
1906
1907 static bool
1908 case_label_p (const vec<tree> *cases, tree label)
1909 {
1910 unsigned int i;
1911 tree l;
1912
1913 FOR_EACH_VEC_ELT (*cases, i, l)
1914 if (CASE_LABEL (l) == label)
1915 return true;
1916 return false;
1917 }
1918
1919 /* Find the last nondebug statement in a scope STMT. */
1920
1921 static gimple *
1922 last_stmt_in_scope (gimple *stmt)
1923 {
1924 if (!stmt)
1925 return NULL;
1926
1927 switch (gimple_code (stmt))
1928 {
1929 case GIMPLE_BIND:
1930 {
1931 gbind *bind = as_a <gbind *> (stmt);
1932 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
1933 return last_stmt_in_scope (stmt);
1934 }
1935
1936 case GIMPLE_TRY:
1937 {
1938 gtry *try_stmt = as_a <gtry *> (stmt);
1939 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
1940 gimple *last_eval = last_stmt_in_scope (stmt);
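	/* Editorial note: LAST_EVAL may be NULL here (an empty or
	   debug-only try body); gimple_stmt_may_fallthru is assumed to
	   accept a NULL statement and treat it as something that may
	   fall through, so the NULL test below only guards the
	   IFN_FALLTHROUGH check.  */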
1941 if (gimple_stmt_may_fallthru (last_eval)
1942 && (last_eval == NULL
1943 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1944 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1945 {
1946 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
1947 return last_stmt_in_scope (stmt);
1948 }
1949 else
1950 return last_eval;
1951 }
1952
1953 case GIMPLE_DEBUG:
1954 gcc_unreachable ();
1955
1956 default:
1957 return stmt;
1958 }
1959 }
1960
1961 /* Collect interesting labels in LABELS and return the statement preceding
1962 another case label, or a user-defined label. Store a location useful
1963 to give warnings at *PREVLOC (usually the location of the returned
1964 statement or of its surrounding scope). */
1965
1966 static gimple *
1967 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1968 auto_vec <struct label_entry> *labels,
1969 location_t *prevloc)
1970 {
1971 gimple *prev = NULL;
1972
1973 *prevloc = UNKNOWN_LOCATION;
1974 do
1975 {
1976 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
1977 {
1978 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
1979 which starts on a GIMPLE_SWITCH and ends with a break label.
1980 Handle that as a single statement that can fall through. */
1981 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
1982 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
1983 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
1984 if (last
1985 && gimple_code (first) == GIMPLE_SWITCH
1986 && gimple_code (last) == GIMPLE_LABEL)
1987 {
1988 tree label = gimple_label_label (as_a <glabel *> (last));
1989 if (SWITCH_BREAK_LABEL_P (label))
1990 {
1991 prev = bind;
1992 gsi_next (gsi_p);
1993 continue;
1994 }
1995 }
1996 }
1997 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1998 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1999 {
2000 /* Nested scope. Only look at the last statement of
2001 the innermost scope. */
2002 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2003 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2004 if (last)
2005 {
2006 prev = last;
2007 /* It might be a label without a location. Use the
2008 location of the scope then. */
2009 if (!gimple_has_location (prev))
2010 *prevloc = bind_loc;
2011 }
2012 gsi_next (gsi_p);
2013 continue;
2014 }
2015
2016 /* Ifs are tricky. */
2017 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2018 {
2019 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2020 tree false_lab = gimple_cond_false_label (cond_stmt);
2021 location_t if_loc = gimple_location (cond_stmt);
2022
2023 /* If we have e.g.
2024 if (i > 1) goto <D.2259>; else goto D;
2025 we can't do much with the else-branch. */
2026 if (!DECL_ARTIFICIAL (false_lab))
2027 break;
2028
2029 /* Go on until the false label, then one step back. */
2030 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2031 {
2032 gimple *stmt = gsi_stmt (*gsi_p);
2033 if (gimple_code (stmt) == GIMPLE_LABEL
2034 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2035 break;
2036 }
2037
2038 /* Not found? Oops. */
2039 if (gsi_end_p (*gsi_p))
2040 break;
2041
2042 struct label_entry l = { false_lab, if_loc };
2043 labels->safe_push (l);
2044
2045 /* Go to the last statement of the then branch. */
2046 gsi_prev (gsi_p);
2047
2048 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2049 <D.1759>:
2050 <stmt>;
2051 goto <D.1761>;
2052 <D.1760>:
2053 */
2054 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2055 && !gimple_has_location (gsi_stmt (*gsi_p)))
2056 {
2057 /* Look at the statement before; it might be an
2058 attribute fallthrough, in which case don't warn. */
2059 gsi_prev (gsi_p);
2060 bool fallthru_before_dest
2061 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2062 gsi_next (gsi_p);
2063 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2064 if (!fallthru_before_dest)
2065 {
2066 struct label_entry l = { goto_dest, if_loc };
2067 labels->safe_push (l);
2068 }
2069 }
2070 /* And move back. */
2071 gsi_next (gsi_p);
2072 }
2073
2074 /* Remember the last statement. Skip labels that are of no interest
2075 to us. */
2076 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2077 {
2078 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2079 if (find_label_entry (labels, label))
2080 prev = gsi_stmt (*gsi_p);
2081 }
2082 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2083 ;
2084 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2085 prev = gsi_stmt (*gsi_p);
2086 gsi_next (gsi_p);
2087 }
2088 while (!gsi_end_p (*gsi_p)
2089 /* Stop if we find a case or a user-defined label. */
2090 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2091 || !gimple_has_location (gsi_stmt (*gsi_p))));
2092
2093 if (prev && gimple_has_location (prev))
2094 *prevloc = gimple_location (prev);
2095 return prev;
2096 }
2097
2098 /* Return true if the switch fallthrough warning should occur. LABEL is
2099 the label statement that we're falling through to. */
2100
2101 static bool
2102 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2103 {
2104 gimple_stmt_iterator gsi = *gsi_p;
2105
2106 /* Don't warn if the label is marked with a "falls through" comment. */
2107 if (FALLTHROUGH_LABEL_P (label))
2108 return false;
2109
2110 /* Don't warn for non-case labels followed by a statement:
2111 case 0:
2112 foo ();
2113 label:
2114 bar ();
2115 as these are likely intentional. */
2116 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2117 {
2118 tree l;
2119 while (!gsi_end_p (gsi)
2120 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2121 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2122 && !case_label_p (&gimplify_ctxp->case_labels, l))
2123 gsi_next_nondebug (&gsi);
2124 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2125 return false;
2126 }
2127
2128 /* Don't warn for terminated branches, i.e. when the branch after the
2129 subsequent case labels immediately breaks. */
2130 gsi = *gsi_p;
2131
2132 /* Skip all immediately following labels. */
2133 while (!gsi_end_p (gsi)
2134 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2135 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2136 gsi_next_nondebug (&gsi);
2137
2138 /* { ... something; default:; } */
2139 if (gsi_end_p (gsi)
2140 /* { ... something; default: break; } or
2141 { ... something; default: goto L; } */
2142 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2143 /* { ... something; default: return; } */
2144 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2145 return false;
2146
2147 return true;
2148 }
2149
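/* Editorial examples (hypothetical user code) of fall-throughs that the
   function above deliberately leaves undiagnosed:

     case 0:
       foo ();
     label:           <- non-case label followed by a statement
       bar ();
       break;

     case 1:
       baz ();
     default:         <- the following branch terminates immediately
       break;
   */
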
2150 /* Callback for walk_gimple_seq. */
2151
2152 static tree
2153 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2154 struct walk_stmt_info *)
2155 {
2156 gimple *stmt = gsi_stmt (*gsi_p);
2157
2158 *handled_ops_p = true;
2159 switch (gimple_code (stmt))
2160 {
2161 case GIMPLE_TRY:
2162 case GIMPLE_BIND:
2163 case GIMPLE_CATCH:
2164 case GIMPLE_EH_FILTER:
2165 case GIMPLE_TRANSACTION:
2166 /* Walk the sub-statements. */
2167 *handled_ops_p = false;
2168 break;
2169
2170 /* Find a sequence of form:
2171
2172 GIMPLE_LABEL
2173 [...]
2174 <may fallthru stmt>
2175 GIMPLE_LABEL
2176
2177 and possibly warn. */
2178 case GIMPLE_LABEL:
2179 {
2180 /* Found a label. Skip all immediately following labels. */
2181 while (!gsi_end_p (*gsi_p)
2182 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2183 gsi_next_nondebug (gsi_p);
2184
2185 /* There might be no more statements. */
2186 if (gsi_end_p (*gsi_p))
2187 return integer_zero_node;
2188
2189 /* Vector of labels that fall through. */
2190 auto_vec <struct label_entry> labels;
2191 location_t prevloc;
2192 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2193
2194 /* There might be no more statements. */
2195 if (gsi_end_p (*gsi_p))
2196 return integer_zero_node;
2197
2198 gimple *next = gsi_stmt (*gsi_p);
2199 tree label;
2200 /* If what follows is a label, then we may have a fallthrough. */
2201 if (gimple_code (next) == GIMPLE_LABEL
2202 && gimple_has_location (next)
2203 && (label = gimple_label_label (as_a <glabel *> (next)))
2204 && prev != NULL)
2205 {
2206 struct label_entry *l;
2207 bool warned_p = false;
2208 auto_diagnostic_group d;
2209 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2210 /* Quiet. */;
2211 else if (gimple_code (prev) == GIMPLE_LABEL
2212 && (label = gimple_label_label (as_a <glabel *> (prev)))
2213 && (l = find_label_entry (&labels, label)))
2214 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2215 "this statement may fall through");
2216 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2217 /* Try to be clever and don't warn when the statement
2218 can't actually fall through. */
2219 && gimple_stmt_may_fallthru (prev)
2220 && prevloc != UNKNOWN_LOCATION)
2221 warned_p = warning_at (prevloc,
2222 OPT_Wimplicit_fallthrough_,
2223 "this statement may fall through");
2224 if (warned_p)
2225 inform (gimple_location (next), "here");
2226
2227 /* Mark this label as processed so as to prevent multiple
2228 warnings in nested switches. */
2229 FALLTHROUGH_LABEL_P (label) = true;
2230
2231 /* So that next warn_implicit_fallthrough_r will start looking for
2232 a new sequence starting with this label. */
2233 gsi_prev (gsi_p);
2234 }
2235 }
2236 break;
2237 default:
2238 break;
2239 }
2240 return NULL_TREE;
2241 }
2242
2243 /* Warn when a switch case falls through. */
2244
2245 static void
2246 maybe_warn_implicit_fallthrough (gimple_seq seq)
2247 {
2248 if (!warn_implicit_fallthrough)
2249 return;
2250
2251 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2252 if (!(lang_GNU_C ()
2253 || lang_GNU_CXX ()
2254 || lang_GNU_OBJC ()))
2255 return;
2256
2257 struct walk_stmt_info wi;
2258 memset (&wi, 0, sizeof (wi));
2259 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2260 }
2261
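/* Editorial example (hypothetical user code): the walk above diagnoses

     switch (i)
       {
       case 0:
         x = 1;        <- "this statement may fall through"
       case 1:         <- "here"
         x = 2;
         break;
       }

   Stating the intent with the fallthrough statement attribute (lowered
   by the front ends to an IFN_FALLTHROUGH call) or with a "falls
   through" comment recognized by -Wimplicit-fallthrough= (which sets
   FALLTHROUGH_LABEL_P on the label) silences the warning.  */
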
2262 /* Callback for walk_gimple_seq. */
2263
2264 static tree
2265 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2266 struct walk_stmt_info *wi)
2267 {
2268 gimple *stmt = gsi_stmt (*gsi_p);
2269
2270 *handled_ops_p = true;
2271 switch (gimple_code (stmt))
2272 {
2273 case GIMPLE_TRY:
2274 case GIMPLE_BIND:
2275 case GIMPLE_CATCH:
2276 case GIMPLE_EH_FILTER:
2277 case GIMPLE_TRANSACTION:
2278 /* Walk the sub-statements. */
2279 *handled_ops_p = false;
2280 break;
2281 case GIMPLE_CALL:
2282 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2283 {
2284 gsi_remove (gsi_p, true);
2285 if (gsi_end_p (*gsi_p))
2286 {
2287 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2288 return integer_zero_node;
2289 }
2290
2291 bool found = false;
2292 location_t loc = gimple_location (stmt);
2293
2294 gimple_stmt_iterator gsi2 = *gsi_p;
2295 stmt = gsi_stmt (gsi2);
2296 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2297 {
2298 /* Go on until the artificial label. */
2299 tree goto_dest = gimple_goto_dest (stmt);
2300 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2301 {
2302 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2303 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2304 == goto_dest)
2305 break;
2306 }
2307
2308 /* Not found? Stop. */
2309 if (gsi_end_p (gsi2))
2310 break;
2311
2312 /* Look one past it. */
2313 gsi_next (&gsi2);
2314 }
2315
2316 /* We're looking for a case label or default label here. */
2317 while (!gsi_end_p (gsi2))
2318 {
2319 stmt = gsi_stmt (gsi2);
2320 if (gimple_code (stmt) == GIMPLE_LABEL)
2321 {
2322 tree label = gimple_label_label (as_a <glabel *> (stmt));
2323 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2324 {
2325 found = true;
2326 break;
2327 }
2328 }
2329 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2330 ;
2331 else if (!is_gimple_debug (stmt))
2332 /* Anything else is not expected. */
2333 break;
2334 gsi_next (&gsi2);
2335 }
2336 if (!found)
2337 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2338 "a case label or default label");
2339 }
2340 break;
2341 default:
2342 break;
2343 }
2344 return NULL_TREE;
2345 }
2346
2347 /* Expand all FALLTHROUGH () calls in SEQ. */
2348
2349 static void
2350 expand_FALLTHROUGH (gimple_seq *seq_p)
2351 {
2352 struct walk_stmt_info wi;
2353 location_t loc;
2354 memset (&wi, 0, sizeof (wi));
2355 wi.info = (void *) &loc;
2356 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2357 if (wi.callback_result == integer_zero_node)
2358 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2359 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2360 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2361 "a case label or default label");
2362 }
2363
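/* Editorial example (hypothetical user code): the diagnostic above
   catches a misplaced attribute, e.g.

     switch (i)
       {
       case 0:
         __attribute__ ((fallthrough));
         x = 1;        <- next statement is not a case/default label
         break;
       }

   and likewise [[fallthrough]]; written as the last statement of the
   switch body.  */
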
2364 \f
2365 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2366 branch to. */
2367
2368 static enum gimplify_status
2369 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2370 {
2371 tree switch_expr = *expr_p;
2372 gimple_seq switch_body_seq = NULL;
2373 enum gimplify_status ret;
2374 tree index_type = TREE_TYPE (switch_expr);
2375 if (index_type == NULL_TREE)
2376 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2377
2378 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2379 fb_rvalue);
2380 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2381 return ret;
2382
2383 if (SWITCH_BODY (switch_expr))
2384 {
2385 vec<tree> labels;
2386 vec<tree> saved_labels;
2387 hash_set<tree> *saved_live_switch_vars = NULL;
2388 tree default_case = NULL_TREE;
2389 gswitch *switch_stmt;
2390
2391 /* Save old labels, get new ones from body, then restore the old
2392 labels. Save all the things from the switch body to append after. */
2393 saved_labels = gimplify_ctxp->case_labels;
2394 gimplify_ctxp->case_labels.create (8);
2395
2396 /* Only create live_switch_vars if SWITCH_BODY is a BIND_EXPR or a STATEMENT_LIST. */
2397 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2398 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2399 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2400 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2401 else
2402 gimplify_ctxp->live_switch_vars = NULL;
2403
2404 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2405 gimplify_ctxp->in_switch_expr = true;
2406
2407 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2408
2409 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2410 maybe_warn_switch_unreachable (switch_body_seq);
2411 maybe_warn_implicit_fallthrough (switch_body_seq);
2412 /* Only do this for the outermost GIMPLE_SWITCH. */
2413 if (!gimplify_ctxp->in_switch_expr)
2414 expand_FALLTHROUGH (&switch_body_seq);
2415
2416 labels = gimplify_ctxp->case_labels;
2417 gimplify_ctxp->case_labels = saved_labels;
2418
2419 if (gimplify_ctxp->live_switch_vars)
2420 {
2421 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2422 delete gimplify_ctxp->live_switch_vars;
2423 }
2424 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2425
2426 preprocess_case_label_vec_for_gimple (labels, index_type,
2427 &default_case);
2428
2429 bool add_bind = false;
2430 if (!default_case)
2431 {
2432 glabel *new_default;
2433
2434 default_case
2435 = build_case_label (NULL_TREE, NULL_TREE,
2436 create_artificial_label (UNKNOWN_LOCATION));
2437 if (old_in_switch_expr)
2438 {
2439 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2440 add_bind = true;
2441 }
2442 new_default = gimple_build_label (CASE_LABEL (default_case));
2443 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2444 }
2445 else if (old_in_switch_expr)
2446 {
2447 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2448 if (last && gimple_code (last) == GIMPLE_LABEL)
2449 {
2450 tree label = gimple_label_label (as_a <glabel *> (last));
2451 if (SWITCH_BREAK_LABEL_P (label))
2452 add_bind = true;
2453 }
2454 }
2455
2456 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2457 default_case, labels);
2458 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2459 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2460 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2461 so that we can easily find the start and end of the switch
2462 statement. */
2463 if (add_bind)
2464 {
2465 gimple_seq bind_body = NULL;
2466 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2467 gimple_seq_add_seq (&bind_body, switch_body_seq);
2468 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2469 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2470 gimplify_seq_add_stmt (pre_p, bind);
2471 }
2472 else
2473 {
2474 gimplify_seq_add_stmt (pre_p, switch_stmt);
2475 gimplify_seq_add_seq (pre_p, switch_body_seq);
2476 }
2477 labels.release ();
2478 }
2479 else
2480 gcc_unreachable ();
2481
2482 return GS_ALL_DONE;
2483 }
2484
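/* Editorial sketch (illustrative only): for

     switch (a) { case 1: x = 1; break; default: x = 0; }

   the function above emits approximately

     switch (a) <default: D3, case 1: D1>
     D1: x = 1; goto D2;
     D3: x = 0;
     D2:                (the front end's break label)

   A default label is synthesized when the source had none, and for
   nested switches the whole range up to the break label is wrapped in a
   GIMPLE_BIND to help -Wimplicit-fallthrough, as described above.  */
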
2485 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2486
2487 static enum gimplify_status
2488 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2489 {
2490 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2491 == current_function_decl);
2492
2493 tree label = LABEL_EXPR_LABEL (*expr_p);
2494 glabel *label_stmt = gimple_build_label (label);
2495 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2496 gimplify_seq_add_stmt (pre_p, label_stmt);
2497
2498 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2499 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2500 NOT_TAKEN));
2501 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2502 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2503 TAKEN));
2504
2505 return GS_ALL_DONE;
2506 }
2507
2508 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2509
2510 static enum gimplify_status
2511 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2512 {
2513 struct gimplify_ctx *ctxp;
2514 glabel *label_stmt;
2515
2516 /* Invalid programs can play Duff's Device type games with, for example,
2517 #pragma omp parallel. At least in the C front end, we don't
2518 detect such invalid branches until after gimplification, in the
2519 diagnose_omp_blocks pass. */
2520 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2521 if (ctxp->case_labels.exists ())
2522 break;
2523
2524 tree label = CASE_LABEL (*expr_p);
2525 label_stmt = gimple_build_label (label);
2526 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2527 ctxp->case_labels.safe_push (*expr_p);
2528 gimplify_seq_add_stmt (pre_p, label_stmt);
2529
2530 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2531 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2532 NOT_TAKEN));
2533 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2534 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2535 TAKEN));
2536
2537 return GS_ALL_DONE;
2538 }
2539
2540 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2541 if necessary. */
2542
2543 tree
2544 build_and_jump (tree *label_p)
2545 {
2546 if (label_p == NULL)
2547 /* If there's nowhere to jump, just fall through. */
2548 return NULL_TREE;
2549
2550 if (*label_p == NULL_TREE)
2551 {
2552 tree label = create_artificial_label (UNKNOWN_LOCATION);
2553 *label_p = label;
2554 }
2555
2556 return build1 (GOTO_EXPR, void_type_node, *label_p);
2557 }
2558
2559 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2560 This also involves building a label to jump to and communicating it to
2561 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2562
2563 static enum gimplify_status
2564 gimplify_exit_expr (tree *expr_p)
2565 {
2566 tree cond = TREE_OPERAND (*expr_p, 0);
2567 tree expr;
2568
2569 expr = build_and_jump (&gimplify_ctxp->exit_label);
2570 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2571 *expr_p = expr;
2572
2573 return GS_OK;
2574 }
2575
2576 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2577 different from its canonical type, wrap the whole thing inside a
2578 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2579 type.
2580
2581 The canonical type of a COMPONENT_REF is the type of the field being
2582 referenced--unless the field is a bit-field which can be read directly
2583 in a smaller mode, in which case the canonical type is the
2584 sign-appropriate type corresponding to that mode. */
2585
2586 static void
2587 canonicalize_component_ref (tree *expr_p)
2588 {
2589 tree expr = *expr_p;
2590 tree type;
2591
2592 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2593
2594 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2595 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2596 else
2597 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2598
2599 /* One could argue that none of the code below is necessary for
2600 the non-bitfield case, and that it should instead be a FE error
2601 if type adjustment would be needed. */
2602 if (TREE_TYPE (expr) != type)
2603 {
2604 #ifdef ENABLE_TYPES_CHECKING
2605 tree old_type = TREE_TYPE (expr);
2606 #endif
2607 int type_quals;
2608
2609 /* We need to preserve qualifiers and propagate them from
2610 operand 0. */
2611 type_quals = TYPE_QUALS (type)
2612 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2613 if (TYPE_QUALS (type) != type_quals)
2614 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2615
2616 /* Set the type of the COMPONENT_REF to the underlying type. */
2617 TREE_TYPE (expr) = type;
2618
2619 #ifdef ENABLE_TYPES_CHECKING
2620 /* It is now a FE error, if the conversion from the canonical
2621 type to the original expression type is not useless. */
2622 gcc_assert (useless_type_conversion_p (old_type, type));
2623 #endif
2624 }
2625 }
2626
2627 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2628 to foo, embed that change in the ADDR_EXPR by converting
2629 T array[U];
2630 (T *)&array
2631 ==>
2632 &array[L]
2633 where L is the lower bound. For simplicity, only do this for constant
2634 lower bound.
2635 The constraint is that the type of &array[L] is trivially convertible
2636 to T *. */
2637
2638 static void
2639 canonicalize_addr_expr (tree *expr_p)
2640 {
2641 tree expr = *expr_p;
2642 tree addr_expr = TREE_OPERAND (expr, 0);
2643 tree datype, ddatype, pddatype;
2644
2645 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2646 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2647 || TREE_CODE (addr_expr) != ADDR_EXPR)
2648 return;
2649
2650 /* The addr_expr type should be a pointer to an array. */
2651 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2652 if (TREE_CODE (datype) != ARRAY_TYPE)
2653 return;
2654
2655 /* The pointer to element type shall be trivially convertible to
2656 the expression pointer type. */
2657 ddatype = TREE_TYPE (datype);
2658 pddatype = build_pointer_type (ddatype);
2659 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2660 pddatype))
2661 return;
2662
2663 /* The lower bound and element sizes must be constant. */
2664 if (!TYPE_SIZE_UNIT (ddatype)
2665 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2666 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2667 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2668 return;
2669
2670 /* All checks succeeded. Build a new node to merge the cast. */
2671 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2672 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2673 NULL_TREE, NULL_TREE);
2674 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2675
2676 /* We can have stripped a required restrict qualifier above. */
2677 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2678 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2679 }
2680
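/* Editorial example: given

     int a[10];
     int *p = (int *) &a;

   the conversion is rewritten by the function above into the equivalent

     int *p = &a[0];

   dropping the cast, while the trailing fold_convert preserves any
   restrict qualification that the rewrite would otherwise lose.  */
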
2681 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2682 underneath as appropriate. */
2683
2684 static enum gimplify_status
2685 gimplify_conversion (tree *expr_p)
2686 {
2687 location_t loc = EXPR_LOCATION (*expr_p);
2688 gcc_assert (CONVERT_EXPR_P (*expr_p));
2689
2690 /* Strip away all but the outermost conversion. */
2691 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2692
2693 /* And remove the outermost conversion if it's useless. */
2694 if (tree_ssa_useless_type_conversion (*expr_p))
2695 *expr_p = TREE_OPERAND (*expr_p, 0);
2696
2697 /* If we still have a conversion at the toplevel,
2698 then canonicalize some constructs. */
2699 if (CONVERT_EXPR_P (*expr_p))
2700 {
2701 tree sub = TREE_OPERAND (*expr_p, 0);
2702
2703 /* If a NOP conversion is changing the type of a COMPONENT_REF
2704 expression, then canonicalize its type now in order to expose more
2705 redundant conversions. */
2706 if (TREE_CODE (sub) == COMPONENT_REF)
2707 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2708
2709 /* If a NOP conversion is changing a pointer to array of foo
2710 to a pointer to foo, embed that change in the ADDR_EXPR. */
2711 else if (TREE_CODE (sub) == ADDR_EXPR)
2712 canonicalize_addr_expr (expr_p);
2713 }
2714
2715 /* If we have a conversion to a non-register type force the
2716 use of a VIEW_CONVERT_EXPR instead. */
2717 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2718 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2719 TREE_OPERAND (*expr_p, 0));
2720
2721 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2722 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2723 TREE_SET_CODE (*expr_p, NOP_EXPR);
2724
2725 return GS_OK;
2726 }
2727
2728 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2729 DECL_VALUE_EXPR, and it's worth re-examining things. */
2730
2731 static enum gimplify_status
2732 gimplify_var_or_parm_decl (tree *expr_p)
2733 {
2734 tree decl = *expr_p;
2735
2736 /* ??? If this is a local variable, and it has not been seen in any
2737 outer BIND_EXPR, then it's probably the result of a duplicate
2738 declaration, for which we've already issued an error. It would
2739 be really nice if the front end wouldn't leak these at all.
2740 Currently the only known culprit is C++ destructors, as seen
2741 in g++.old-deja/g++.jason/binding.C. */
2742 if (VAR_P (decl)
2743 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2744 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2745 && decl_function_context (decl) == current_function_decl)
2746 {
2747 gcc_assert (seen_error ());
2748 return GS_ERROR;
2749 }
2750
2751 /* When within an OMP context, notice uses of variables. */
2752 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2753 return GS_ALL_DONE;
2754
2755 /* If the decl is an alias for another expression, substitute it now. */
2756 if (DECL_HAS_VALUE_EXPR_P (decl))
2757 {
2758 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2759 return GS_OK;
2760 }
2761
2762 return GS_ALL_DONE;
2763 }
2764
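/* Editorial note: a typical DECL_VALUE_EXPR substitution is the one
   created for C variable-length arrays, e.g. in

     void f (int n) { int a[n]; ... use (a[i]); ... }

   "a" carries a DECL_VALUE_EXPR of roughly "*a.ptr", so uses of "a"
   gimplify to a dereference of the pointer that actually holds the
   allocated storage.  */
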
2765 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2766
2767 static void
2768 recalculate_side_effects (tree t)
2769 {
2770 enum tree_code code = TREE_CODE (t);
2771 int len = TREE_OPERAND_LENGTH (t);
2772 int i;
2773
2774 switch (TREE_CODE_CLASS (code))
2775 {
2776 case tcc_expression:
2777 switch (code)
2778 {
2779 case INIT_EXPR:
2780 case MODIFY_EXPR:
2781 case VA_ARG_EXPR:
2782 case PREDECREMENT_EXPR:
2783 case PREINCREMENT_EXPR:
2784 case POSTDECREMENT_EXPR:
2785 case POSTINCREMENT_EXPR:
2786 /* All of these have side-effects, no matter what their
2787 operands are. */
2788 return;
2789
2790 default:
2791 break;
2792 }
2793 /* Fall through. */
2794
2795 case tcc_comparison: /* a comparison expression */
2796 case tcc_unary: /* a unary arithmetic expression */
2797 case tcc_binary: /* a binary arithmetic expression */
2798 case tcc_reference: /* a reference */
2799 case tcc_vl_exp: /* a function call */
2800 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2801 for (i = 0; i < len; ++i)
2802 {
2803 tree op = TREE_OPERAND (t, i);
2804 if (op && TREE_SIDE_EFFECTS (op))
2805 TREE_SIDE_EFFECTS (t) = 1;
2806 }
2807 break;
2808
2809 case tcc_constant:
2810 /* No side-effects. */
2811 return;
2812
2813 default:
2814 gcc_unreachable ();
2815 }
2816 }
2817
2818 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2819 node *EXPR_P.
2820
2821 compound_lval
2822 : min_lval '[' val ']'
2823 | min_lval '.' ID
2824 | compound_lval '[' val ']'
2825 | compound_lval '.' ID
2826
2827 This is not part of the original SIMPLE definition, which separates
2828 array and member references, but it seems reasonable to handle them
2829 together. Also, this way we don't run into problems with union
2830 aliasing; gcc requires that for accesses through a union to alias, the
2831 union reference must be explicit, which was not always the case when we
2832 were splitting up array and member refs.
2833
2834 PRE_P points to the sequence where side effects that must happen before
2835 *EXPR_P should be stored.
2836
2837 POST_P points to the sequence where side effects that must happen after
2838 *EXPR_P should be stored. */
2839
2840 static enum gimplify_status
2841 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2842 fallback_t fallback)
2843 {
2844 tree *p;
2845 enum gimplify_status ret = GS_ALL_DONE, tret;
2846 int i;
2847 location_t loc = EXPR_LOCATION (*expr_p);
2848 tree expr = *expr_p;
2849
2850 /* Create a stack of the subexpressions so later we can walk them in
2851 order from inner to outer. */
2852 auto_vec<tree, 10> expr_stack;
2853
2854 /* We can handle anything that get_inner_reference can deal with. */
2855 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2856 {
2857 restart:
2858 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2859 if (TREE_CODE (*p) == INDIRECT_REF)
2860 *p = fold_indirect_ref_loc (loc, *p);
2861
2862 if (handled_component_p (*p))
2863 ;
2864 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2865 additional COMPONENT_REFs. */
2866 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2867 && gimplify_var_or_parm_decl (p) == GS_OK)
2868 goto restart;
2869 else
2870 break;
2871
2872 expr_stack.safe_push (*p);
2873 }
2874
2875 gcc_assert (expr_stack.length ());
2876
2877 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2878 walked through and P points to the innermost expression.
2879
2880 Java requires that we elaborate nodes in source order. That
2881 means we must gimplify the inner expression followed by each of
2882 the indices, in order. But we can't gimplify the inner
2883 expression until we deal with any variable bounds, sizes, or
2884 positions in order to deal with PLACEHOLDER_EXPRs.
2885
2886 So we do this in three steps. First we deal with the annotations
2887 for any variables in the components, then we gimplify the base,
2888 then we gimplify any indices, from left to right. */
2889 for (i = expr_stack.length () - 1; i >= 0; i--)
2890 {
2891 tree t = expr_stack[i];
2892
2893 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2894 {
2895 /* Gimplify the low bound and element type size and put them into
2896 the ARRAY_REF. If these values are set, they have already been
2897 gimplified. */
2898 if (TREE_OPERAND (t, 2) == NULL_TREE)
2899 {
2900 tree low = unshare_expr (array_ref_low_bound (t));
2901 if (!is_gimple_min_invariant (low))
2902 {
2903 TREE_OPERAND (t, 2) = low;
2904 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2905 post_p, is_gimple_reg,
2906 fb_rvalue);
2907 ret = MIN (ret, tret);
2908 }
2909 }
2910 else
2911 {
2912 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2913 is_gimple_reg, fb_rvalue);
2914 ret = MIN (ret, tret);
2915 }
2916
2917 if (TREE_OPERAND (t, 3) == NULL_TREE)
2918 {
2919 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2920 tree elmt_size = unshare_expr (array_ref_element_size (t));
2921 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2922
2923 /* Divide the element size by the alignment of the element
2924 type (above). */
2925 elmt_size
2926 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2927
2928 if (!is_gimple_min_invariant (elmt_size))
2929 {
2930 TREE_OPERAND (t, 3) = elmt_size;
2931 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2932 post_p, is_gimple_reg,
2933 fb_rvalue);
2934 ret = MIN (ret, tret);
2935 }
2936 }
2937 else
2938 {
2939 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2940 is_gimple_reg, fb_rvalue);
2941 ret = MIN (ret, tret);
2942 }
2943 }
2944 else if (TREE_CODE (t) == COMPONENT_REF)
2945 {
2946 /* Set the field offset into T and gimplify it. */
2947 if (TREE_OPERAND (t, 2) == NULL_TREE)
2948 {
2949 tree offset = unshare_expr (component_ref_field_offset (t));
2950 tree field = TREE_OPERAND (t, 1);
2951 tree factor
2952 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2953
2954 /* Divide the offset by its alignment. */
2955 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2956
2957 if (!is_gimple_min_invariant (offset))
2958 {
2959 TREE_OPERAND (t, 2) = offset;
2960 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2961 post_p, is_gimple_reg,
2962 fb_rvalue);
2963 ret = MIN (ret, tret);
2964 }
2965 }
2966 else
2967 {
2968 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2969 is_gimple_reg, fb_rvalue);
2970 ret = MIN (ret, tret);
2971 }
2972 }
2973 }
2974
2975 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2976 so as to match the min_lval predicate. Failure to do so may result
2977 in the creation of large aggregate temporaries. */
2978 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2979 fallback | fb_lvalue);
2980 ret = MIN (ret, tret);
2981
2982 /* And finally, the indices and operands of ARRAY_REF. During this
2983 loop we also remove any useless conversions. */
2984 for (; expr_stack.length () > 0; )
2985 {
2986 tree t = expr_stack.pop ();
2987
2988 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2989 {
2990 /* Gimplify the dimension. */
2991 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2992 {
2993 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2994 is_gimple_val, fb_rvalue);
2995 ret = MIN (ret, tret);
2996 }
2997 }
2998
2999 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3000
3001 /* The innermost expression P may have originally had
3002 TREE_SIDE_EFFECTS set which would have caused all the outer
3003 expressions in *EXPR_P leading to P to also have had
3004 TREE_SIDE_EFFECTS set. */
3005 recalculate_side_effects (t);
3006 }
3007
3008 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3009 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3010 {
3011 canonicalize_component_ref (expr_p);
3012 }
3013
3014 expr_stack.release ();
3015
3016 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3017
3018 return ret;
3019 }
3020
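/* Editorial sketch (illustrative only): for an access such as

     x = s.a[i + 1].f;

   the three steps above first gimplify any variable bounds and sizes in
   the components, then the base "s", then the index, yielding roughly

     t1 = i + 1;
     x = s.a[t1].f;

   with TREE_SIDE_EFFECTS recomputed on each node that is re-used.  */
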
3021 /* Gimplify the self modifying expression pointed to by EXPR_P
3022 (++, --, +=, -=).
3023
3024 PRE_P points to the list where side effects that must happen before
3025 *EXPR_P should be stored.
3026
3027 POST_P points to the list where side effects that must happen after
3028 *EXPR_P should be stored.
3029
3030 WANT_VALUE is nonzero iff we want to use the value of this expression
3031 in another expression.
3032
3033 ARITH_TYPE is the type the computation should be performed in. */
3034
3035 enum gimplify_status
3036 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3037 bool want_value, tree arith_type)
3038 {
3039 enum tree_code code;
3040 tree lhs, lvalue, rhs, t1;
3041 gimple_seq post = NULL, *orig_post_p = post_p;
3042 bool postfix;
3043 enum tree_code arith_code;
3044 enum gimplify_status ret;
3045 location_t loc = EXPR_LOCATION (*expr_p);
3046
3047 code = TREE_CODE (*expr_p);
3048
3049 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3050 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3051
3052 /* Prefix or postfix? */
3053 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3054 /* Faster to treat as prefix if result is not used. */
3055 postfix = want_value;
3056 else
3057 postfix = false;
3058
3059 /* For postfix, make sure the inner expression's post side effects
3060 are executed after side effects from this expression. */
3061 if (postfix)
3062 post_p = &post;
3063
3064 /* Add or subtract? */
3065 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3066 arith_code = PLUS_EXPR;
3067 else
3068 arith_code = MINUS_EXPR;
3069
3070 /* Gimplify the LHS into a GIMPLE lvalue. */
3071 lvalue = TREE_OPERAND (*expr_p, 0);
3072 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3073 if (ret == GS_ERROR)
3074 return ret;
3075
3076 /* Extract the operands to the arithmetic operation. */
3077 lhs = lvalue;
3078 rhs = TREE_OPERAND (*expr_p, 1);
3079
3080 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3081 that as the result value and in the postqueue operation. */
3082 if (postfix)
3083 {
3084 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3085 if (ret == GS_ERROR)
3086 return ret;
3087
3088 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
3089 }
3090
3091 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3092 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3093 {
3094 rhs = convert_to_ptrofftype_loc (loc, rhs);
3095 if (arith_code == MINUS_EXPR)
3096 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3097 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3098 }
3099 else
3100 t1 = fold_convert (TREE_TYPE (*expr_p),
3101 fold_build2 (arith_code, arith_type,
3102 fold_convert (arith_type, lhs),
3103 fold_convert (arith_type, rhs)));
3104
3105 if (postfix)
3106 {
3107 gimplify_assign (lvalue, t1, pre_p);
3108 gimplify_seq_add_seq (orig_post_p, post);
3109 *expr_p = lhs;
3110 return GS_ALL_DONE;
3111 }
3112 else
3113 {
3114 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3115 return GS_OK;
3116 }
3117 }
3118
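/* Editorial sketch (illustrative only): for "b = a++;" with integral
   "a", the postfix path above produces roughly

     a.1 = a;
     a = a.1 + 1;
     b = a.1;

   whereas prefix "++a" simply becomes the MODIFY_EXPR "a = a + 1"; for
   pointers the addition is a POINTER_PLUS_EXPR on a sizetype offset.  */
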
3119 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3120
3121 static void
3122 maybe_with_size_expr (tree *expr_p)
3123 {
3124 tree expr = *expr_p;
3125 tree type = TREE_TYPE (expr);
3126 tree size;
3127
3128 /* If we've already wrapped this or the type is error_mark_node, we can't do
3129 anything. */
3130 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3131 || type == error_mark_node)
3132 return;
3133
3134 /* If the size isn't known or is a constant, we have nothing to do. */
3135 size = TYPE_SIZE_UNIT (type);
3136 if (!size || poly_int_tree_p (size))
3137 return;
3138
3139 /* Otherwise, make a WITH_SIZE_EXPR. */
3140 size = unshare_expr (size);
3141 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3142 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3143 }
3144
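/* Editorial example (uses the GNU variable-sized-struct extension):
   when passing an object whose size is only known at run time, e.g.

     void g (int n) { struct s { char buf[n]; } v; f (v); }

   the argument is wrapped as WITH_SIZE_EXPR <v, n_bytes> so later
   passes still know how many bytes to copy; constant-sized types are
   left alone.  */
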
3145 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3146 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3147 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3148 gimplified to an SSA name. */
3149
3150 enum gimplify_status
3151 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3152 bool allow_ssa)
3153 {
3154 bool (*test) (tree);
3155 fallback_t fb;
3156
3157 /* In general, we allow lvalues for function arguments to avoid
3158 extra overhead of copying large aggregates out of even larger
3159 aggregates into temporaries only to copy the temporaries to
3160 the argument list. Make optimizers happy by pulling out to
3161 temporaries those types that fit in registers. */
3162 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3163 test = is_gimple_val, fb = fb_rvalue;
3164 else
3165 {
3166 test = is_gimple_lvalue, fb = fb_either;
3167 /* Also strip a TARGET_EXPR that would force an extra copy. */
3168 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3169 {
3170 tree init = TARGET_EXPR_INITIAL (*arg_p);
3171 if (init
3172 && !VOID_TYPE_P (TREE_TYPE (init)))
3173 *arg_p = init;
3174 }
3175 }
3176
3177 /* If this is a variable sized type, we must remember the size. */
3178 maybe_with_size_expr (arg_p);
3179
3180 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3181 /* Make sure arguments have the same location as the function call
3182 itself. */
3183 protected_set_expr_location (*arg_p, call_location);
3184
3185 /* There is a sequence point before a function call. Side effects in
3186 the argument list must occur before the actual call. So, when
3187 gimplifying arguments, force gimplify_expr to use an internal
3188 post queue which is then appended to the end of PRE_P. */
3189 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3190 }
3191
3192 /* Don't fold inside offloading or taskreg regions: it can break code by
3193 adding decl references that weren't in the source. We'll do it during
3194 omplower pass instead. */
3195
3196 static bool
3197 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3198 {
3199 struct gimplify_omp_ctx *ctx;
3200 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3201 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3202 return false;
3203 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3204 return false;
3205 /* Delay folding of builtins until the IL is in consistent state
3206 so the diagnostic machinery can do a better job. */
3207 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3208 return false;
3209 return fold_stmt (gsi);
3210 }
3211
3212 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3213 WANT_VALUE is true if the result of the call is desired. */
3214
3215 static enum gimplify_status
3216 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3217 {
3218 tree fndecl, parms, p, fnptrtype;
3219 enum gimplify_status ret;
3220 int i, nargs;
3221 gcall *call;
3222 bool builtin_va_start_p = false;
3223 location_t loc = EXPR_LOCATION (*expr_p);
3224
3225 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3226
3227 /* For reliable diagnostics during inlining, it is necessary that
3228 every call_expr be annotated with file and line. */
3229 if (! EXPR_HAS_LOCATION (*expr_p))
3230 SET_EXPR_LOCATION (*expr_p, input_location);
3231
3232 /* Gimplify internal functions created in the FEs. */
3233 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3234 {
3235 if (want_value)
3236 return GS_ALL_DONE;
3237
3238 nargs = call_expr_nargs (*expr_p);
3239 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3240 auto_vec<tree> vargs (nargs);
3241
3242 for (i = 0; i < nargs; i++)
3243 {
3244 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3245 EXPR_LOCATION (*expr_p));
3246 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3247 }
3248
3249 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3250 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3251 gimplify_seq_add_stmt (pre_p, call);
3252 return GS_ALL_DONE;
3253 }
3254
3255 /* This may be a call to a builtin function.
3256
3257 Builtin function calls may be transformed into different
3258 (and more efficient) builtin function calls under certain
3259 circumstances. Unfortunately, gimplification can muck things
3260 up enough that the builtin expanders are not aware that certain
3261 transformations are still valid.
3262
3263 So we attempt transformation/gimplification of the call before
3264 we gimplify the CALL_EXPR. At this time we do not manage to
3265 transform all calls in the same manner as the expanders do, but
3266 we do transform most of them. */
3267 fndecl = get_callee_fndecl (*expr_p);
3268 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3269 switch (DECL_FUNCTION_CODE (fndecl))
3270 {
3271 CASE_BUILT_IN_ALLOCA:
3272 /* If the call has been built for a variable-sized object, then we
3273 want to restore the stack level when the enclosing BIND_EXPR is
3274 exited to reclaim the allocated space; otherwise, we precisely
3275 need to do the opposite and preserve the latest stack level. */
3276 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3277 gimplify_ctxp->save_stack = true;
3278 else
3279 gimplify_ctxp->keep_stack = true;
3280 break;
3281
3282 case BUILT_IN_VA_START:
3283 {
3284 builtin_va_start_p = true;
3285 if (call_expr_nargs (*expr_p) < 2)
3286 {
3287 error ("too few arguments to function %<va_start%>");
3288 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3289 return GS_OK;
3290 }
3291
3292 if (fold_builtin_next_arg (*expr_p, true))
3293 {
3294 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3295 return GS_OK;
3296 }
3297 break;
3298 }
3299
3300 default:
3301 ;
3302 }
3303 if (fndecl && fndecl_built_in_p (fndecl))
3304 {
3305 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3306 if (new_tree && new_tree != *expr_p)
3307 {
3308 /* There was a transformation of this call which computes the
3309 same value, but in a more efficient way. Return and try
3310 again. */
3311 *expr_p = new_tree;
3312 return GS_OK;
3313 }
3314 }
3315
3316 /* Remember the original function pointer type. */
3317 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3318
3319 /* There is a sequence point before the call, so any side effects in
3320 the calling expression must occur before the actual call. Force
3321 gimplify_expr to use an internal post queue. */
3322 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3323 is_gimple_call_addr, fb_rvalue);
3324
3325 nargs = call_expr_nargs (*expr_p);
3326
3327 /* Get argument types for verification. */
3328 fndecl = get_callee_fndecl (*expr_p);
3329 parms = NULL_TREE;
3330 if (fndecl)
3331 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3332 else
3333 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3334
3335 if (fndecl && DECL_ARGUMENTS (fndecl))
3336 p = DECL_ARGUMENTS (fndecl);
3337 else if (parms)
3338 p = parms;
3339 else
3340 p = NULL_TREE;
3341 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3342 ;
3343
3344 /* If the last argument is __builtin_va_arg_pack () and it is not
3345 passed as a named argument, decrease the number of CALL_EXPR
3346 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3347 if (!p
3348 && i < nargs
3349 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3350 {
3351 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3352 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3353
3354 if (last_arg_fndecl
3355 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3356 {
3357 tree call = *expr_p;
3358
3359 --nargs;
3360 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3361 CALL_EXPR_FN (call),
3362 nargs, CALL_EXPR_ARGP (call));
3363
3364 /* Copy all CALL_EXPR flags, location and block, except
3365 CALL_EXPR_VA_ARG_PACK flag. */
3366 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3367 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3368 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3369 = CALL_EXPR_RETURN_SLOT_OPT (call);
3370 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3371 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3372
3373 /* Set CALL_EXPR_VA_ARG_PACK. */
3374 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3375 }
3376 }
3377
3378 /* If the call returns twice then after building the CFG the call
3379 argument computations will no longer dominate the call because
3380 we add an abnormal incoming edge to the call. So do not use SSA
3381 vars there. */
3382 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3383
3384 /* Gimplify the function arguments. */
3385 if (nargs > 0)
3386 {
3387 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3388 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3389 PUSH_ARGS_REVERSED ? i-- : i++)
3390 {
3391 enum gimplify_status t;
3392
3393 /* Avoid gimplifying the second argument to va_start, which needs to
3394 be the plain PARM_DECL. */
3395 if ((i != 1) || !builtin_va_start_p)
3396 {
3397 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3398 EXPR_LOCATION (*expr_p), ! returns_twice);
3399
3400 if (t == GS_ERROR)
3401 ret = GS_ERROR;
3402 }
3403 }
3404 }
3405
3406 /* Gimplify the static chain. */
3407 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3408 {
3409 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3410 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3411 else
3412 {
3413 enum gimplify_status t;
3414 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3415 EXPR_LOCATION (*expr_p), ! returns_twice);
3416 if (t == GS_ERROR)
3417 ret = GS_ERROR;
3418 }
3419 }
3420
3421 /* Verify the function result. */
3422 if (want_value && fndecl
3423 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3424 {
3425 error_at (loc, "using result of function returning %<void%>");
3426 ret = GS_ERROR;
3427 }
3428
3429 /* Try this again in case gimplification exposed something. */
3430 if (ret != GS_ERROR)
3431 {
3432 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3433
3434 if (new_tree && new_tree != *expr_p)
3435 {
3436 /* There was a transformation of this call which computes the
3437 same value, but in a more efficient way. Return and try
3438 again. */
3439 *expr_p = new_tree;
3440 return GS_OK;
3441 }
3442 }
3443 else
3444 {
3445 *expr_p = error_mark_node;
3446 return GS_ERROR;
3447 }
3448
3449 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on
3450 the call. This allows us to eliminate redundant or useless
3451 calls to "const" functions. */
3452 if (TREE_CODE (*expr_p) == CALL_EXPR)
3453 {
3454 int flags = call_expr_flags (*expr_p);
3455 if (flags & (ECF_CONST | ECF_PURE)
3456 /* An infinite loop is considered a side effect. */
3457 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3458 TREE_SIDE_EFFECTS (*expr_p) = 0;
3459 }
3460
3461 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3462 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3463 form and delegate the creation of a GIMPLE_CALL to
3464 gimplify_modify_expr. This is always possible because when
3465 WANT_VALUE is true, the caller wants the result of this call into
3466 a temporary, which means that we will emit an INIT_EXPR in
3467 internal_get_tmp_var which will then be handled by
3468 gimplify_modify_expr. */
3469 if (!want_value)
3470 {
3471 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3472 have to do is replicate it as a GIMPLE_CALL tuple. */
3473 gimple_stmt_iterator gsi;
3474 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3475 notice_special_calls (call);
3476 gimplify_seq_add_stmt (pre_p, call);
3477 gsi = gsi_last (*pre_p);
3478 maybe_fold_stmt (&gsi);
3479 *expr_p = NULL_TREE;
3480 }
3481 else
3482 /* Remember the original function type. */
3483 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3484 CALL_EXPR_FN (*expr_p));
3485
3486 return ret;
3487 }
3488
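/* Editorial example: the CALL_EXPR_VA_ARG_PACK handling above supports
   forwarding wrappers in the style documented for the builtin:

     extern int my_log (const char *fmt, ...);

     static inline __attribute__ ((always_inline)) int
     log_err (const char *fmt, ...)
     {
       return my_log (fmt, __builtin_va_arg_pack ());
     }

   The trailing __builtin_va_arg_pack () argument is dropped here and
   stands for the caller's remaining actual arguments once the wrapper
   is inlined.  */
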
3489 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3490 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3491
3492 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3493 condition is true or false, respectively. If null, we should generate
3494 our own to skip over the evaluation of this specific expression.
3495
3496 LOCUS is the source location of the COND_EXPR.
3497
3498 This function is the tree equivalent of do_jump.
3499
3500 shortcut_cond_r should only be called by shortcut_cond_expr. */
3501
3502 static tree
3503 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3504 location_t locus)
3505 {
3506 tree local_label = NULL_TREE;
3507 tree t, expr = NULL;
3508
3509 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3510 retain the shortcut semantics. Just insert the gotos here;
3511 shortcut_cond_expr will append the real blocks later. */
3512 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3513 {
3514 location_t new_locus;
3515
3516 /* Turn if (a && b) into
3517
3518 if (a); else goto no;
3519 if (b) goto yes; else goto no;
3520 (no:) */
3521
3522 if (false_label_p == NULL)
3523 false_label_p = &local_label;
3524
3525 /* Keep the original source location on the first 'if'. */
3526 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3527 append_to_statement_list (t, &expr);
3528
3529 /* Set the source location of the && on the second 'if'. */
3530 new_locus = rexpr_location (pred, locus);
3531 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3532 new_locus);
3533 append_to_statement_list (t, &expr);
3534 }
3535 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3536 {
3537 location_t new_locus;
3538
3539 /* Turn if (a || b) into
3540
3541 if (a) goto yes;
3542 if (b) goto yes; else goto no;
3543 (yes:) */
3544
3545 if (true_label_p == NULL)
3546 true_label_p = &local_label;
3547
3548 /* Keep the original source location on the first 'if'. */
3549 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3550 append_to_statement_list (t, &expr);
3551
3552 /* Set the source location of the || on the second 'if'. */
3553 new_locus = rexpr_location (pred, locus);
3554 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3555 new_locus);
3556 append_to_statement_list (t, &expr);
3557 }
3558 else if (TREE_CODE (pred) == COND_EXPR
3559 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3560 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3561 {
3562 location_t new_locus;
3563
3564 /* As long as we're messing with gotos, turn if (a ? b : c) into
3565 if (a)
3566 if (b) goto yes; else goto no;
3567 else
3568 if (c) goto yes; else goto no;
3569
3570 Don't do this if one of the arms has void type, which can happen
3571 in C++ when the arm is throw. */
3572
3573 /* Keep the original source location on the first 'if'. Set the source
3574 location of the ? on the second 'if'. */
3575 new_locus = rexpr_location (pred, locus);
3576 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3577 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3578 false_label_p, locus),
3579 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3580 false_label_p, new_locus));
3581 }
3582 else
3583 {
3584 expr = build3 (COND_EXPR, void_type_node, pred,
3585 build_and_jump (true_label_p),
3586 build_and_jump (false_label_p));
3587 SET_EXPR_LOCATION (expr, locus);
3588 }
3589
3590 if (local_label)
3591 {
3592 t = build1 (LABEL_EXPR, void_type_node, local_label);
3593 append_to_statement_list (t, &expr);
3594 }
3595
3596 return expr;
3597 }
3598
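/* Editorial sketch (illustrative only): shortcut_cond_r turns

     if (a && b) f (); else g ();

   into approximately

     if (a) ; else goto no;
     if (b) goto yes; else goto no;
     yes: f (); goto end;
     no:  g ();
     end:

   which shortcut_cond_expr below stitches together around the real
   arms.  */
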
3599 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3600 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3601 statement, if it is the last one. Otherwise, return NULL. */
3602
3603 static tree
3604 find_goto (tree expr)
3605 {
3606 if (!expr)
3607 return NULL_TREE;
3608
3609 if (TREE_CODE (expr) == GOTO_EXPR)
3610 return expr;
3611
3612 if (TREE_CODE (expr) != STATEMENT_LIST)
3613 return NULL_TREE;
3614
3615 tree_stmt_iterator i = tsi_start (expr);
3616
3617 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3618 tsi_next (&i);
3619
3620 if (!tsi_one_before_end_p (i))
3621 return NULL_TREE;
3622
3623 return find_goto (tsi_stmt (i));
3624 }
3625
3626 /* Same as find_goto, except that it returns NULL if the destination
3627 is not a LABEL_DECL. */
3628
3629 static inline tree
3630 find_goto_label (tree expr)
3631 {
3632 tree dest = find_goto (expr);
3633 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3634 return dest;
3635 return NULL_TREE;
3636 }
3637
3638 /* Given a conditional expression EXPR with short-circuit boolean
3639 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3640 predicate apart into the equivalent sequence of conditionals. */
3641
3642 static tree
3643 shortcut_cond_expr (tree expr)
3644 {
3645 tree pred = TREE_OPERAND (expr, 0);
3646 tree then_ = TREE_OPERAND (expr, 1);
3647 tree else_ = TREE_OPERAND (expr, 2);
3648 tree true_label, false_label, end_label, t;
3649 tree *true_label_p;
3650 tree *false_label_p;
3651 bool emit_end, emit_false, jump_over_else;
3652 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3653 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3654
3655 /* First do simple transformations. */
3656 if (!else_se)
3657 {
3658 /* If there is no 'else', turn
3659 if (a && b) then c
3660 into
3661 if (a) if (b) then c. */
3662 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3663 {
3664 /* Keep the original source location on the first 'if'. */
3665 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3666 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3667 /* Set the source location of the && on the second 'if'. */
3668 if (rexpr_has_location (pred))
3669 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3670 then_ = shortcut_cond_expr (expr);
3671 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3672 pred = TREE_OPERAND (pred, 0);
3673 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3674 SET_EXPR_LOCATION (expr, locus);
3675 }
3676 }
3677
3678 if (!then_se)
3679 {
3680 /* If there is no 'then', turn
3681 if (a || b); else d
3682 into
3683 if (a); else if (b); else d. */
3684 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3685 {
3686 /* Keep the original source location on the first 'if'. */
3687 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3688 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3689 /* Set the source location of the || on the second 'if'. */
3690 if (rexpr_has_location (pred))
3691 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3692 else_ = shortcut_cond_expr (expr);
3693 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3694 pred = TREE_OPERAND (pred, 0);
3695 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3696 SET_EXPR_LOCATION (expr, locus);
3697 }
3698 }
3699
3700 /* If we're done, great. */
3701 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3702 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3703 return expr;
3704
3705 /* Otherwise we need to mess with gotos. Change
3706 if (a) c; else d;
3707 to
3708 if (a); else goto no;
3709 c; goto end;
3710 no: d; end:
3711 and recursively gimplify the condition. */
3712
3713 true_label = false_label = end_label = NULL_TREE;
3714
3715 /* If our arms just jump somewhere, hijack those labels so we don't
3716 generate jumps to jumps. */
3717
3718 if (tree then_goto = find_goto_label (then_))
3719 {
3720 true_label = GOTO_DESTINATION (then_goto);
3721 then_ = NULL;
3722 then_se = false;
3723 }
3724
3725 if (tree else_goto = find_goto_label (else_))
3726 {
3727 false_label = GOTO_DESTINATION (else_goto);
3728 else_ = NULL;
3729 else_se = false;
3730 }
3731
3732 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3733 if (true_label)
3734 true_label_p = &true_label;
3735 else
3736 true_label_p = NULL;
3737
3738 /* The 'else' branch also needs a label if it contains interesting code. */
3739 if (false_label || else_se)
3740 false_label_p = &false_label;
3741 else
3742 false_label_p = NULL;
3743
3744 /* If there was nothing else in our arms, just forward the label(s). */
3745 if (!then_se && !else_se)
3746 return shortcut_cond_r (pred, true_label_p, false_label_p,
3747 EXPR_LOC_OR_LOC (expr, input_location));
3748
3749 /* If our last subexpression already has a terminal label, reuse it. */
3750 if (else_se)
3751 t = expr_last (else_);
3752 else if (then_se)
3753 t = expr_last (then_);
3754 else
3755 t = NULL;
3756 if (t && TREE_CODE (t) == LABEL_EXPR)
3757 end_label = LABEL_EXPR_LABEL (t);
3758
3759 /* If we don't care about jumping to the 'else' branch, jump to the end
3760 if the condition is false. */
3761 if (!false_label_p)
3762 false_label_p = &end_label;
3763
3764 /* We only want to emit these labels if we aren't hijacking them. */
3765 emit_end = (end_label == NULL_TREE);
3766 emit_false = (false_label == NULL_TREE);
3767
3768 /* We only emit the jump over the else clause if we have to--if the
3769 then clause may fall through. Otherwise we can wind up with a
3770 useless jump and a useless label at the end of gimplified code,
3771 which will cause us to think that this conditional as a whole
3772 falls through even if it doesn't. If we then inline a function
3773 which ends with such a condition, that can cause us to issue an
3774 inappropriate warning about control reaching the end of a
3775 non-void function. */
3776 jump_over_else = block_may_fallthru (then_);
3777
3778 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3779 EXPR_LOC_OR_LOC (expr, input_location));
3780
3781 expr = NULL;
3782 append_to_statement_list (pred, &expr);
3783
3784 append_to_statement_list (then_, &expr);
3785 if (else_se)
3786 {
3787 if (jump_over_else)
3788 {
3789 tree last = expr_last (expr);
3790 t = build_and_jump (&end_label);
3791 if (rexpr_has_location (last))
3792 SET_EXPR_LOCATION (t, rexpr_location (last));
3793 append_to_statement_list (t, &expr);
3794 }
3795 if (emit_false)
3796 {
3797 t = build1 (LABEL_EXPR, void_type_node, false_label);
3798 append_to_statement_list (t, &expr);
3799 }
3800 append_to_statement_list (else_, &expr);
3801 }
3802 if (emit_end && end_label)
3803 {
3804 t = build1 (LABEL_EXPR, void_type_node, end_label);
3805 append_to_statement_list (t, &expr);
3806 }
3807
3808 return expr;
3809 }
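
/* Editorial sketch, not part of GCC: the "simple transformation" above
   when there is no 'else'.  No labels are needed yet; the condition is
   simply nested.  Names are hypothetical.  */

static void
shortcut_no_else_example (int a, int b, void (*c) (void))
{
  /* `if (a && b) c ();' is rewritten without gotos as: */
  if (a)
    if (b)
      c ();
}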
3810
3811 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3812
3813 tree
3814 gimple_boolify (tree expr)
3815 {
3816 tree type = TREE_TYPE (expr);
3817 location_t loc = EXPR_LOCATION (expr);
3818
3819 if (TREE_CODE (expr) == NE_EXPR
3820 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3821 && integer_zerop (TREE_OPERAND (expr, 1)))
3822 {
3823 tree call = TREE_OPERAND (expr, 0);
3824 tree fn = get_callee_fndecl (call);
3825
3826 /* For __builtin_expect ((long) (x), y) recurse into x as well
3827 if x is truth_value_p. */
3828 if (fn
3829 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
3830 && call_expr_nargs (call) == 2)
3831 {
3832 tree arg = CALL_EXPR_ARG (call, 0);
3833 if (arg)
3834 {
3835 if (TREE_CODE (arg) == NOP_EXPR
3836 && TREE_TYPE (arg) == TREE_TYPE (call))
3837 arg = TREE_OPERAND (arg, 0);
3838 if (truth_value_p (TREE_CODE (arg)))
3839 {
3840 arg = gimple_boolify (arg);
3841 CALL_EXPR_ARG (call, 0)
3842 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3843 }
3844 }
3845 }
3846 }
3847
3848 switch (TREE_CODE (expr))
3849 {
3850 case TRUTH_AND_EXPR:
3851 case TRUTH_OR_EXPR:
3852 case TRUTH_XOR_EXPR:
3853 case TRUTH_ANDIF_EXPR:
3854 case TRUTH_ORIF_EXPR:
3855 /* Also boolify the arguments of truth exprs. */
3856 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3857 /* FALLTHRU */
3858
3859 case TRUTH_NOT_EXPR:
3860 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3861
3862 /* These expressions always produce boolean results. */
3863 if (TREE_CODE (type) != BOOLEAN_TYPE)
3864 TREE_TYPE (expr) = boolean_type_node;
3865 return expr;
3866
3867 case ANNOTATE_EXPR:
3868 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3869 {
3870 case annot_expr_ivdep_kind:
3871 case annot_expr_unroll_kind:
3872 case annot_expr_no_vector_kind:
3873 case annot_expr_vector_kind:
3874 case annot_expr_parallel_kind:
3875 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3876 if (TREE_CODE (type) != BOOLEAN_TYPE)
3877 TREE_TYPE (expr) = boolean_type_node;
3878 return expr;
3879 default:
3880 gcc_unreachable ();
3881 }
3882
3883 default:
3884 if (COMPARISON_CLASS_P (expr))
3885 {
3886 /* These expressions always produce boolean results. */
3887 if (TREE_CODE (type) != BOOLEAN_TYPE)
3888 TREE_TYPE (expr) = boolean_type_node;
3889 return expr;
3890 }
3891 /* Other expressions that get here must have boolean values, but
3892 might need to be converted to the appropriate mode. */
3893 if (TREE_CODE (type) == BOOLEAN_TYPE)
3894 return expr;
3895 return fold_convert_loc (loc, boolean_type_node, expr);
3896 }
3897 }
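
/* Editorial sketch, not part of GCC: what boolification means at the
   source level.  An integer-typed condition is used as the boolean test
   `!= 0'; the comparison result always has BOOLEAN_TYPE.  Names here are
   hypothetical.  */

static int
boolify_example (unsigned x, unsigned mask)
{
  /* `if (x & mask)' is treated as `if ((x & mask) != 0)'.  */
  return (x & mask) != 0;
}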
3898
3899 /* Given a conditional expression *EXPR_P without side effects, gimplify
3900 its operands. New statements are inserted to PRE_P. */
3901
3902 static enum gimplify_status
3903 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3904 {
3905 tree expr = *expr_p, cond;
3906 enum gimplify_status ret, tret;
3907 enum tree_code code;
3908
3909 cond = gimple_boolify (COND_EXPR_COND (expr));
3910
3911 /* We need to handle && and || specially, as their gimplification
3912 creates a pure COND_EXPR, which would otherwise lead to an infinite cycle. */
3913 code = TREE_CODE (cond);
3914 if (code == TRUTH_ANDIF_EXPR)
3915 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3916 else if (code == TRUTH_ORIF_EXPR)
3917 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3918 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3919 COND_EXPR_COND (*expr_p) = cond;
3920
3921 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3922 is_gimple_val, fb_rvalue);
3923 ret = MIN (ret, tret);
3924 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3925 is_gimple_val, fb_rvalue);
3926
3927 return MIN (ret, tret);
3928 }
3929
3930 /* Return true if evaluating EXPR could trap.
3931 EXPR is GENERIC, while tree_could_trap_p can be called
3932 only on GIMPLE. */
3933
3934 bool
3935 generic_expr_could_trap_p (tree expr)
3936 {
3937 unsigned i, n;
3938
3939 if (!expr || is_gimple_val (expr))
3940 return false;
3941
3942 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3943 return true;
3944
3945 n = TREE_OPERAND_LENGTH (expr);
3946 for (i = 0; i < n; i++)
3947 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3948 return true;
3949
3950 return false;
3951 }
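
/* Editorial sketch, not part of GCC: why trapping matters for the pure
   COND_EXPR path above.  With the hypothetical operands below, `a / b'
   may trap when b == 0, so it must stay behind a branch and cannot be
   evaluated unconditionally as an rvalue.  */

static int
could_trap_example (int flag, int a, int b)
{
  return flag ? a / b : 0;
}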
3952
3953 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3954 into
3955
3956 if (p)                      if (p)
3957   t1 = a;                     a;
3958 else              or       else
3959   t1 = b;                     b;
3960 t1;
3961
3962 The second form is used when *EXPR_P is of type void.
3963
3964 PRE_P points to the list where side effects that must happen before
3965 *EXPR_P should be stored. */
3966
3967 static enum gimplify_status
3968 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3969 {
3970 tree expr = *expr_p;
3971 tree type = TREE_TYPE (expr);
3972 location_t loc = EXPR_LOCATION (expr);
3973 tree tmp, arm1, arm2;
3974 enum gimplify_status ret;
3975 tree label_true, label_false, label_cont;
3976 bool have_then_clause_p, have_else_clause_p;
3977 gcond *cond_stmt;
3978 enum tree_code pred_code;
3979 gimple_seq seq = NULL;
3980
3981 /* If this COND_EXPR has a value, copy the values into a temporary within
3982 the arms. */
3983 if (!VOID_TYPE_P (type))
3984 {
3985 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3986 tree result;
3987
3988 /* If either an rvalue is ok or we do not require an lvalue, create the
3989 temporary. But we cannot do that if the type is addressable. */
3990 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3991 && !TREE_ADDRESSABLE (type))
3992 {
3993 if (gimplify_ctxp->allow_rhs_cond_expr
3994 /* If either branch has side effects or could trap, it can't be
3995 evaluated unconditionally. */
3996 && !TREE_SIDE_EFFECTS (then_)
3997 && !generic_expr_could_trap_p (then_)
3998 && !TREE_SIDE_EFFECTS (else_)
3999 && !generic_expr_could_trap_p (else_))
4000 return gimplify_pure_cond_expr (expr_p, pre_p);
4001
4002 tmp = create_tmp_var (type, "iftmp");
4003 result = tmp;
4004 }
4005
4006 /* Otherwise, only create and copy references to the values. */
4007 else
4008 {
4009 type = build_pointer_type (type);
4010
4011 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4012 then_ = build_fold_addr_expr_loc (loc, then_);
4013
4014 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4015 else_ = build_fold_addr_expr_loc (loc, else_);
4016
4017 expr
4018 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4019
4020 tmp = create_tmp_var (type, "iftmp");
4021 result = build_simple_mem_ref_loc (loc, tmp);
4022 }
4023
4024 /* Build the new then clause, `tmp = then_;'. But don't build the
4025 assignment if the value is void; in C++ it can be void if the arm is a throw. */
4026 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4027 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4028
4029 /* Similarly, build the new else clause, `tmp = else_;'. */
4030 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4031 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4032
4033 TREE_TYPE (expr) = void_type_node;
4034 recalculate_side_effects (expr);
4035
4036 /* Move the COND_EXPR to the prequeue. */
4037 gimplify_stmt (&expr, pre_p);
4038
4039 *expr_p = result;
4040 return GS_ALL_DONE;
4041 }
4042
4043 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4044 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4045 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4046 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4047
4048 /* Make sure the condition has BOOLEAN_TYPE. */
4049 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4050
4051 /* Break apart && and || conditions. */
4052 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4053 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4054 {
4055 expr = shortcut_cond_expr (expr);
4056
4057 if (expr != *expr_p)
4058 {
4059 *expr_p = expr;
4060
4061 /* We can't rely on gimplify_expr to re-gimplify the expanded
4062 form properly, as cleanups might cause the target labels to be
4063 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4064 set up a conditional context. */
4065 gimple_push_condition ();
4066 gimplify_stmt (expr_p, &seq);
4067 gimple_pop_condition (pre_p);
4068 gimple_seq_add_seq (pre_p, seq);
4069
4070 return GS_ALL_DONE;
4071 }
4072 }
4073
4074 /* Now do the normal gimplification. */
4075
4076 /* Gimplify condition. */
4077 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
4078 fb_rvalue);
4079 if (ret == GS_ERROR)
4080 return GS_ERROR;
4081 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4082
4083 gimple_push_condition ();
4084
4085 have_then_clause_p = have_else_clause_p = false;
4086 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4087 if (label_true
4088 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4089 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4090 have different locations, otherwise we end up with incorrect
4091 location information on the branches. */
4092 && (optimize
4093 || !EXPR_HAS_LOCATION (expr)
4094 || !rexpr_has_location (label_true)
4095 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4096 {
4097 have_then_clause_p = true;
4098 label_true = GOTO_DESTINATION (label_true);
4099 }
4100 else
4101 label_true = create_artificial_label (UNKNOWN_LOCATION);
4102 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4103 if (label_false
4104 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4105 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4106 have different locations, otherwise we end up with incorrect
4107 location information on the branches. */
4108 && (optimize
4109 || !EXPR_HAS_LOCATION (expr)
4110 || !rexpr_has_location (label_false)
4111 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4112 {
4113 have_else_clause_p = true;
4114 label_false = GOTO_DESTINATION (label_false);
4115 }
4116 else
4117 label_false = create_artificial_label (UNKNOWN_LOCATION);
4118
4119 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4120 &arm2);
4121 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4122 label_false);
4123 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4124 gimplify_seq_add_stmt (&seq, cond_stmt);
4125 gimple_stmt_iterator gsi = gsi_last (seq);
4126 maybe_fold_stmt (&gsi);
4127
4128 label_cont = NULL_TREE;
4129 if (!have_then_clause_p)
4130 {
4131 /* For if (...) {} else { code; } put label_true after
4132 the else block. */
4133 if (TREE_OPERAND (expr, 1) == NULL_TREE
4134 && !have_else_clause_p
4135 && TREE_OPERAND (expr, 2) != NULL_TREE)
4136 label_cont = label_true;
4137 else
4138 {
4139 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4140 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4141 /* For if (...) { code; } else {} or
4142 if (...) { code; } else goto label; or
4143 if (...) { code; return; } else { ... }
4144 label_cont isn't needed. */
4145 if (!have_else_clause_p
4146 && TREE_OPERAND (expr, 2) != NULL_TREE
4147 && gimple_seq_may_fallthru (seq))
4148 {
4149 gimple *g;
4150 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4151
4152 g = gimple_build_goto (label_cont);
4153
4154 /* GIMPLE_COND's are very low level; they have embedded
4155 gotos. This particular embedded goto should not be marked
4156 with the location of the original COND_EXPR, as it would
4157 correspond to the COND_EXPR's condition, not the ELSE or the
4158 THEN arms. To avoid marking it with the wrong location, flag
4159 it as "no location". */
4160 gimple_set_do_not_emit_location (g);
4161
4162 gimplify_seq_add_stmt (&seq, g);
4163 }
4164 }
4165 }
4166 if (!have_else_clause_p)
4167 {
4168 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4169 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4170 }
4171 if (label_cont)
4172 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4173
4174 gimple_pop_condition (pre_p);
4175 gimple_seq_add_seq (pre_p, seq);
4176
4177 if (ret == GS_ERROR)
4178 ; /* Do nothing. */
4179 else if (have_then_clause_p || have_else_clause_p)
4180 ret = GS_ALL_DONE;
4181 else
4182 {
4183 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4184 expr = TREE_OPERAND (expr, 0);
4185 gimplify_stmt (&expr, pre_p);
4186 }
4187
4188 *expr_p = NULL;
4189 return ret;
4190 }
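
/* Editorial sketch, not part of GCC: the value-producing COND_EXPR
   lowering described above, written out by hand.  `iftmp' mirrors the
   temporary created by create_tmp_var; other names are hypothetical.  */

static int
cond_value_example (int p, int a, int b)
{
  int iftmp;
  if (p)
    iftmp = a;     /* then clause: INIT_EXPR of the temporary */
  else
    iftmp = b;     /* else clause likewise */
  return iftmp;    /* the COND_EXPR's value */
}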
4191
4192 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4193 to be marked addressable.
4194
4195 We cannot rely on such an expression being directly markable if a temporary
4196 has been created by the gimplification. In this case, we create another
4197 temporary and initialize it with a copy, which will become a store after we
4198 mark it addressable. This can happen if the front-end passed us something
4199 that it could not mark addressable yet, like a Fortran pass-by-reference
4200 parameter (int) floatvar. */
4201
4202 static void
4203 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4204 {
4205 while (handled_component_p (*expr_p))
4206 expr_p = &TREE_OPERAND (*expr_p, 0);
4207 if (is_gimple_reg (*expr_p))
4208 {
4209 /* Do not allow an SSA name as the temporary. */
4210 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4211 DECL_GIMPLE_REG_P (var) = 0;
4212 *expr_p = var;
4213 }
4214 }
4215
4216 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4217 a call to __builtin_memcpy. */
4218
4219 static enum gimplify_status
4220 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4221 gimple_seq *seq_p)
4222 {
4223 tree t, to, to_ptr, from, from_ptr;
4224 gcall *gs;
4225 location_t loc = EXPR_LOCATION (*expr_p);
4226
4227 to = TREE_OPERAND (*expr_p, 0);
4228 from = TREE_OPERAND (*expr_p, 1);
4229
4230 /* Mark the RHS addressable. Beware that it may not be possible to do so
4231 directly if a temporary has been created by the gimplification. */
4232 prepare_gimple_addressable (&from, seq_p);
4233
4234 mark_addressable (from);
4235 from_ptr = build_fold_addr_expr_loc (loc, from);
4236 gimplify_arg (&from_ptr, seq_p, loc);
4237
4238 mark_addressable (to);
4239 to_ptr = build_fold_addr_expr_loc (loc, to);
4240 gimplify_arg (&to_ptr, seq_p, loc);
4241
4242 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4243
4244 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4245
4246 if (want_value)
4247 {
4248 /* tmp = memcpy() */
4249 t = create_tmp_var (TREE_TYPE (to_ptr));
4250 gimple_call_set_lhs (gs, t);
4251 gimplify_seq_add_stmt (seq_p, gs);
4252
4253 *expr_p = build_simple_mem_ref (t);
4254 return GS_ALL_DONE;
4255 }
4256
4257 gimplify_seq_add_stmt (seq_p, gs);
4258 *expr_p = NULL;
4259 return GS_ALL_DONE;
4260 }
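
/* Editorial sketch, not part of GCC: the source-level effect of the
   rewrite above.  A block copy of a large aggregate becomes a call to
   memcpy of the object's size; the type and names are hypothetical.  */

#include <string.h>

struct blob { char bytes[256]; };

static void
memcpy_example (struct blob *to, const struct blob *from)
{
  /* `*to = *from;' is replaced by: */
  memcpy (to, from, sizeof (struct blob));
}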
4261
4262 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4263 a call to __builtin_memset. In this case we know that the RHS is
4264 a CONSTRUCTOR with an empty element list. */
4265
4266 static enum gimplify_status
4267 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4268 gimple_seq *seq_p)
4269 {
4270 tree t, from, to, to_ptr;
4271 gcall *gs;
4272 location_t loc = EXPR_LOCATION (*expr_p);
4273
4274 /* Assert our assumptions, to abort instead of producing wrong code
4275 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4276 not be immediately exposed. */
4277 from = TREE_OPERAND (*expr_p, 1);
4278 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4279 from = TREE_OPERAND (from, 0);
4280
4281 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4282 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4283
4284 /* Now proceed. */
4285 to = TREE_OPERAND (*expr_p, 0);
4286
4287 to_ptr = build_fold_addr_expr_loc (loc, to);
4288 gimplify_arg (&to_ptr, seq_p, loc);
4289 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4290
4291 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4292
4293 if (want_value)
4294 {
4295 /* tmp = memset() */
4296 t = create_tmp_var (TREE_TYPE (to_ptr));
4297 gimple_call_set_lhs (gs, t);
4298 gimplify_seq_add_stmt (seq_p, gs);
4299
4300 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4301 return GS_ALL_DONE;
4302 }
4303
4304 gimplify_seq_add_stmt (seq_p, gs);
4305 *expr_p = NULL;
4306 return GS_ALL_DONE;
4307 }
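
/* Editorial sketch, not part of GCC: the companion rewrite for an empty
   CONSTRUCTOR on the RHS.  Zero-initialization of an aggregate becomes a
   call to memset; this reuses the hypothetical `struct blob' from the
   sketch above.  */

static void
memset_example (struct blob *to)
{
  /* `*to = (struct blob) { 0 };' with no explicit elements becomes: */
  memset (to, 0, sizeof (struct blob));
}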
4308
4309 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4310 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4311 assignment. Return non-null if we detect a potential overlap. */
4312
4313 struct gimplify_init_ctor_preeval_data
4314 {
4315 /* The base decl of the lhs object. May be NULL, in which case we
4316 have to assume the lhs is indirect. */
4317 tree lhs_base_decl;
4318
4319 /* The alias set of the lhs object. */
4320 alias_set_type lhs_alias_set;
4321 };
4322
4323 static tree
4324 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4325 {
4326 struct gimplify_init_ctor_preeval_data *data
4327 = (struct gimplify_init_ctor_preeval_data *) xdata;
4328 tree t = *tp;
4329
4330 /* If we find the base object, obviously we have overlap. */
4331 if (data->lhs_base_decl == t)
4332 return t;
4333
4334 /* If the constructor component is indirect, determine if we have a
4335 potential overlap with the lhs. The only bits of information we
4336 have to go on at this point are addressability and alias sets. */
4337 if ((INDIRECT_REF_P (t)
4338 || TREE_CODE (t) == MEM_REF)
4339 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4340 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4341 return t;
4342
4343 /* If the constructor component is a call, determine if it can hide a
4344 potential overlap with the lhs through an INDIRECT_REF like above.
4345 ??? Ugh - this is completely broken. In fact this whole analysis
4346 doesn't look conservative. */
4347 if (TREE_CODE (t) == CALL_EXPR)
4348 {
4349 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4350
4351 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4352 if (POINTER_TYPE_P (TREE_VALUE (type))
4353 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4354 && alias_sets_conflict_p (data->lhs_alias_set,
4355 get_alias_set
4356 (TREE_TYPE (TREE_VALUE (type)))))
4357 return t;
4358 }
4359
4360 if (IS_TYPE_OR_DECL_P (t))
4361 *walk_subtrees = 0;
4362 return NULL;
4363 }
4364
4365 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4366 force values that overlap with the lhs (as described by *DATA)
4367 into temporaries. */
4368
4369 static void
4370 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4371 struct gimplify_init_ctor_preeval_data *data)
4372 {
4373 enum gimplify_status one;
4374
4375 /* If the value is constant, then there's nothing to pre-evaluate. */
4376 if (TREE_CONSTANT (*expr_p))
4377 {
4378 /* Ensure it does not have side effects, it might contain a reference to
4379 the object we're initializing. */
4380 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4381 return;
4382 }
4383
4384 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4385 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4386 return;
4387
4388 /* Recurse for nested constructors. */
4389 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4390 {
4391 unsigned HOST_WIDE_INT ix;
4392 constructor_elt *ce;
4393 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4394
4395 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4396 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4397
4398 return;
4399 }
4400
4401 /* If this is a variable sized type, we must remember the size. */
4402 maybe_with_size_expr (expr_p);
4403
4404 /* Gimplify the constructor element to something appropriate for the rhs
4405 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4406 the gimplifier will consider this a store to memory. Doing this
4407 gimplification now means that we won't have to deal with complicated
4408 language-specific trees, nor trees like SAVE_EXPR that can induce
4409 exponential search behavior. */
4410 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4411 if (one == GS_ERROR)
4412 {
4413 *expr_p = NULL;
4414 return;
4415 }
4416
4417 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4418 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4419 always be true for all scalars, since is_gimple_mem_rhs insists on a
4420 temporary variable for them. */
4421 if (DECL_P (*expr_p))
4422 return;
4423
4424 /* If this is of variable size, we have no choice but to assume it doesn't
4425 overlap since we can't make a temporary for it. */
4426 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4427 return;
4428
4429 /* Otherwise, we must search for overlap ... */
4430 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4431 return;
4432
4433 /* ... and if found, force the value into a temporary. */
4434 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4435 }
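
/* Editorial sketch, not part of GCC: why pre-evaluation is needed.  In
   `p = (struct pair_ex) { .x = p.y, .y = p.x };' the constructor reads
   the object being stored to, so the overlapping values are forced into
   temporaries before any element store, roughly as below.  All names are
   hypothetical.  */

struct pair_ex { int x, y; };

static void
preeval_example (struct pair_ex *p)
{
  int tmp_x = p->y;   /* overlapping value captured before any store */
  int tmp_y = p->x;
  p->x = tmp_x;
  p->y = tmp_y;
}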
4436
4437 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4438 a RANGE_EXPR in a CONSTRUCTOR for an array.
4439
4440 var = lower;
4441 loop_entry:
4442 object[var] = value;
4443 if (var == upper)
4444 goto loop_exit;
4445 var = var + 1;
4446 goto loop_entry;
4447 loop_exit:
4448
4449 We increment var _after_ the loop exit check because we might otherwise
4450 fail if upper == TYPE_MAX_VALUE (type for upper).
4451
4452 Note that we never have to deal with SAVE_EXPRs here, because this has
4453 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4454
4455 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4456 gimple_seq *, bool);
4457
4458 static void
4459 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4460 tree value, tree array_elt_type,
4461 gimple_seq *pre_p, bool cleared)
4462 {
4463 tree loop_entry_label, loop_exit_label, fall_thru_label;
4464 tree var, var_type, cref, tmp;
4465
4466 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4467 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4468 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4469
4470 /* Create and initialize the index variable. */
4471 var_type = TREE_TYPE (upper);
4472 var = create_tmp_var (var_type);
4473 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4474
4475 /* Add the loop entry label. */
4476 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4477
4478 /* Build the reference. */
4479 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4480 var, NULL_TREE, NULL_TREE);
4481
4482 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4483 the store. Otherwise just assign value to the reference. */
4484
4485 if (TREE_CODE (value) == CONSTRUCTOR)
4486 /* NB we might have to call ourselves recursively through
4487 gimplify_init_ctor_eval if the value is a constructor. */
4488 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4489 pre_p, cleared);
4490 else
4491 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4492
4493 /* We exit the loop when the index var is equal to the upper bound. */
4494 gimplify_seq_add_stmt (pre_p,
4495 gimple_build_cond (EQ_EXPR, var, upper,
4496 loop_exit_label, fall_thru_label));
4497
4498 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4499
4500 /* Otherwise, increment the index var... */
4501 tmp = build2 (PLUS_EXPR, var_type, var,
4502 fold_convert (var_type, integer_one_node));
4503 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4504
4505 /* ...and jump back to the loop entry. */
4506 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4507
4508 /* Add the loop exit label. */
4509 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4510 }
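
/* Editorial sketch, not part of GCC: the RANGE_EXPR loop above for the
   GNU C initializer `int a[8] = { [0 ... 7] = v };', written by hand.
   Note the increment sits after the exit test, exactly as described.  */

static void
range_init_example (int a[8], int v)
{
  unsigned var = 0;     /* var = lower */
 loop_entry:
  a[var] = v;           /* object[var] = value */
  if (var == 7)         /* var == upper */
    goto loop_exit;
  var = var + 1;
  goto loop_entry;
 loop_exit:;
}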
4511
4512 /* Return true if FDECL is accessing a field that is zero sized. */
4513
4514 static bool
4515 zero_sized_field_decl (const_tree fdecl)
4516 {
4517 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4518 && integer_zerop (DECL_SIZE (fdecl)))
4519 return true;
4520 return false;
4521 }
4522
4523 /* Return true if TYPE is zero sized. */
4524
4525 static bool
4526 zero_sized_type (const_tree type)
4527 {
4528 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4529 && integer_zerop (TYPE_SIZE (type)))
4530 return true;
4531 return false;
4532 }
4533
4534 /* A subroutine of gimplify_init_constructor. Generate individual
4535 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4536 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4537 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4538 zeroed first. */
4539
4540 static void
4541 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4542 gimple_seq *pre_p, bool cleared)
4543 {
4544 tree array_elt_type = NULL;
4545 unsigned HOST_WIDE_INT ix;
4546 tree purpose, value;
4547
4548 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4549 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4550
4551 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4552 {
4553 tree cref;
4554
4555 /* NULL values are created above for gimplification errors. */
4556 if (value == NULL)
4557 continue;
4558
4559 if (cleared && initializer_zerop (value))
4560 continue;
4561
4562 /* ??? Here's to hoping the front end fills in all of the indices,
4563 so we don't have to figure out what's missing ourselves. */
4564 gcc_assert (purpose);
4565
4566 /* Skip zero-sized fields, unless value has side-effects. This can
4567 happen with calls to functions returning a zero-sized type, which
4568 we shouldn't discard. As a number of downstream passes don't
4569 expect sets of zero-sized fields, we rely on the gimplification of
4570 the MODIFY_EXPR we make below to drop the assignment statement. */
4571 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4572 continue;
4573
4574 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4575 whole range. */
4576 if (TREE_CODE (purpose) == RANGE_EXPR)
4577 {
4578 tree lower = TREE_OPERAND (purpose, 0);
4579 tree upper = TREE_OPERAND (purpose, 1);
4580
4581 /* If the lower bound is equal to upper, just treat it as if
4582 upper was the index. */
4583 if (simple_cst_equal (lower, upper))
4584 purpose = upper;
4585 else
4586 {
4587 gimplify_init_ctor_eval_range (object, lower, upper, value,
4588 array_elt_type, pre_p, cleared);
4589 continue;
4590 }
4591 }
4592
4593 if (array_elt_type)
4594 {
4595 /* Do not use bitsizetype for ARRAY_REF indices. */
4596 if (TYPE_DOMAIN (TREE_TYPE (object)))
4597 purpose
4598 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4599 purpose);
4600 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4601 purpose, NULL_TREE, NULL_TREE);
4602 }
4603 else
4604 {
4605 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4606 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4607 unshare_expr (object), purpose, NULL_TREE);
4608 }
4609
4610 if (TREE_CODE (value) == CONSTRUCTOR
4611 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4612 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4613 pre_p, cleared);
4614 else
4615 {
4616 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4617 gimplify_and_add (init, pre_p);
4618 ggc_free (init);
4619 }
4620 }
4621 }
4622
4623 /* Return the appropriate RHS predicate for this LHS. */
4624
4625 gimple_predicate
4626 rhs_predicate_for (tree lhs)
4627 {
4628 if (is_gimple_reg (lhs))
4629 return is_gimple_reg_rhs_or_call;
4630 else
4631 return is_gimple_mem_rhs_or_call;
4632 }
4633
4634 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4635 before the LHS has been gimplified. */
4636
4637 static gimple_predicate
4638 initial_rhs_predicate_for (tree lhs)
4639 {
4640 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4641 return is_gimple_reg_rhs_or_call;
4642 else
4643 return is_gimple_mem_rhs_or_call;
4644 }
4645
4646 /* Gimplify a C99 compound literal expression. This just means adding
4647 the DECL_EXPR before the current statement and using its anonymous
4648 decl instead. */
4649
4650 static enum gimplify_status
4651 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4652 bool (*gimple_test_f) (tree),
4653 fallback_t fallback)
4654 {
4655 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4656 tree decl = DECL_EXPR_DECL (decl_s);
4657 tree init = DECL_INITIAL (decl);
4658 /* Mark the decl as addressable if the compound literal
4659 expression is addressable now, otherwise it is marked too late
4660 after we gimplify the initialization expression. */
4661 if (TREE_ADDRESSABLE (*expr_p))
4662 TREE_ADDRESSABLE (decl) = 1;
4663 /* Otherwise, if we don't need an lvalue and have a literal, directly
4664 substitute it. Check if it matches the gimple predicate, as
4665 otherwise we'd generate a new temporary, and we can as well just
4666 use the decl we already have. */
4667 else if (!TREE_ADDRESSABLE (decl)
4668 && init
4669 && (fallback & fb_lvalue) == 0
4670 && gimple_test_f (init))
4671 {
4672 *expr_p = init;
4673 return GS_OK;
4674 }
4675
4676 /* Preliminarily mark non-addressed complex variables as eligible
4677 for promotion to gimple registers. We'll transform their uses
4678 as we find them. */
4679 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4680 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4681 && !TREE_THIS_VOLATILE (decl)
4682 && !needs_to_live_in_memory (decl))
4683 DECL_GIMPLE_REG_P (decl) = 1;
4684
4685 /* If the decl is not addressable, then it is being used in some
4686 expression or on the right hand side of a statement, and it can
4687 be put into a readonly data section. */
4688 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4689 TREE_READONLY (decl) = 1;
4690
4691 /* This decl isn't mentioned in the enclosing block, so add it to the
4692 list of temps. FIXME it seems a bit of a kludge to say that
4693 anonymous artificial vars aren't pushed, but everything else is. */
4694 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4695 gimple_add_tmp_var (decl);
4696
4697 gimplify_and_add (decl_s, pre_p);
4698 *expr_p = decl;
4699 return GS_OK;
4700 }
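
/* Editorial sketch, not part of GCC: the C99 compound literal lowering
   above.  The literal's anonymous decl is emitted first and then used in
   its place; hand-written equivalent with hypothetical names.  */

static int
compound_literal_example (void)
{
  /* `int *q = (int []) { 1, 2, 3 };' behaves as if written: */
  int anon[3] = { 1, 2, 3 };    /* the DECL_EXPR added before the statement */
  int *q = anon;                /* the decl substituted for the literal */
  return q[1];
}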
4701
4702 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4703 return a new CONSTRUCTOR if something changed. */
4704
4705 static tree
4706 optimize_compound_literals_in_ctor (tree orig_ctor)
4707 {
4708 tree ctor = orig_ctor;
4709 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4710 unsigned int idx, num = vec_safe_length (elts);
4711
4712 for (idx = 0; idx < num; idx++)
4713 {
4714 tree value = (*elts)[idx].value;
4715 tree newval = value;
4716 if (TREE_CODE (value) == CONSTRUCTOR)
4717 newval = optimize_compound_literals_in_ctor (value);
4718 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4719 {
4720 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4721 tree decl = DECL_EXPR_DECL (decl_s);
4722 tree init = DECL_INITIAL (decl);
4723
4724 if (!TREE_ADDRESSABLE (value)
4725 && !TREE_ADDRESSABLE (decl)
4726 && init
4727 && TREE_CODE (init) == CONSTRUCTOR)
4728 newval = optimize_compound_literals_in_ctor (init);
4729 }
4730 if (newval == value)
4731 continue;
4732
4733 if (ctor == orig_ctor)
4734 {
4735 ctor = copy_node (orig_ctor);
4736 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4737 elts = CONSTRUCTOR_ELTS (ctor);
4738 }
4739 (*elts)[idx].value = newval;
4740 }
4741 return ctor;
4742 }
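
/* Editorial sketch, not part of GCC: the constructor flattening above.
   A compound literal embedded in an initializer is replaced by that
   literal's own initializer; the types and names are hypothetical.  */

struct inner_ex { int a, b; };
struct outer_ex { struct inner_ex in; };

static int
ctor_literal_example (void)
{
  /* The embedded compound literal form ... */
  struct outer_ex o1 = { .in = (struct inner_ex) { 1, 2 } };
  /* ... is rewritten to the plain nested CONSTRUCTOR: */
  struct outer_ex o2 = { .in = { 1, 2 } };
  return o1.in.a + o2.in.b;
}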
4743
4744 /* A subroutine of gimplify_modify_expr. Break out elements of a
4745 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4746
4747 Note that we still need to clear any elements that don't have explicit
4748 initializers, so if not all elements are initialized we keep the
4749 original MODIFY_EXPR, we just remove all of the constructor elements.
4750
4751 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4752 GS_ERROR if we would have to create a temporary when gimplifying
4753 this constructor. Otherwise, return GS_OK.
4754
4755 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4756
4757 static enum gimplify_status
4758 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4759 bool want_value, bool notify_temp_creation)
4760 {
4761 tree object, ctor, type;
4762 enum gimplify_status ret;
4763 vec<constructor_elt, va_gc> *elts;
4764
4765 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4766
4767 if (!notify_temp_creation)
4768 {
4769 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4770 is_gimple_lvalue, fb_lvalue);
4771 if (ret == GS_ERROR)
4772 return ret;
4773 }
4774
4775 object = TREE_OPERAND (*expr_p, 0);
4776 ctor = TREE_OPERAND (*expr_p, 1)
4777 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4778 type = TREE_TYPE (ctor);
4779 elts = CONSTRUCTOR_ELTS (ctor);
4780 ret = GS_ALL_DONE;
4781
4782 switch (TREE_CODE (type))
4783 {
4784 case RECORD_TYPE:
4785 case UNION_TYPE:
4786 case QUAL_UNION_TYPE:
4787 case ARRAY_TYPE:
4788 {
4789 struct gimplify_init_ctor_preeval_data preeval_data;
4790 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4791 HOST_WIDE_INT num_unique_nonzero_elements;
4792 bool cleared, complete_p, valid_const_initializer;
4793 /* Use readonly data for initializers of this or smaller size
4794 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4795 ratio. */
4796 const HOST_WIDE_INT min_unique_size = 64;
4797 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4798 is smaller than this, use readonly data. */
4799 const int unique_nonzero_ratio = 8;
4800
4801 /* Aggregate types must lower constructors to initialization of
4802 individual elements. The exception is that a CONSTRUCTOR node
4803 with no elements indicates zero-initialization of the whole. */
4804 if (vec_safe_is_empty (elts))
4805 {
4806 if (notify_temp_creation)
4807 return GS_OK;
4808 break;
4809 }
4810
4811 /* Fetch information about the constructor to direct later processing.
4812 We might want to make static versions of it in various cases, and
4813 can only do so if it is known to be a valid constant initializer. */
4814 valid_const_initializer
4815 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4816 &num_unique_nonzero_elements,
4817 &num_ctor_elements, &complete_p);
4818
4819 /* If a const aggregate variable is being initialized, then it
4820 should never be a loss to promote the variable to static. */
4821 if (valid_const_initializer
4822 && num_nonzero_elements > 1
4823 && TREE_READONLY (object)
4824 && VAR_P (object)
4825 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4826 /* For ctors that have many repeated nonzero elements
4827 represented through RANGE_EXPRs, prefer initializing
4828 those through runtime loops over copies of large amounts
4829 of data from readonly data section. */
4830 && (num_unique_nonzero_elements
4831 > num_nonzero_elements / unique_nonzero_ratio
4832 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4833 <= (unsigned HOST_WIDE_INT) min_unique_size)))
4834 {
4835 if (notify_temp_creation)
4836 return GS_ERROR;
4837 DECL_INITIAL (object) = ctor;
4838 TREE_STATIC (object) = 1;
4839 if (!DECL_NAME (object))
4840 DECL_NAME (object) = create_tmp_var_name ("C");
4841 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4842
4843 /* ??? C++ doesn't automatically append a .<number> to the
4844 assembler name, and even when it does, it looks at FE private
4845 data structures to figure out what that number should be,
4846 which are not set for this variable. I suppose this is
4847 important for local statics for inline functions, which aren't
4848 "local" in the object file sense. So in order to get a unique
4849 TU-local symbol, we must invoke the lhd version now. */
4850 lhd_set_decl_assembler_name (object);
4851
4852 *expr_p = NULL_TREE;
4853 break;
4854 }
4855
4856 /* If there are "lots" of initialized elements, even discounting
4857 those that are not address constants (and thus *must* be
4858 computed at runtime), then partition the constructor into
4859 constant and non-constant parts. Block copy the constant
4860 parts in, then generate code for the non-constant parts. */
4861 /* TODO. There's code in cp/typeck.c to do this. */
4862
4863 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4864 /* store_constructor will ignore the clearing of variable-sized
4865 objects. Initializers for such objects must explicitly set
4866 every field that needs to be set. */
4867 cleared = false;
4868 else if (!complete_p)
4869 /* If the constructor isn't complete, clear the whole object
4870 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4871
4872 ??? This ought not to be needed. For any element not present
4873 in the initializer, we should simply set it to zero. Except
4874 we'd need to *find* the elements that are not present, and that
4875 requires trickery to avoid quadratic compile-time behavior in
4876 large cases or excessive memory use in small cases. */
4877 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
4878 else if (num_ctor_elements - num_nonzero_elements
4879 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4880 && num_nonzero_elements < num_ctor_elements / 4)
4881 /* If there are "lots" of zeros, it's more efficient to clear
4882 the memory and then set the nonzero elements. */
4883 cleared = true;
4884 else
4885 cleared = false;
4886
4887 /* If there are "lots" of initialized elements, and all of them
4888 are valid address constants, then the entire initializer can
4889 be dropped to memory, and then memcpy'd out. Don't do this
4890 for sparse arrays, though, as it's more efficient to follow
4891 the standard CONSTRUCTOR behavior of memset followed by
4892 individual element initialization. Also don't do this for small
4893 all-zero initializers (which aren't big enough to merit
4894 clearing), and don't try to make bitwise copies of
4895 TREE_ADDRESSABLE types. */
4896
4897 if (valid_const_initializer
4898 && !(cleared || num_nonzero_elements == 0)
4899 && !TREE_ADDRESSABLE (type))
4900 {
4901 HOST_WIDE_INT size = int_size_in_bytes (type);
4902 unsigned int align;
4903
4904 /* ??? We can still get unbounded array types, at least
4905 from the C++ front end. This seems wrong, but attempt
4906 to work around it for now. */
4907 if (size < 0)
4908 {
4909 size = int_size_in_bytes (TREE_TYPE (object));
4910 if (size >= 0)
4911 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4912 }
4913
4914 /* Find the maximum alignment we can assume for the object. */
4915 /* ??? Make use of DECL_OFFSET_ALIGN. */
4916 if (DECL_P (object))
4917 align = DECL_ALIGN (object);
4918 else
4919 align = TYPE_ALIGN (type);
4920
4921 /* Do a block move either if the size is so small as to make
4922 each individual move a sub-unit move on average, or if it
4923 is so large as to make individual moves inefficient. */
4924 if (size > 0
4925 && num_nonzero_elements > 1
4926 /* For ctors that have many repeated nonzero elements
4927 represented through RANGE_EXPRs, prefer initializing
4928 those through runtime loops over copies of large amounts
4929 of data from readonly data section. */
4930 && (num_unique_nonzero_elements
4931 > num_nonzero_elements / unique_nonzero_ratio
4932 || size <= min_unique_size)
4933 && (size < num_nonzero_elements
4934 || !can_move_by_pieces (size, align)))
4935 {
4936 if (notify_temp_creation)
4937 return GS_ERROR;
4938
4939 walk_tree (&ctor, force_labels_r, NULL, NULL);
4940 ctor = tree_output_constant_def (ctor);
4941 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4942 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4943 TREE_OPERAND (*expr_p, 1) = ctor;
4944
4945 /* This is no longer an assignment of a CONSTRUCTOR, but
4946 we still may have processing to do on the LHS. So
4947 pretend we didn't do anything here to let that happen. */
4948 return GS_UNHANDLED;
4949 }
4950 }
4951
4952 /* If the target is volatile, we have non-zero elements and more than
4953 one field to assign, initialize the target from a temporary. */
4954 if (TREE_THIS_VOLATILE (object)
4955 && !TREE_ADDRESSABLE (type)
4956 && num_nonzero_elements > 0
4957 && vec_safe_length (elts) > 1)
4958 {
4959 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4960 TREE_OPERAND (*expr_p, 0) = temp;
4961 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4962 *expr_p,
4963 build2 (MODIFY_EXPR, void_type_node,
4964 object, temp));
4965 return GS_OK;
4966 }
4967
4968 if (notify_temp_creation)
4969 return GS_OK;
4970
4971 /* If there are nonzero elements and if needed, pre-evaluate to capture
4972 elements overlapping with the lhs into temporaries. We must do this
4973 before clearing to fetch the values before they are zeroed-out. */
4974 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4975 {
4976 preeval_data.lhs_base_decl = get_base_address (object);
4977 if (!DECL_P (preeval_data.lhs_base_decl))
4978 preeval_data.lhs_base_decl = NULL;
4979 preeval_data.lhs_alias_set = get_alias_set (object);
4980
4981 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4982 pre_p, post_p, &preeval_data);
4983 }
4984
4985 bool ctor_has_side_effects_p
4986 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4987
4988 if (cleared)
4989 {
4990 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4991 Note that we still have to gimplify, in order to handle the
4992 case of variable sized types. Avoid shared tree structures. */
4993 CONSTRUCTOR_ELTS (ctor) = NULL;
4994 TREE_SIDE_EFFECTS (ctor) = 0;
4995 object = unshare_expr (object);
4996 gimplify_stmt (expr_p, pre_p);
4997 }
4998
4999 /* If we have not block cleared the object, or if there are nonzero
5000 elements in the constructor, or if the constructor has side effects,
5001 add assignments to the individual scalar fields of the object. */
5002 if (!cleared
5003 || num_nonzero_elements > 0
5004 || ctor_has_side_effects_p)
5005 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5006
5007 *expr_p = NULL_TREE;
5008 }
5009 break;
5010
5011 case COMPLEX_TYPE:
5012 {
5013 tree r, i;
5014
5015 if (notify_temp_creation)
5016 return GS_OK;
5017
5018 /* Extract the real and imaginary parts out of the ctor. */
5019 gcc_assert (elts->length () == 2);
5020 r = (*elts)[0].value;
5021 i = (*elts)[1].value;
5022 if (r == NULL || i == NULL)
5023 {
5024 tree zero = build_zero_cst (TREE_TYPE (type));
5025 if (r == NULL)
5026 r = zero;
5027 if (i == NULL)
5028 i = zero;
5029 }
5030
5031 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5032 represent creation of a complex value. */
5033 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5034 {
5035 ctor = build_complex (type, r, i);
5036 TREE_OPERAND (*expr_p, 1) = ctor;
5037 }
5038 else
5039 {
5040 ctor = build2 (COMPLEX_EXPR, type, r, i);
5041 TREE_OPERAND (*expr_p, 1) = ctor;
5042 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5043 pre_p,
5044 post_p,
5045 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5046 fb_rvalue);
5047 }
5048 }
5049 break;
5050
5051 case VECTOR_TYPE:
5052 {
5053 unsigned HOST_WIDE_INT ix;
5054 constructor_elt *ce;
5055
5056 if (notify_temp_creation)
5057 return GS_OK;
5058
5059 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5060 if (TREE_CONSTANT (ctor))
5061 {
5062 bool constant_p = true;
5063 tree value;
5064
5065 /* Even when ctor is constant, it might contain non-*_CST
5066 elements, such as addresses or trapping values like
5067 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5068 in VECTOR_CST nodes. */
5069 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5070 if (!CONSTANT_CLASS_P (value))
5071 {
5072 constant_p = false;
5073 break;
5074 }
5075
5076 if (constant_p)
5077 {
5078 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5079 break;
5080 }
5081
5082 TREE_CONSTANT (ctor) = 0;
5083 }
5084
5085 /* Vector types use CONSTRUCTOR all the way through gimple
5086 compilation as a general initializer. */
5087 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5088 {
5089 enum gimplify_status tret;
5090 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5091 fb_rvalue);
5092 if (tret == GS_ERROR)
5093 ret = GS_ERROR;
5094 else if (TREE_STATIC (ctor)
5095 && !initializer_constant_valid_p (ce->value,
5096 TREE_TYPE (ce->value)))
5097 TREE_STATIC (ctor) = 0;
5098 }
5099 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5100 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5101 }
5102 break;
5103
5104 default:
5105 /* So how did we get a CONSTRUCTOR for a scalar type? */
5106 gcc_unreachable ();
5107 }
5108
5109 if (ret == GS_ERROR)
5110 return GS_ERROR;
5111 /* If we have gimplified both sides of the initializer but have
5112 not emitted an assignment, do so now. */
5113 if (*expr_p)
5114 {
5115 tree lhs = TREE_OPERAND (*expr_p, 0);
5116 tree rhs = TREE_OPERAND (*expr_p, 1);
5117 if (want_value && object == lhs)
5118 lhs = unshare_expr (lhs);
5119 gassign *init = gimple_build_assign (lhs, rhs);
5120 gimplify_seq_add_stmt (pre_p, init);
5121 }
5122 if (want_value)
5123 {
5124 *expr_p = object;
5125 return GS_OK;
5126 }
5127 else
5128 {
5129 *expr_p = NULL;
5130 return GS_ALL_DONE;
5131 }
5132 }
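
/* Editorial sketch, not part of GCC: the static promotion performed
   above.  A read-only local aggregate with a valid constant initializer
   is turned into an initialized static, roughly as if the user had
   written the second form below.  Names are hypothetical.  */

static int
static_promotion_example (int i)
{
  /* const int tbl[4] = { 1, 2, 3, 4 };        -- as written */
  static const int tbl[4] = { 1, 2, 3, 4 };    /* -- as gimplified */
  return tbl[i & 3];
}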
5133
5134 /* Given a pointer value OP0, return a simplified version of an
5135 indirection through OP0, or NULL_TREE if no simplification is
5136 possible. This may only be applied to a rhs of an expression.
5137 Note that the resulting type may be different from the type pointed
5138 to in the sense that it is still compatible from the langhooks
5139 point of view. */
5140
5141 static tree
5142 gimple_fold_indirect_ref_rhs (tree t)
5143 {
5144 return gimple_fold_indirect_ref (t);
5145 }
5146
5147 /* Subroutine of gimplify_modify_expr to do simplifications of
5148 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5149 something changes. */
5150
5151 static enum gimplify_status
5152 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5153 gimple_seq *pre_p, gimple_seq *post_p,
5154 bool want_value)
5155 {
5156 enum gimplify_status ret = GS_UNHANDLED;
5157 bool changed;
5158
5159 do
5160 {
5161 changed = false;
5162 switch (TREE_CODE (*from_p))
5163 {
5164 case VAR_DECL:
5165 /* If we're assigning from a read-only variable initialized with
5166 a constructor, do the direct assignment from the constructor,
5167 but only if neither source nor target is volatile, since this
5168 latter assignment might end up being done on a per-field basis. */
5169 if (DECL_INITIAL (*from_p)
5170 && TREE_READONLY (*from_p)
5171 && !TREE_THIS_VOLATILE (*from_p)
5172 && !TREE_THIS_VOLATILE (*to_p)
5173 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5174 {
5175 tree old_from = *from_p;
5176 enum gimplify_status subret;
5177
5178 /* Move the constructor into the RHS. */
5179 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5180
5181 /* Let's see if gimplify_init_constructor will need to put
5182 it in memory. */
5183 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5184 false, true);
5185 if (subret == GS_ERROR)
5186 {
5187 /* If so, revert the change. */
5188 *from_p = old_from;
5189 }
5190 else
5191 {
5192 ret = GS_OK;
5193 changed = true;
5194 }
5195 }
5196 break;
5197 case INDIRECT_REF:
5198 {
5199 /* If we have code like
5200
5201 *(const A*)(A*)&x
5202
5203 where the type of "x" is a (possibly cv-qualified variant
5204 of "A"), treat the entire expression as identical to "x".
5205 This kind of code arises in C++ when an object is bound
5206 to a const reference, and if "x" is a TARGET_EXPR we want
5207 to take advantage of the optimization below. */
5208 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5209 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5210 if (t)
5211 {
5212 if (TREE_THIS_VOLATILE (t) != volatile_p)
5213 {
5214 if (DECL_P (t))
5215 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5216 build_fold_addr_expr (t));
5217 if (REFERENCE_CLASS_P (t))
5218 TREE_THIS_VOLATILE (t) = volatile_p;
5219 }
5220 *from_p = t;
5221 ret = GS_OK;
5222 changed = true;
5223 }
5224 break;
5225 }
5226
5227 case TARGET_EXPR:
5228 {
5229 /* If we are initializing something from a TARGET_EXPR, strip the
5230 TARGET_EXPR and initialize it directly, if possible. This can't
5231 be done if the initializer is void, since that implies that the
5232 temporary is set in some non-trivial way.
5233
5234 ??? What about code that pulls out the temp and uses it
5235 elsewhere? I think that such code never uses the TARGET_EXPR as
5236 an initializer. If I'm wrong, we'll die because the temp won't
5237 have any RTL. In that case, I guess we'll need to replace
5238 references somehow. */
5239 tree init = TARGET_EXPR_INITIAL (*from_p);
5240
5241 if (init
5242 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5243 || !TARGET_EXPR_NO_ELIDE (*from_p))
5244 && !VOID_TYPE_P (TREE_TYPE (init)))
5245 {
5246 *from_p = init;
5247 ret = GS_OK;
5248 changed = true;
5249 }
5250 }
5251 break;
5252
5253 case COMPOUND_EXPR:
5254 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5255 caught. */
5256 gimplify_compound_expr (from_p, pre_p, true);
5257 ret = GS_OK;
5258 changed = true;
5259 break;
5260
5261 case CONSTRUCTOR:
5262 /* If we already made some changes, let the front end have a
5263 crack at this before we break it down. */
5264 if (ret != GS_UNHANDLED)
5265 break;
5266 /* If we're initializing from a CONSTRUCTOR, break this into
5267 individual MODIFY_EXPRs. */
5268 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5269 false);
5270
5271 case COND_EXPR:
5272 /* If we're assigning to a non-register type, push the assignment
5273 down into the branches. This is mandatory for ADDRESSABLE types,
5274 since we cannot generate temporaries for such, but it saves a
5275 copy in other cases as well. */
5276 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5277 {
5278 /* This code should mirror the code in gimplify_cond_expr. */
5279 enum tree_code code = TREE_CODE (*expr_p);
5280 tree cond = *from_p;
5281 tree result = *to_p;
5282
5283 ret = gimplify_expr (&result, pre_p, post_p,
5284 is_gimple_lvalue, fb_lvalue);
5285 if (ret != GS_ERROR)
5286 ret = GS_OK;
5287
5288 /* If we are going to write RESULT more than once, clear
5289 TREE_READONLY flag, otherwise we might incorrectly promote
5290 the variable to static const and initialize it at compile
5291 time in one of the branches. */
5292 if (VAR_P (result)
5293 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5294 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5295 TREE_READONLY (result) = 0;
5296 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5297 TREE_OPERAND (cond, 1)
5298 = build2 (code, void_type_node, result,
5299 TREE_OPERAND (cond, 1));
5300 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5301 TREE_OPERAND (cond, 2)
5302 = build2 (code, void_type_node, unshare_expr (result),
5303 TREE_OPERAND (cond, 2));
5304
5305 TREE_TYPE (cond) = void_type_node;
5306 recalculate_side_effects (cond);
5307
5308 if (want_value)
5309 {
5310 gimplify_and_add (cond, pre_p);
5311 *expr_p = unshare_expr (result);
5312 }
5313 else
5314 *expr_p = cond;
5315 return ret;
5316 }
5317 break;
5318
5319 case CALL_EXPR:
5320 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5321 return slot so that we don't generate a temporary. */
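/* Sketch: for "s = f ();" with S returned in memory, setting the return
   slot lets f construct its result directly into "s" rather than into a
   temporary that would then have to be block-copied. */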
5322 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5323 && aggregate_value_p (*from_p, *from_p))
5324 {
5325 bool use_target;
5326
5327 if (!(rhs_predicate_for (*to_p))(*from_p))
5328 /* If we need a temporary, *to_p isn't accurate. */
5329 use_target = false;
5330 /* It's OK to use the return slot directly unless it's an NRV. */
5331 else if (TREE_CODE (*to_p) == RESULT_DECL
5332 && DECL_NAME (*to_p) == NULL_TREE
5333 && needs_to_live_in_memory (*to_p))
5334 use_target = true;
5335 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5336 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5337 /* Don't force regs into memory. */
5338 use_target = false;
5339 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5340 /* It's OK to use the target directly if it's being
5341 initialized. */
5342 use_target = true;
5343 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5344 != INTEGER_CST)
5345 /* Always use the target, and thus the return slot opt (RSO), for
5346 variable-sized types. GIMPLE cannot deal with a variable-sized
5347 assignment embedded in a call statement. */
5348 use_target = true;
5349 else if (TREE_CODE (*to_p) != SSA_NAME
5350 && (!is_gimple_variable (*to_p)
5351 || needs_to_live_in_memory (*to_p)))
5352 /* Don't use the original target if it's already addressable;
5353 if its address escapes, and the called function uses the
5354 NRV optimization, a conforming program could see *to_p
5355 change before the called function returns; see c++/19317.
5356 When optimizing, the return_slot pass marks more functions
5357 as safe after we have escape info. */
5358 use_target = false;
5359 else
5360 use_target = true;
5361
5362 if (use_target)
5363 {
5364 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5365 mark_addressable (*to_p);
5366 }
5367 }
5368 break;
5369
5370 case WITH_SIZE_EXPR:
5371 /* Likewise for calls that return an aggregate of non-constant size,
5372 since we would not be able to generate a temporary at all. */
5373 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5374 {
5375 *from_p = TREE_OPERAND (*from_p, 0);
5376 /* We don't change ret in this case because the
5377 WITH_SIZE_EXPR might have been added in
5378 gimplify_modify_expr, so returning GS_OK would lead to an
5379 infinite loop. */
5380 changed = true;
5381 }
5382 break;
5383
5384 /* If we're initializing from a container, push the initialization
5385 inside it. */
5386 case CLEANUP_POINT_EXPR:
5387 case BIND_EXPR:
5388 case STATEMENT_LIST:
5389 {
5390 tree wrap = *from_p;
5391 tree t;
5392
5393 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5394 fb_lvalue);
5395 if (ret != GS_ERROR)
5396 ret = GS_OK;
5397
5398 t = voidify_wrapper_expr (wrap, *expr_p);
5399 gcc_assert (t == *expr_p);
5400
5401 if (want_value)
5402 {
5403 gimplify_and_add (wrap, pre_p);
5404 *expr_p = unshare_expr (*to_p);
5405 }
5406 else
5407 *expr_p = wrap;
5408 return GS_OK;
5409 }
5410
5411 case COMPOUND_LITERAL_EXPR:
5412 {
5413 tree complit = TREE_OPERAND (*expr_p, 1);
5414 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5415 tree decl = DECL_EXPR_DECL (decl_s);
5416 tree init = DECL_INITIAL (decl);
5417
5418 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5419 into struct T x = { 0, 1, 2 } if the address of the
5420 compound literal has never been taken. */
5421 if (!TREE_ADDRESSABLE (complit)
5422 && !TREE_ADDRESSABLE (decl)
5423 && init)
5424 {
5425 *expr_p = copy_node (*expr_p);
5426 TREE_OPERAND (*expr_p, 1) = init;
5427 return GS_OK;
5428 }
5429 }
5430
5431 default:
5432 break;
5433 }
5434 }
5435 while (changed);
5436
5437 return ret;
5438 }
5439
5440
5441 /* Return true if T looks like a valid GIMPLE statement. */
5442
5443 static bool
5444 is_gimple_stmt (tree t)
5445 {
5446 const enum tree_code code = TREE_CODE (t);
5447
5448 switch (code)
5449 {
5450 case NOP_EXPR:
5451 /* The only valid NOP_EXPR is the empty statement. */
5452 return IS_EMPTY_STMT (t);
5453
5454 case BIND_EXPR:
5455 case COND_EXPR:
5456 /* These are only valid if they're void. */
5457 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5458
5459 case SWITCH_EXPR:
5460 case GOTO_EXPR:
5461 case RETURN_EXPR:
5462 case LABEL_EXPR:
5463 case CASE_LABEL_EXPR:
5464 case TRY_CATCH_EXPR:
5465 case TRY_FINALLY_EXPR:
5466 case EH_FILTER_EXPR:
5467 case CATCH_EXPR:
5468 case ASM_EXPR:
5469 case STATEMENT_LIST:
5470 case OACC_PARALLEL:
5471 case OACC_KERNELS:
5472 case OACC_DATA:
5473 case OACC_HOST_DATA:
5474 case OACC_DECLARE:
5475 case OACC_UPDATE:
5476 case OACC_ENTER_DATA:
5477 case OACC_EXIT_DATA:
5478 case OACC_CACHE:
5479 case OMP_PARALLEL:
5480 case OMP_FOR:
5481 case OMP_SIMD:
5482 case OMP_DISTRIBUTE:
5483 case OACC_LOOP:
5484 case OMP_SECTIONS:
5485 case OMP_SECTION:
5486 case OMP_SINGLE:
5487 case OMP_MASTER:
5488 case OMP_TASKGROUP:
5489 case OMP_ORDERED:
5490 case OMP_CRITICAL:
5491 case OMP_TASK:
5492 case OMP_TARGET:
5493 case OMP_TARGET_DATA:
5494 case OMP_TARGET_UPDATE:
5495 case OMP_TARGET_ENTER_DATA:
5496 case OMP_TARGET_EXIT_DATA:
5497 case OMP_TASKLOOP:
5498 case OMP_TEAMS:
5499 /* These are always void. */
5500 return true;
5501
5502 case CALL_EXPR:
5503 case MODIFY_EXPR:
5504 case PREDICT_EXPR:
5505 /* These are valid regardless of their type. */
5506 return true;
5507
5508 default:
5509 return false;
5510 }
5511 }
5512
5513
5514 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5515 a MODIFY_EXPR whose LHS is a REAL/IMAGPART_EXPR of a variable with
5516 DECL_GIMPLE_REG_P set.
5517
5518 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5519 other, unmodified part of the complex object just before the total store.
5520 As a consequence, if the object is still uninitialized, an undefined value
5521 will be loaded into a register, which may result in a spurious exception
5522 if the register is floating-point and the value happens to be a signaling
5523 NaN for example. Then the fully-fledged complex operations lowering pass
5524 followed by a DCE pass are necessary in order to fix things up. */
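/* Illustrative sketch: "__real__ c = x" becomes roughly
     tmp = __imag__ c;
     c = COMPLEX_EXPR <x, tmp>;
   so that "c" is always written as a whole. */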
5525
5526 static enum gimplify_status
5527 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5528 bool want_value)
5529 {
5530 enum tree_code code, ocode;
5531 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5532
5533 lhs = TREE_OPERAND (*expr_p, 0);
5534 rhs = TREE_OPERAND (*expr_p, 1);
5535 code = TREE_CODE (lhs);
5536 lhs = TREE_OPERAND (lhs, 0);
5537
5538 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5539 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5540 TREE_NO_WARNING (other) = 1;
5541 other = get_formal_tmp_var (other, pre_p);
5542
5543 realpart = code == REALPART_EXPR ? rhs : other;
5544 imagpart = code == REALPART_EXPR ? other : rhs;
5545
5546 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5547 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5548 else
5549 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5550
5551 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5552 *expr_p = (want_value) ? rhs : NULL_TREE;
5553
5554 return GS_ALL_DONE;
5555 }
5556
5557 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5558
5559 modify_expr
5560 : varname '=' rhs
5561 | '*' ID '=' rhs
5562
5563 PRE_P points to the list where side effects that must happen before
5564 *EXPR_P should be stored.
5565
5566 POST_P points to the list where side effects that must happen after
5567 *EXPR_P should be stored.
5568
5569 WANT_VALUE is nonzero iff we want to use the value of this expression
5570 in another expression. */
5571
5572 static enum gimplify_status
5573 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5574 bool want_value)
5575 {
5576 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5577 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5578 enum gimplify_status ret = GS_UNHANDLED;
5579 gimple *assign;
5580 location_t loc = EXPR_LOCATION (*expr_p);
5581 gimple_stmt_iterator gsi;
5582
5583 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5584 || TREE_CODE (*expr_p) == INIT_EXPR);
5585
5586 /* Trying to simplify a clobber using normal logic doesn't work,
5587 so handle it here. */
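/* A clobber marks the end of a variable's lifetime; in GIMPLE dumps it
   appears roughly as "x ={v} {CLOBBER};". */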
5588 if (TREE_CLOBBER_P (*from_p))
5589 {
5590 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5591 if (ret == GS_ERROR)
5592 return ret;
5593 gcc_assert (!want_value);
5594 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5595 {
5596 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5597 pre_p, post_p);
5598 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5599 }
5600 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5601 *expr_p = NULL;
5602 return GS_ALL_DONE;
5603 }
5604
5605 /* Insert pointer conversions required by the middle-end that are not
5606 required by the frontend. This fixes middle-end type checking in,
5607 for example, gcc.dg/redecl-6.c. */
5608 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5609 {
5610 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5611 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5612 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5613 }
5614
5615 /* See if any simplifications can be done based on what the RHS is. */
5616 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5617 want_value);
5618 if (ret != GS_UNHANDLED)
5619 return ret;
5620
5621 /* For zero-sized types, only gimplify the left hand side and right hand
5622 side as statements and throw away the assignment. Do this after
5623 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5624 types properly. */
5625 if (zero_sized_type (TREE_TYPE (*from_p))
5626 && !want_value
5627 /* Don't do this for calls that return addressable types, expand_call
5628 relies on those having a lhs. */
5629 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5630 && TREE_CODE (*from_p) == CALL_EXPR))
5631 {
5632 gimplify_stmt (from_p, pre_p);
5633 gimplify_stmt (to_p, pre_p);
5634 *expr_p = NULL_TREE;
5635 return GS_ALL_DONE;
5636 }
5637
5638 /* If the value being copied is of variable width, compute the length
5639 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5640 before gimplifying any of the operands so that we can resolve any
5641 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5642 the size of the expression to be copied, not of the destination, so
5643 that is what we must do here. */
5644 maybe_with_size_expr (from_p);
5645
5646 /* As a special case, we have to temporarily allow for assignments
5647 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5648 a toplevel statement, when gimplifying the GENERIC expression
5649 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5650 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5651
5652 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5653 prevent gimplify_expr from trying to create a new temporary for
5654 foo's LHS, we tell it that it should only gimplify until it
5655 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5656 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5657 and all we need to do here is set 'a' to be its LHS. */
5658
5659 /* Gimplify the RHS first for C++17 and bug 71104. */
5660 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5661 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5662 if (ret == GS_ERROR)
5663 return ret;
5664
5665 /* Then gimplify the LHS. */
5666 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5667 twice we have to make sure to gimplify into non-SSA as otherwise
5668 the abnormal edge added later will make those defs not dominate
5669 their uses.
5670 ??? Technically this applies only to the registers used in the
5671 resulting non-register *TO_P. */
5672 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5673 if (saved_into_ssa
5674 && TREE_CODE (*from_p) == CALL_EXPR
5675 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5676 gimplify_ctxp->into_ssa = false;
5677 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5678 gimplify_ctxp->into_ssa = saved_into_ssa;
5679 if (ret == GS_ERROR)
5680 return ret;
5681
5682 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5683 guess for the predicate was wrong. */
5684 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5685 if (final_pred != initial_pred)
5686 {
5687 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5688 if (ret == GS_ERROR)
5689 return ret;
5690 }
5691
5692 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
5693 size as an argument to the call. */
5694 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5695 {
5696 tree call = TREE_OPERAND (*from_p, 0);
5697 tree vlasize = TREE_OPERAND (*from_p, 1);
5698
5699 if (TREE_CODE (call) == CALL_EXPR
5700 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5701 {
5702 int nargs = call_expr_nargs (call);
5703 tree type = TREE_TYPE (call);
5704 tree ap = CALL_EXPR_ARG (call, 0);
5705 tree tag = CALL_EXPR_ARG (call, 1);
5706 tree aptag = CALL_EXPR_ARG (call, 2);
5707 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5708 IFN_VA_ARG, type,
5709 nargs + 1, ap, tag,
5710 aptag, vlasize);
5711 TREE_OPERAND (*from_p, 0) = newcall;
5712 }
5713 }
5714
5715 /* Now see if the above changed *from_p to something we handle specially. */
5716 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5717 want_value);
5718 if (ret != GS_UNHANDLED)
5719 return ret;
5720
5721 /* If we've got a variable-sized assignment between two lvalues (i.e. one
5722 that does not involve a call), then we can make things a bit more
5723 straightforward by converting the assignment to memcpy or memset. */
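/* Sketch: a variable-sized "*p = *q" of SIZE bytes becomes roughly
   "memcpy (p, q, SIZE)", and an empty-CONSTRUCTOR store becomes
   "memset (p, 0, SIZE)" (see the helpers called below). */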
5724 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5725 {
5726 tree from = TREE_OPERAND (*from_p, 0);
5727 tree size = TREE_OPERAND (*from_p, 1);
5728
5729 if (TREE_CODE (from) == CONSTRUCTOR)
5730 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5731
5732 if (is_gimple_addressable (from))
5733 {
5734 *from_p = from;
5735 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5736 pre_p);
5737 }
5738 }
5739
5740 /* Transform partial stores to non-addressable complex variables into
5741 total stores. This allows us to use real instead of virtual operands
5742 for these variables, which improves optimization. */
5743 if ((TREE_CODE (*to_p) == REALPART_EXPR
5744 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5745 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5746 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5747
5748 /* Try to alleviate the effects of the gimplification creating artificial
5749 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5750 make sure not to create DECL_DEBUG_EXPR links across functions. */
5751 if (!gimplify_ctxp->into_ssa
5752 && VAR_P (*from_p)
5753 && DECL_IGNORED_P (*from_p)
5754 && DECL_P (*to_p)
5755 && !DECL_IGNORED_P (*to_p)
5756 && decl_function_context (*to_p) == current_function_decl
5757 && decl_function_context (*from_p) == current_function_decl)
5758 {
5759 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5760 DECL_NAME (*from_p)
5761 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5762 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5763 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5764 }
5765
5766 if (want_value && TREE_THIS_VOLATILE (*to_p))
5767 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5768
5769 if (TREE_CODE (*from_p) == CALL_EXPR)
5770 {
5771 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5772 instead of a GIMPLE_ASSIGN. */
5773 gcall *call_stmt;
5774 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5775 {
5776 /* Gimplify internal functions created in the FEs. */
5777 int nargs = call_expr_nargs (*from_p), i;
5778 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5779 auto_vec<tree> vargs (nargs);
5780
5781 for (i = 0; i < nargs; i++)
5782 {
5783 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5784 EXPR_LOCATION (*from_p));
5785 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5786 }
5787 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5788 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5789 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5790 }
5791 else
5792 {
5793 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5794 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5795 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5796 tree fndecl = get_callee_fndecl (*from_p);
5797 if (fndecl
5798 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
5799 && call_expr_nargs (*from_p) == 3)
5800 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5801 CALL_EXPR_ARG (*from_p, 0),
5802 CALL_EXPR_ARG (*from_p, 1),
5803 CALL_EXPR_ARG (*from_p, 2));
5804 else
5805 {
5806 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5807 }
5808 }
5809 notice_special_calls (call_stmt);
5810 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5811 gimple_call_set_lhs (call_stmt, *to_p);
5812 else if (TREE_CODE (*to_p) == SSA_NAME)
5813 /* The above is somewhat premature; avoid ICEing later for an
5814 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5815 ??? This doesn't make it a default-def. */
5816 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5817
5818 assign = call_stmt;
5819 }
5820 else
5821 {
5822 assign = gimple_build_assign (*to_p, *from_p);
5823 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5824 if (COMPARISON_CLASS_P (*from_p))
5825 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5826 }
5827
5828 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5829 {
5830 /* We should have got an SSA name from the start. */
5831 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5832 || ! gimple_in_ssa_p (cfun));
5833 }
5834
5835 gimplify_seq_add_stmt (pre_p, assign);
5836 gsi = gsi_last (*pre_p);
5837 maybe_fold_stmt (&gsi);
5838
5839 if (want_value)
5840 {
5841 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5842 return GS_OK;
5843 }
5844 else
5845 *expr_p = NULL;
5846
5847 return GS_ALL_DONE;
5848 }
5849
5850 /* Gimplify a comparison between two variable-sized objects. Do this
5851 with a call to BUILT_IN_MEMCMP. */
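/* Sketch: for variable-sized operands, "a == b" becomes roughly
   "memcmp (&a, &b, size) == 0", with any PLACEHOLDER_EXPRs in the size
   resolved against the first operand. */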
5852
5853 static enum gimplify_status
5854 gimplify_variable_sized_compare (tree *expr_p)
5855 {
5856 location_t loc = EXPR_LOCATION (*expr_p);
5857 tree op0 = TREE_OPERAND (*expr_p, 0);
5858 tree op1 = TREE_OPERAND (*expr_p, 1);
5859 tree t, arg, dest, src, expr;
5860
5861 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5862 arg = unshare_expr (arg);
5863 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5864 src = build_fold_addr_expr_loc (loc, op1);
5865 dest = build_fold_addr_expr_loc (loc, op0);
5866 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5867 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5868
5869 expr
5870 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5871 SET_EXPR_LOCATION (expr, loc);
5872 *expr_p = expr;
5873
5874 return GS_OK;
5875 }
5876
5877 /* Gimplify a comparison between two aggregate objects of integral scalar
5878 mode as a comparison between the bitwise equivalent scalar values. */
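/* Illustrative sketch: on a typical 64-bit target, comparing two 8-byte
   structs with an integer TYPE_MODE becomes a comparison of two
   VIEW_CONVERT_EXPRs to a 64-bit integer type, i.e. a single scalar
   comparison instead of a library call. */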
5879
5880 static enum gimplify_status
5881 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5882 {
5883 location_t loc = EXPR_LOCATION (*expr_p);
5884 tree op0 = TREE_OPERAND (*expr_p, 0);
5885 tree op1 = TREE_OPERAND (*expr_p, 1);
5886
5887 tree type = TREE_TYPE (op0);
5888 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5889
5890 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5891 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5892
5893 *expr_p
5894 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5895
5896 return GS_OK;
5897 }
5898
5899 /* Gimplify an expression sequence. This function gimplifies each
5900 expression and rewrites the original expression with the last
5901 expression of the sequence in GIMPLE form.
5902
5903 PRE_P points to the list where the side effects for all the
5904 expressions in the sequence will be emitted.
5905
5906 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
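/* Sketch: for "(a, b, c)" the statements for "a" and "b" are emitted to
   PRE_P and *EXPR_P is rewritten to "c"; when !WANT_VALUE, "c" is
   emitted as a statement as well. */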
5907
5908 static enum gimplify_status
5909 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5910 {
5911 tree t = *expr_p;
5912
5913 do
5914 {
5915 tree *sub_p = &TREE_OPERAND (t, 0);
5916
5917 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5918 gimplify_compound_expr (sub_p, pre_p, false);
5919 else
5920 gimplify_stmt (sub_p, pre_p);
5921
5922 t = TREE_OPERAND (t, 1);
5923 }
5924 while (TREE_CODE (t) == COMPOUND_EXPR);
5925
5926 *expr_p = t;
5927 if (want_value)
5928 return GS_OK;
5929 else
5930 {
5931 gimplify_stmt (expr_p, pre_p);
5932 return GS_ALL_DONE;
5933 }
5934 }
5935
5936 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5937 gimplify. After gimplification, EXPR_P will point to a new temporary
5938 that holds the original value of the SAVE_EXPR node.
5939
5940 PRE_P points to the list where side effects that must happen before
5941 *EXPR_P should be stored. */
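/* Sketch: the first time SAVE_EXPR <x + y> is gimplified, "tmp = x + y"
   is emitted to PRE_P and the node is marked resolved; subsequent
   occurrences of the same node simply reuse "tmp". */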
5942
5943 static enum gimplify_status
5944 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5945 {
5946 enum gimplify_status ret = GS_ALL_DONE;
5947 tree val;
5948
5949 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5950 val = TREE_OPERAND (*expr_p, 0);
5951
5952 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5953 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5954 {
5955 /* The operand may be a void-valued expression. It is
5956 being executed only for its side-effects. */
5957 if (TREE_TYPE (val) == void_type_node)
5958 {
5959 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5960 is_gimple_stmt, fb_none);
5961 val = NULL;
5962 }
5963 else
5964 /* The temporary may not be an SSA name as later abnormal and EH
5965 control flow may invalidate use/def domination. When in SSA
5966 form then assume there are no such issues and SAVE_EXPRs only
5967 appear via GENERIC foldings. */
5968 val = get_initialized_tmp_var (val, pre_p, post_p,
5969 gimple_in_ssa_p (cfun));
5970
5971 TREE_OPERAND (*expr_p, 0) = val;
5972 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5973 }
5974
5975 *expr_p = val;
5976
5977 return ret;
5978 }
5979
5980 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5981
5982 unary_expr
5983 : ...
5984 | '&' varname
5985 ...
5986
5987 PRE_P points to the list where side effects that must happen before
5988 *EXPR_P should be stored.
5989
5990 POST_P points to the list where side effects that must happen after
5991 *EXPR_P should be stored. */
5992
5993 static enum gimplify_status
5994 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5995 {
5996 tree expr = *expr_p;
5997 tree op0 = TREE_OPERAND (expr, 0);
5998 enum gimplify_status ret;
5999 location_t loc = EXPR_LOCATION (*expr_p);
6000
6001 switch (TREE_CODE (op0))
6002 {
6003 case INDIRECT_REF:
6004 do_indirect_ref:
6005 /* Check if we are dealing with an expression of the form '&*ptr'.
6006 While the front end folds away '&*ptr' into 'ptr', these
6007 expressions may be generated internally by the compiler (e.g.,
6008 builtins like __builtin_va_end). */
6009 /* Caution: the silent array decomposition semantics we allow for
6010 ADDR_EXPR means we can't always discard the pair. */
6011 /* Gimplification of the ADDR_EXPR operand may drop
6012 cv-qualification conversions, so make sure we add them if
6013 needed. */
6014 {
6015 tree op00 = TREE_OPERAND (op0, 0);
6016 tree t_expr = TREE_TYPE (expr);
6017 tree t_op00 = TREE_TYPE (op00);
6018
6019 if (!useless_type_conversion_p (t_expr, t_op00))
6020 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6021 *expr_p = op00;
6022 ret = GS_OK;
6023 }
6024 break;
6025
6026 case VIEW_CONVERT_EXPR:
6027 /* Take the address of our operand and then convert it to the type of
6028 this ADDR_EXPR.
6029
6030 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6031 all clear. The impact of this transformation is even less clear. */
6032
6033 /* If the operand is a useless conversion, look through it. Doing so
6034 guarantees that the ADDR_EXPR and its operand will remain of the
6035 same type. */
6036 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6037 op0 = TREE_OPERAND (op0, 0);
6038
6039 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6040 build_fold_addr_expr_loc (loc,
6041 TREE_OPERAND (op0, 0)));
6042 ret = GS_OK;
6043 break;
6044
6045 case MEM_REF:
6046 if (integer_zerop (TREE_OPERAND (op0, 1)))
6047 goto do_indirect_ref;
6048
6049 /* fall through */
6050
6051 default:
6052 /* If we see a call to a declared builtin or see its address
6053 being taken (we can unify those cases here) then we can mark
6054 the builtin for implicit generation by GCC. */
6055 if (TREE_CODE (op0) == FUNCTION_DECL
6056 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6057 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6058 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6059
6060 /* We use fb_either here because the C frontend sometimes takes
6061 the address of a call that returns a struct; see
6062 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6063 the implied temporary explicit. */
6064
6065 /* Make the operand addressable. */
6066 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6067 is_gimple_addressable, fb_either);
6068 if (ret == GS_ERROR)
6069 break;
6070
6071 /* Then mark it. Beware that it may not be possible to do so directly
6072 if a temporary has been created by the gimplification. */
6073 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6074
6075 op0 = TREE_OPERAND (expr, 0);
6076
6077 /* For various reasons, the gimplification of the expression
6078 may have made a new INDIRECT_REF. */
6079 if (TREE_CODE (op0) == INDIRECT_REF)
6080 goto do_indirect_ref;
6081
6082 mark_addressable (TREE_OPERAND (expr, 0));
6083
6084 /* The FEs may end up building ADDR_EXPRs early on a decl with
6085 an incomplete type. Re-build ADDR_EXPRs in canonical form
6086 here. */
6087 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6088 *expr_p = build_fold_addr_expr (op0);
6089
6090 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6091 recompute_tree_invariant_for_addr_expr (*expr_p);
6092
6093 /* If we re-built the ADDR_EXPR add a conversion to the original type
6094 if required. */
6095 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6096 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6097
6098 break;
6099 }
6100
6101 return ret;
6102 }
6103
6104 /* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
6105 values; output operands should be gimple lvalues. */
6106
6107 static enum gimplify_status
6108 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6109 {
6110 tree expr;
6111 int noutputs;
6112 const char **oconstraints;
6113 int i;
6114 tree link;
6115 const char *constraint;
6116 bool allows_mem, allows_reg, is_inout;
6117 enum gimplify_status ret, tret;
6118 gasm *stmt;
6119 vec<tree, va_gc> *inputs;
6120 vec<tree, va_gc> *outputs;
6121 vec<tree, va_gc> *clobbers;
6122 vec<tree, va_gc> *labels;
6123 tree link_next;
6124
6125 expr = *expr_p;
6126 noutputs = list_length (ASM_OUTPUTS (expr));
6127 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6128
6129 inputs = NULL;
6130 outputs = NULL;
6131 clobbers = NULL;
6132 labels = NULL;
6133
6134 ret = GS_ALL_DONE;
6135 link_next = NULL_TREE;
6136 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6137 {
6138 bool ok;
6139 size_t constraint_len;
6140
6141 link_next = TREE_CHAIN (link);
6142
6143 oconstraints[i]
6144 = constraint
6145 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6146 constraint_len = strlen (constraint);
6147 if (constraint_len == 0)
6148 continue;
6149
6150 ok = parse_output_constraint (&constraint, i, 0, 0,
6151 &allows_mem, &allows_reg, &is_inout);
6152 if (!ok)
6153 {
6154 ret = GS_ERROR;
6155 is_inout = false;
6156 }
6157
6158 /* If we can't make copies, we can only accept memory. */
6159 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6160 {
6161 if (allows_mem)
6162 allows_reg = 0;
6163 else
6164 {
6165 error ("impossible constraint in %<asm%>");
6166 error ("non-memory output %d must stay in memory", i);
6167 return GS_ERROR;
6168 }
6169 }
6170
6171 if (!allows_reg && allows_mem)
6172 mark_addressable (TREE_VALUE (link));
6173
6174 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6175 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6176 fb_lvalue | fb_mayfail);
6177 if (tret == GS_ERROR)
6178 {
6179 error ("invalid lvalue in asm output %d", i);
6180 ret = tret;
6181 }
6182
6183 /* If the constraint does not allow memory, make sure we gimplify
6184 it to a register if it is not one already but its base is. This
6185 happens for complex and vector components. */
6186 if (!allows_mem)
6187 {
6188 tree op = TREE_VALUE (link);
6189 if (! is_gimple_val (op)
6190 && is_gimple_reg_type (TREE_TYPE (op))
6191 && is_gimple_reg (get_base_address (op)))
6192 {
6193 tree tem = create_tmp_reg (TREE_TYPE (op));
6194 tree ass;
6195 if (is_inout)
6196 {
6197 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6198 tem, unshare_expr (op));
6199 gimplify_and_add (ass, pre_p);
6200 }
6201 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6202 gimplify_and_add (ass, post_p);
6203
6204 TREE_VALUE (link) = tem;
6205 tret = GS_OK;
6206 }
6207 }
6208
6209 vec_safe_push (outputs, link);
6210 TREE_CHAIN (link) = NULL_TREE;
6211
6212 if (is_inout)
6213 {
6214 /* An input/output operand. To give the optimizers more
6215 flexibility, split it into separate input and output
6216 operands. */
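/* Sketch: constraint "+r" on output 0 becomes output "=r" plus a
   matching input "0"; with alternatives, "+m,r" becomes output "=m,r"
   plus input "m,0", since only register alternatives are replaced by
   the operand number. */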
6217 tree input;
6218 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6219 char buf[11];
6220
6221 /* Turn the in/out constraint into an output constraint. */
6222 char *p = xstrdup (constraint);
6223 p[0] = '=';
6224 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6225
6226 /* And add a matching input constraint. */
6227 if (allows_reg)
6228 {
6229 sprintf (buf, "%u", i);
6230
6231 /* If there are multiple alternatives in the constraint,
6232 handle each of them individually. Those that allow a register
6233 will be replaced with the operand number; the others will stay
6234 unchanged. */
6235 if (strchr (p, ',') != NULL)
6236 {
6237 size_t len = 0, buflen = strlen (buf);
6238 char *beg, *end, *str, *dst;
6239
6240 for (beg = p + 1;;)
6241 {
6242 end = strchr (beg, ',');
6243 if (end == NULL)
6244 end = strchr (beg, '\0');
6245 if ((size_t) (end - beg) < buflen)
6246 len += buflen + 1;
6247 else
6248 len += end - beg + 1;
6249 if (*end)
6250 beg = end + 1;
6251 else
6252 break;
6253 }
6254
6255 str = (char *) alloca (len);
6256 for (beg = p + 1, dst = str;;)
6257 {
6258 const char *tem;
6259 bool mem_p, reg_p, inout_p;
6260
6261 end = strchr (beg, ',');
6262 if (end)
6263 *end = '\0';
6264 beg[-1] = '=';
6265 tem = beg - 1;
6266 parse_output_constraint (&tem, i, 0, 0,
6267 &mem_p, &reg_p, &inout_p);
6268 if (dst != str)
6269 *dst++ = ',';
6270 if (reg_p)
6271 {
6272 memcpy (dst, buf, buflen);
6273 dst += buflen;
6274 }
6275 else
6276 {
6277 if (end)
6278 len = end - beg;
6279 else
6280 len = strlen (beg);
6281 memcpy (dst, beg, len);
6282 dst += len;
6283 }
6284 if (end)
6285 beg = end + 1;
6286 else
6287 break;
6288 }
6289 *dst = '\0';
6290 input = build_string (dst - str, str);
6291 }
6292 else
6293 input = build_string (strlen (buf), buf);
6294 }
6295 else
6296 input = build_string (constraint_len - 1, constraint + 1);
6297
6298 free (p);
6299
6300 input = build_tree_list (build_tree_list (NULL_TREE, input),
6301 unshare_expr (TREE_VALUE (link)));
6302 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6303 }
6304 }
6305
6306 link_next = NULL_TREE;
6307 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6308 {
6309 link_next = TREE_CHAIN (link);
6310 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6311 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6312 oconstraints, &allows_mem, &allows_reg);
6313
6314 /* If we can't make copies, we can only accept memory. */
6315 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6316 {
6317 if (allows_mem)
6318 allows_reg = 0;
6319 else
6320 {
6321 error ("impossible constraint in %<asm%>");
6322 error ("non-memory input %d must stay in memory", i);
6323 return GS_ERROR;
6324 }
6325 }
6326
6327 /* If the operand is a memory input, it should be an lvalue. */
6328 if (!allows_reg && allows_mem)
6329 {
6330 tree inputv = TREE_VALUE (link);
6331 STRIP_NOPS (inputv);
6332 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6333 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6334 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6335 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6336 || TREE_CODE (inputv) == MODIFY_EXPR)
6337 TREE_VALUE (link) = error_mark_node;
6338 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6339 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6340 if (tret != GS_ERROR)
6341 {
6342 /* Unlike output operands, memory inputs are not guaranteed
6343 to be lvalues by the FE, and while the expressions are
6344 marked addressable there, if one is e.g. a statement
6345 expression, temporaries in it might not end up being
6346 addressable. They might already be used in the IL, and by
6347 then it is too late to make them addressable. */
6348 tree x = TREE_VALUE (link);
6349 while (handled_component_p (x))
6350 x = TREE_OPERAND (x, 0);
6351 if (TREE_CODE (x) == MEM_REF
6352 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6353 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6354 if ((VAR_P (x)
6355 || TREE_CODE (x) == PARM_DECL
6356 || TREE_CODE (x) == RESULT_DECL)
6357 && !TREE_ADDRESSABLE (x)
6358 && is_gimple_reg (x))
6359 {
6360 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6361 input_location), 0,
6362 "memory input %d is not directly addressable",
6363 i);
6364 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6365 }
6366 }
6367 mark_addressable (TREE_VALUE (link));
6368 if (tret == GS_ERROR)
6369 {
6370 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6371 "memory input %d is not directly addressable", i);
6372 ret = tret;
6373 }
6374 }
6375 else
6376 {
6377 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6378 is_gimple_asm_val, fb_rvalue);
6379 if (tret == GS_ERROR)
6380 ret = tret;
6381 }
6382
6383 TREE_CHAIN (link) = NULL_TREE;
6384 vec_safe_push (inputs, link);
6385 }
6386
6387 link_next = NULL_TREE;
6388 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6389 {
6390 link_next = TREE_CHAIN (link);
6391 TREE_CHAIN (link) = NULL_TREE;
6392 vec_safe_push (clobbers, link);
6393 }
6394
6395 link_next = NULL_TREE;
6396 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6397 {
6398 link_next = TREE_CHAIN (link);
6399 TREE_CHAIN (link) = NULL_TREE;
6400 vec_safe_push (labels, link);
6401 }
6402
6403 /* Do not add ASMs with errors to the gimple IL stream. */
6404 if (ret != GS_ERROR)
6405 {
6406 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6407 inputs, outputs, clobbers, labels);
6408
6409 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6410 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6411 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6412
6413 gimplify_seq_add_stmt (pre_p, stmt);
6414 }
6415
6416 return ret;
6417 }
6418
6419 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6420 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6421 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6422 return to this function.
6423
6424 FIXME should we complexify the prequeue handling instead? Or use flags
6425 for all the cleanups and let the optimizer tighten them up? The current
6426 code seems pretty fragile; it will break on a cleanup within any
6427 non-conditional nesting. But any such nesting would be broken, anyway;
6428 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6429 and continues out of it. We can do that at the RTL level, though, so
6430 having an optimizer to tighten up try/finally regions would be a Good
6431 Thing. */
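/* Sketch of the rewrite performed below: a gimplified body

     stmt1;
     GIMPLE_WITH_CLEANUP_EXPR <cleanup>;
     stmt2;

   becomes

     stmt1;
     try { stmt2; } finally { cleanup; }

   (a try/catch instead when the cleanup is EH-only). */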
6432
6433 static enum gimplify_status
6434 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6435 {
6436 gimple_stmt_iterator iter;
6437 gimple_seq body_sequence = NULL;
6438
6439 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6440
6441 /* We only care about the number of conditions between the innermost
6442 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6443 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6444 int old_conds = gimplify_ctxp->conditions;
6445 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6446 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6447 gimplify_ctxp->conditions = 0;
6448 gimplify_ctxp->conditional_cleanups = NULL;
6449 gimplify_ctxp->in_cleanup_point_expr = true;
6450
6451 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6452
6453 gimplify_ctxp->conditions = old_conds;
6454 gimplify_ctxp->conditional_cleanups = old_cleanups;
6455 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6456
6457 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6458 {
6459 gimple *wce = gsi_stmt (iter);
6460
6461 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6462 {
6463 if (gsi_one_before_end_p (iter))
6464 {
6465 /* Note that gsi_insert_seq_before and gsi_remove do not
6466 scan operands, unlike some other sequence mutators. */
6467 if (!gimple_wce_cleanup_eh_only (wce))
6468 gsi_insert_seq_before_without_update (&iter,
6469 gimple_wce_cleanup (wce),
6470 GSI_SAME_STMT);
6471 gsi_remove (&iter, true);
6472 break;
6473 }
6474 else
6475 {
6476 gtry *gtry;
6477 gimple_seq seq;
6478 enum gimple_try_flags kind;
6479
6480 if (gimple_wce_cleanup_eh_only (wce))
6481 kind = GIMPLE_TRY_CATCH;
6482 else
6483 kind = GIMPLE_TRY_FINALLY;
6484 seq = gsi_split_seq_after (iter);
6485
6486 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6487 /* Do not use gsi_replace here, as it may scan operands.
6488 We want to do a simple structural modification only. */
6489 gsi_set_stmt (&iter, gtry);
6490 iter = gsi_start (gtry->eval);
6491 }
6492 }
6493 else
6494 gsi_next (&iter);
6495 }
6496
6497 gimplify_seq_add_seq (pre_p, body_sequence);
6498 if (temp)
6499 {
6500 *expr_p = temp;
6501 return GS_OK;
6502 }
6503 else
6504 {
6505 *expr_p = NULL;
6506 return GS_ALL_DONE;
6507 }
6508 }
6509
6510 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6511 is the cleanup action required. EH_ONLY is true if the cleanup should
6512 only be executed if an exception is thrown, not on normal exit.
6513 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6514 only valid for clobbers. */
6515
6516 static void
6517 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6518 bool force_uncond = false)
6519 {
6520 gimple *wce;
6521 gimple_seq cleanup_stmts = NULL;
6522
6523 /* Errors can result in improperly nested cleanups, which results in
6524 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6525 if (seen_error ())
6526 return;
6527
6528 if (gimple_conditional_context ())
6529 {
6530 /* If we're in a conditional context, this is more complex. We only
6531 want to run the cleanup if we actually ran the initialization that
6532 necessitates it, but we want to run it after the end of the
6533 conditional context. So we wrap the try/finally around the
6534 condition and use a flag to determine whether or not to actually
6535 run the destructor. Thus
6536
6537 test ? f(A()) : 0
6538
6539 becomes (approximately)
6540
6541 flag = 0;
6542 try {
6543 if (test) { A::A(temp); flag = 1; val = f(temp); }
6544 else { val = 0; }
6545 } finally {
6546 if (flag) A::~A(temp);
6547 }
6548 val
6549 */
6550 if (force_uncond)
6551 {
6552 gimplify_stmt (&cleanup, &cleanup_stmts);
6553 wce = gimple_build_wce (cleanup_stmts);
6554 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6555 }
6556 else
6557 {
6558 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6559 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6560 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6561
6562 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6563 gimplify_stmt (&cleanup, &cleanup_stmts);
6564 wce = gimple_build_wce (cleanup_stmts);
6565
6566 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6567 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6568 gimplify_seq_add_stmt (pre_p, ftrue);
6569
6570 /* Because of this manipulation, and the EH edges that jump
6571 threading cannot redirect, the temporary (VAR) will appear
6572 to be used uninitialized. Don't warn. */
6573 TREE_NO_WARNING (var) = 1;
6574 }
6575 }
6576 else
6577 {
6578 gimplify_stmt (&cleanup, &cleanup_stmts);
6579 wce = gimple_build_wce (cleanup_stmts);
6580 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6581 gimplify_seq_add_stmt (pre_p, wce);
6582 }
6583 }
6584
6585 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
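/* Sketch: TARGET_EXPR <D.1, init> is lowered by registering D.1 as a
   temporary, emitting "D.1 = init" (plus any cleanups) to PRE_P, and
   rewriting *EXPR_P to the slot D.1. */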
6586
6587 static enum gimplify_status
6588 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6589 {
6590 tree targ = *expr_p;
6591 tree temp = TARGET_EXPR_SLOT (targ);
6592 tree init = TARGET_EXPR_INITIAL (targ);
6593 enum gimplify_status ret;
6594
6595 bool unpoison_empty_seq = false;
6596 gimple_stmt_iterator unpoison_it;
6597
6598 if (init)
6599 {
6600 tree cleanup = NULL_TREE;
6601
6602 /* TARGET_EXPR temps aren't part of the enclosing block, so add the temp
6603 to the temps list. Also handle variable-length TARGET_EXPRs. */
6604 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6605 {
6606 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6607 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6608 gimplify_vla_decl (temp, pre_p);
6609 }
6610 else
6611 {
6612 /* Save the location where we need to place unpoisoning. It's possible
6613 that the variable will later need to live in memory. */
6614 unpoison_it = gsi_last (*pre_p);
6615 unpoison_empty_seq = gsi_end_p (unpoison_it);
6616
6617 gimple_add_tmp_var (temp);
6618 }
6619
6620 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6621 expression is supposed to initialize the slot. */
6622 if (VOID_TYPE_P (TREE_TYPE (init)))
6623 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6624 else
6625 {
6626 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6627 init = init_expr;
6628 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6629 init = NULL;
6630 ggc_free (init_expr);
6631 }
6632 if (ret == GS_ERROR)
6633 {
6634 /* PR c++/28266 Make sure this is expanded only once. */
6635 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6636 return GS_ERROR;
6637 }
6638 if (init)
6639 gimplify_and_add (init, pre_p);
6640
6641 /* If needed, push the cleanup for the temp. */
6642 if (TARGET_EXPR_CLEANUP (targ))
6643 {
6644 if (CLEANUP_EH_ONLY (targ))
6645 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6646 CLEANUP_EH_ONLY (targ), pre_p);
6647 else
6648 cleanup = TARGET_EXPR_CLEANUP (targ);
6649 }
6650
6651 /* Add a clobber for the temporary going out of scope, like
6652 gimplify_bind_expr. */
6653 if (gimplify_ctxp->in_cleanup_point_expr
6654 && needs_to_live_in_memory (temp))
6655 {
6656 if (flag_stack_reuse == SR_ALL)
6657 {
6658 tree clobber = build_clobber (TREE_TYPE (temp));
6659 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6660 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6661 }
6662 if (asan_poisoned_variables
6663 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6664 && dbg_cnt (asan_use_after_scope)
6665 && !gimplify_omp_ctxp)
6666 {
6667 tree asan_cleanup = build_asan_poison_call_expr (temp);
6668 if (asan_cleanup)
6669 {
6670 if (unpoison_empty_seq)
6671 unpoison_it = gsi_start (*pre_p);
6672
6673 asan_poison_variable (temp, false, &unpoison_it,
6674 unpoison_empty_seq);
6675 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6676 }
6677 }
6678 }
6679 if (cleanup)
6680 gimple_push_cleanup (temp, cleanup, false, pre_p);
6681
6682 /* Only expand this once. */
6683 TREE_OPERAND (targ, 3) = init;
6684 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6685 }
6686 else
6687 /* We should have expanded this before. */
6688 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6689
6690 *expr_p = temp;
6691 return GS_OK;
6692 }
6693
6694 /* Gimplification of expression trees. */
6695
6696 /* Gimplify an expression which appears at statement context. The
6697 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6698 NULL, a new sequence is allocated.
6699
6700 Return true if we actually added a statement to the queue. */
6701
6702 bool
6703 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6704 {
6705 gimple_seq_node last;
6706
6707 last = gimple_seq_last (*seq_p);
6708 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6709 return last != gimple_seq_last (*seq_p);
6710 }
6711
6712 /* Add FIRSTPRIVATE entries for DECL in the OpenMP parallels surrounding
6713 CTX. If entries already exist, force them to be some flavor of private.
6714 If there is no enclosing parallel, do nothing. */
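/* Illustrative sketch (assuming a VLA): for "int a[n]" used inside
   "#pragma omp parallel", the size expressions built from "n" are
   firstprivatized in each enclosing parallel so that the private copy
   can be laid out there. */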
6715
6716 void
6717 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6718 {
6719 splay_tree_node n;
6720
6721 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6722 return;
6723
6724 do
6725 {
6726 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6727 if (n != NULL)
6728 {
6729 if (n->value & GOVD_SHARED)
6730 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6731 else if (n->value & GOVD_MAP)
6732 n->value |= GOVD_MAP_TO_ONLY;
6733 else
6734 return;
6735 }
6736 else if ((ctx->region_type & ORT_TARGET) != 0)
6737 {
6738 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6739 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6740 else
6741 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6742 }
6743 else if (ctx->region_type != ORT_WORKSHARE
6744 && ctx->region_type != ORT_TASKGROUP
6745 && ctx->region_type != ORT_SIMD
6746 && ctx->region_type != ORT_ACC
6747 && !(ctx->region_type & ORT_TARGET_DATA))
6748 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6749
6750 ctx = ctx->outer_context;
6751 }
6752 while (ctx);
6753 }
6754
6755 /* Similarly for each of the type sizes of TYPE. */
6756
6757 static void
6758 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6759 {
6760 if (type == NULL || type == error_mark_node)
6761 return;
6762 type = TYPE_MAIN_VARIANT (type);
6763
6764 if (ctx->privatized_types->add (type))
6765 return;
6766
6767 switch (TREE_CODE (type))
6768 {
6769 case INTEGER_TYPE:
6770 case ENUMERAL_TYPE:
6771 case BOOLEAN_TYPE:
6772 case REAL_TYPE:
6773 case FIXED_POINT_TYPE:
6774 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6775 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6776 break;
6777
6778 case ARRAY_TYPE:
6779 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6780 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6781 break;
6782
6783 case RECORD_TYPE:
6784 case UNION_TYPE:
6785 case QUAL_UNION_TYPE:
6786 {
6787 tree field;
6788 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6789 if (TREE_CODE (field) == FIELD_DECL)
6790 {
6791 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6792 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6793 }
6794 }
6795 break;
6796
6797 case POINTER_TYPE:
6798 case REFERENCE_TYPE:
6799 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6800 break;
6801
6802 default:
6803 break;
6804 }
6805
6806 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6807 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6808 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6809 }
6810
6811 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6812
6813 static void
6814 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6815 {
6816 splay_tree_node n;
6817 unsigned int nflags;
6818 tree t;
6819
6820 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6821 return;
6822
6823 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6824 there are constructors involved somewhere. The exception is a shared
6825 clause, where nothing is privatized. */
6826 if ((flags & GOVD_SHARED) == 0
6827 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6828 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6829 flags |= GOVD_SEEN;
6830
6831 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6832 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6833 {
6834 /* We shouldn't be re-adding the decl with the same data
6835 sharing class. */
6836 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6837 nflags = n->value | flags;
6838 /* The only combination of data sharing classes we should see is
6839 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6840 reduction variables to be used in data sharing clauses. */
6841 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6842 || ((nflags & GOVD_DATA_SHARE_CLASS)
6843 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6844 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6845 n->value = nflags;
6846 return;
6847 }
6848
6849 /* When adding a variable-sized variable, we have to handle all sorts
6850 of additional bits of data: the pointer replacement variable, and
6851 the parameters of the type. */
6852 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6853 {
6854 /* Add the pointer replacement variable as PRIVATE if the variable
6855 replacement is private, else FIRSTPRIVATE since we'll need the
6856 address of the original variable either for SHARED, or for the
6857 copy into or out of the context. */
6858 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6859 {
6860 if (flags & GOVD_MAP)
6861 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6862 else if (flags & GOVD_PRIVATE)
6863 nflags = GOVD_PRIVATE;
6864 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6865 && (flags & GOVD_FIRSTPRIVATE))
6866 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6867 else
6868 nflags = GOVD_FIRSTPRIVATE;
6869 nflags |= flags & GOVD_SEEN;
6870 t = DECL_VALUE_EXPR (decl);
6871 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6872 t = TREE_OPERAND (t, 0);
6873 gcc_assert (DECL_P (t));
6874 omp_add_variable (ctx, t, nflags);
6875 }
6876
6877 /* Add all of the variable and type parameters (which should have
6878 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6879 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6880 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6881 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6882
6883 /* The variable-sized variable itself is never SHARED, only some form
6884 of PRIVATE. The sharing would take place via the pointer variable
6885 which we remapped above. */
6886 if (flags & GOVD_SHARED)
6887 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6888 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6889
6890 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6891 alloca statement we generate for the variable, so make sure it
6892 is available. This isn't automatically needed for the SHARED
6893 case, since we won't be allocating local storage then.
6894 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6895 in this case omp_notice_variable will be called later
6896 on when it is gimplified. */
6897 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6898 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6899 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6900 }
6901 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6902 && lang_hooks.decls.omp_privatize_by_reference (decl))
6903 {
6904 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6905
6906 /* Similar to the direct variable sized case above, we'll need the
6907 size of references being privatized. */
6908 if ((flags & GOVD_SHARED) == 0)
6909 {
6910 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6911 if (DECL_P (t))
6912 omp_notice_variable (ctx, t, true);
6913 }
6914 }
6915
6916 if (n != NULL)
6917 n->value |= flags;
6918 else
6919 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6920
6921 /* For reduction clauses in OpenACC loop directives, by default create a
6922 copy clause on the enclosing parallel construct for carrying back the
6923 results. */
6924 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6925 {
6926 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6927 while (outer_ctx)
6928 {
6929 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6930 if (n != NULL)
6931 {
6932 /* Ignore local variables and explicitly declared clauses. */
6933 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6934 break;
6935 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6936 {
6937 /* According to the OpenACC spec, such a reduction variable
6938 should already have a copy map on a kernels construct;
6939 verify that here. */
6940 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6941 && (n->value & GOVD_MAP));
6942 }
6943 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6944 {
6945 /* Remove firstprivate and make it a copy map. */
6946 n->value &= ~GOVD_FIRSTPRIVATE;
6947 n->value |= GOVD_MAP;
6948 }
6949 }
6950 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6951 {
6952 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6953 GOVD_MAP | GOVD_SEEN);
6954 break;
6955 }
6956 outer_ctx = outer_ctx->outer_context;
6957 }
6958 }
6959 }
6960
6961 /* Notice a threadprivate variable DECL used in OMP context CTX.
6962 This just prints out diagnostics about threadprivate variable uses
6963 in target regions and untied tasks. If DECL2 is non-NULL, prevent
6964 this warning on that variable. */
6965
6966 static bool
6967 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6968 tree decl2)
6969 {
6970 splay_tree_node n;
6971 struct gimplify_omp_ctx *octx;
6972
6973 for (octx = ctx; octx; octx = octx->outer_context)
6974 if ((octx->region_type & ORT_TARGET) != 0)
6975 {
6976 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6977 if (n == NULL)
6978 {
6979 error ("threadprivate variable %qE used in target region",
6980 DECL_NAME (decl));
6981 error_at (octx->location, "enclosing target region");
6982 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6983 }
6984 if (decl2)
6985 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6986 }
6987
6988 if (ctx->region_type != ORT_UNTIED_TASK)
6989 return false;
6990 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6991 if (n == NULL)
6992 {
6993 error ("threadprivate variable %qE used in untied task",
6994 DECL_NAME (decl));
6995 error_at (ctx->location, "enclosing task");
6996 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6997 }
6998 if (decl2)
6999 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7000 return false;
7001 }
7002
7003 /* Return true if global var DECL is device resident. */
7004
7005 static bool
7006 device_resident_p (tree decl)
7007 {
7008 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7009
7010 if (!attr)
7011 return false;
7012
7013 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7014 {
7015 tree c = TREE_VALUE (t);
7016 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7017 return true;
7018 }
7019
7020 return false;
7021 }
7022
7023 /* Return true if DECL has an ACC DECLARE attribute. */
7024
7025 static bool
7026 is_oacc_declared (tree decl)
7027 {
7028 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7029 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7030 return declared != NULL_TREE;
7031 }
7032
7033 /* Determine outer default flags for DECL mentioned in an OMP region
7034 but not declared in an enclosing clause.
7035
7036 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7037 remapped firstprivate instead of shared. To some extent this is
7038 addressed in omp_firstprivatize_type_sizes, but not
7039 effectively. */
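/* An illustrative example (not from the sources):

     int x = 0;
     #pragma omp parallel default(none)
     x++;

   reaches this function with DEFAULT_KIND equal to
   OMP_CLAUSE_DEFAULT_NONE for X, so the "not specified in
   enclosing parallel" error below is emitted.  */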
7040
7041 static unsigned
7042 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7043 bool in_code, unsigned flags)
7044 {
7045 enum omp_clause_default_kind default_kind = ctx->default_kind;
7046 enum omp_clause_default_kind kind;
7047
7048 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7049 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7050 default_kind = kind;
7051
7052 switch (default_kind)
7053 {
7054 case OMP_CLAUSE_DEFAULT_NONE:
7055 {
7056 const char *rtype;
7057
7058 if (ctx->region_type & ORT_PARALLEL)
7059 rtype = "parallel";
7060 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7061 rtype = "taskloop";
7062 else if (ctx->region_type & ORT_TASK)
7063 rtype = "task";
7064 else if (ctx->region_type & ORT_TEAMS)
7065 rtype = "teams";
7066 else
7067 gcc_unreachable ();
7068
7069 error ("%qE not specified in enclosing %qs",
7070 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7071 error_at (ctx->location, "enclosing %qs", rtype);
7072 }
7073 /* FALLTHRU */
7074 case OMP_CLAUSE_DEFAULT_SHARED:
7075 flags |= GOVD_SHARED;
7076 break;
7077 case OMP_CLAUSE_DEFAULT_PRIVATE:
7078 flags |= GOVD_PRIVATE;
7079 break;
7080 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7081 flags |= GOVD_FIRSTPRIVATE;
7082 break;
7083 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7084 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7085 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7086 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7087 {
7088 omp_notice_variable (octx, decl, in_code);
7089 for (; octx; octx = octx->outer_context)
7090 {
7091 splay_tree_node n2;
7092
7093 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7094 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7095 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7096 continue;
7097 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7098 {
7099 flags |= GOVD_FIRSTPRIVATE;
7100 goto found_outer;
7101 }
7102 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7103 {
7104 flags |= GOVD_SHARED;
7105 goto found_outer;
7106 }
7107 }
7108 }
7109
7110 if (TREE_CODE (decl) == PARM_DECL
7111 || (!is_global_var (decl)
7112 && DECL_CONTEXT (decl) == current_function_decl))
7113 flags |= GOVD_FIRSTPRIVATE;
7114 else
7115 flags |= GOVD_SHARED;
7116 found_outer:
7117 break;
7118
7119 default:
7120 gcc_unreachable ();
7121 }
7122
7123 return flags;
7124 }
7125
7126
7127 /* Determine outer default flags for DECL mentioned in an OACC region
7128 but not declared in an enclosing clause. */
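/* A sketch of the defaults implemented below (illustrative): given

     double a[100]; double x;
     #pragma acc parallel
     { ... a[i] ... x ... }

   the aggregate A defaults to a 'present_or_copy' mapping (GOVD_MAP)
   while the scalar X defaults to 'firstprivate'; on a kernels
   construct X would instead get a forced 'copy' mapping.  */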
7129
7130 static unsigned
7131 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7132 {
7133 const char *rkind;
7134 bool on_device = false;
7135 bool declared = is_oacc_declared (decl);
7136 tree type = TREE_TYPE (decl);
7137
7138 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7139 type = TREE_TYPE (type);
7140
7141 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7142 && is_global_var (decl)
7143 && device_resident_p (decl))
7144 {
7145 on_device = true;
7146 flags |= GOVD_MAP_TO_ONLY;
7147 }
7148
7149 switch (ctx->region_type)
7150 {
7151 case ORT_ACC_KERNELS:
7152 rkind = "kernels";
7153
7154 if (AGGREGATE_TYPE_P (type))
7155 {
7156 /* Aggregates default to 'present_or_copy', or to 'present' with default(present). */
7157 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7158 flags |= GOVD_MAP;
7159 else
7160 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7161 }
7162 else
7163 /* Scalars default to 'copy'. */
7164 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7165
7166 break;
7167
7168 case ORT_ACC_PARALLEL:
7169 rkind = "parallel";
7170
7171 if (on_device || declared)
7172 flags |= GOVD_MAP;
7173 else if (AGGREGATE_TYPE_P (type))
7174 {
7175 /* Aggregates default to 'present_or_copy', or to 'present' with default(present). */
7176 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7177 flags |= GOVD_MAP;
7178 else
7179 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7180 }
7181 else
7182 /* Scalars default to 'firstprivate'. */
7183 flags |= GOVD_FIRSTPRIVATE;
7184
7185 break;
7186
7187 default:
7188 gcc_unreachable ();
7189 }
7190
7191 if (DECL_ARTIFICIAL (decl))
7192 ; /* We can get compiler-generated decls, and should not complain
7193 about them. */
7194 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7195 {
7196 error ("%qE not specified in enclosing OpenACC %qs construct",
7197 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7198 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7199 }
7200 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7201 ; /* Handled above. */
7202 else
7203 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7204
7205 return flags;
7206 }
7207
7208 /* Record the fact that DECL was used within the OMP context CTX.
7209 IN_CODE is true when real code uses DECL, and false when we should
7210 merely emit default(none) errors. Return true if DECL is going to
7211 be remapped and thus DECL shouldn't be gimplified into its
7212 DECL_VALUE_EXPR (if any). */
7213
7214 static bool
7215 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7216 {
7217 splay_tree_node n;
7218 unsigned flags = in_code ? GOVD_SEEN : 0;
7219 bool ret = false, shared;
7220
7221 if (error_operand_p (decl))
7222 return false;
7223
7224 if (ctx->region_type == ORT_NONE)
7225 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7226
7227 if (is_global_var (decl))
7228 {
7229 /* Threadprivate variables are predetermined. */
7230 if (DECL_THREAD_LOCAL_P (decl))
7231 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7232
7233 if (DECL_HAS_VALUE_EXPR_P (decl))
7234 {
7235 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7236
7237 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7238 return omp_notice_threadprivate_variable (ctx, decl, value);
7239 }
7240
7241 if (gimplify_omp_ctxp->outer_context == NULL
7242 && VAR_P (decl)
7243 && oacc_get_fn_attrib (current_function_decl))
7244 {
7245 location_t loc = DECL_SOURCE_LOCATION (decl);
7246
7247 if (lookup_attribute ("omp declare target link",
7248 DECL_ATTRIBUTES (decl)))
7249 {
7250 error_at (loc,
7251 "%qE with %<link%> clause used in %<routine%> function",
7252 DECL_NAME (decl));
7253 return false;
7254 }
7255 else if (!lookup_attribute ("omp declare target",
7256 DECL_ATTRIBUTES (decl)))
7257 {
7258 error_at (loc,
7259 "%qE requires a %<declare%> directive for use "
7260 "in a %<routine%> function", DECL_NAME (decl));
7261 return false;
7262 }
7263 }
7264 }
7265
7266 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7267 if ((ctx->region_type & ORT_TARGET) != 0)
7268 {
7269 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7270 if (n == NULL)
7271 {
7272 unsigned nflags = flags;
7273 if ((ctx->region_type & ORT_ACC) == 0)
7274 {
7275 bool is_declare_target = false;
7276 if (is_global_var (decl)
7277 && varpool_node::get_create (decl)->offloadable)
7278 {
7279 struct gimplify_omp_ctx *octx;
7280 for (octx = ctx->outer_context;
7281 octx; octx = octx->outer_context)
7282 {
7283 n = splay_tree_lookup (octx->variables,
7284 (splay_tree_key)decl);
7285 if (n
7286 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7287 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7288 break;
7289 }
7290 is_declare_target = octx == NULL;
7291 }
7292 if (!is_declare_target)
7293 {
7294 int gdmk;
7295 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7296 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7297 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7298 == POINTER_TYPE)))
7299 gdmk = GDMK_POINTER;
7300 else if (lang_hooks.decls.omp_scalar_p (decl))
7301 gdmk = GDMK_SCALAR;
7302 else
7303 gdmk = GDMK_AGGREGATE;
7304 if (ctx->defaultmap[gdmk] == 0)
7305 {
7306 tree d = lang_hooks.decls.omp_report_decl (decl);
7307 error ("%qE not specified in enclosing %<target%>",
7308 DECL_NAME (d));
7309 error_at (ctx->location, "enclosing %<target%>");
7310 }
7311 else if (ctx->defaultmap[gdmk]
7312 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7313 nflags |= ctx->defaultmap[gdmk];
7314 else
7315 {
7316 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7317 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7318 }
7319 }
7320 }
7321
7322 struct gimplify_omp_ctx *octx = ctx->outer_context;
7323 if ((ctx->region_type & ORT_ACC) && octx)
7324 {
7325 /* Look in outer OpenACC contexts to see if there's a
7326 data attribute for this variable. */
7327 omp_notice_variable (octx, decl, in_code);
7328
7329 for (; octx; octx = octx->outer_context)
7330 {
7331 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7332 break;
7333 splay_tree_node n2
7334 = splay_tree_lookup (octx->variables,
7335 (splay_tree_key) decl);
7336 if (n2)
7337 {
7338 if (octx->region_type == ORT_ACC_HOST_DATA)
7339 error ("variable %qE declared in enclosing "
7340 "%<host_data%> region", DECL_NAME (decl));
7341 nflags |= GOVD_MAP;
7342 if (octx->region_type == ORT_ACC_DATA
7343 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7344 nflags |= GOVD_MAP_0LEN_ARRAY;
7345 goto found_outer;
7346 }
7347 }
7348 }
7349
7350 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7351 | GOVD_MAP_ALLOC_ONLY)) == flags)
7352 {
7353 tree type = TREE_TYPE (decl);
7354
7355 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7356 && lang_hooks.decls.omp_privatize_by_reference (decl))
7357 type = TREE_TYPE (type);
7358 if (!lang_hooks.types.omp_mappable_type (type))
7359 {
7360 error ("%qD referenced in target region does not have "
7361 "a mappable type", decl);
7362 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7363 }
7364 else
7365 {
7366 if ((ctx->region_type & ORT_ACC) != 0)
7367 nflags = oacc_default_clause (ctx, decl, flags);
7368 else
7369 nflags |= GOVD_MAP;
7370 }
7371 }
7372 found_outer:
7373 omp_add_variable (ctx, decl, nflags);
7374 }
7375 else
7376 {
7377 /* If nothing changed, there's nothing left to do. */
7378 if ((n->value & flags) == flags)
7379 return ret;
7380 flags |= n->value;
7381 n->value = flags;
7382 }
7383 goto do_outer;
7384 }
7385
7386 if (n == NULL)
7387 {
7388 if (ctx->region_type == ORT_WORKSHARE
7389 || ctx->region_type == ORT_TASKGROUP
7390 || ctx->region_type == ORT_SIMD
7391 || ctx->region_type == ORT_ACC
7392 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7393 goto do_outer;
7394
7395 flags = omp_default_clause (ctx, decl, in_code, flags);
7396
7397 if ((flags & GOVD_PRIVATE)
7398 && lang_hooks.decls.omp_private_outer_ref (decl))
7399 flags |= GOVD_PRIVATE_OUTER_REF;
7400
7401 omp_add_variable (ctx, decl, flags);
7402
7403 shared = (flags & GOVD_SHARED) != 0;
7404 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7405 goto do_outer;
7406 }
7407
7408 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7409 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7410 && DECL_SIZE (decl))
7411 {
7412 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7413 {
7414 splay_tree_node n2;
7415 tree t = DECL_VALUE_EXPR (decl);
7416 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7417 t = TREE_OPERAND (t, 0);
7418 gcc_assert (DECL_P (t));
7419 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7420 n2->value |= GOVD_SEEN;
7421 }
7422 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7423 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7424 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7425 != INTEGER_CST))
7426 {
7427 splay_tree_node n2;
7428 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7429 gcc_assert (DECL_P (t));
7430 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7431 if (n2)
7432 omp_notice_variable (ctx, t, true);
7433 }
7434 }
7435
7436 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7437 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7438
7439 /* If nothing changed, there's nothing left to do. */
7440 if ((n->value & flags) == flags)
7441 return ret;
7442 flags |= n->value;
7443 n->value = flags;
7444
7445 do_outer:
7446 /* If the variable is private in the current context, then we don't
7447 need to propagate anything to an outer context. */
7448 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7449 return ret;
7450 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7451 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7452 return ret;
7453 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7454 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7455 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7456 return ret;
7457 if (ctx->outer_context
7458 && omp_notice_variable (ctx->outer_context, decl, in_code))
7459 return true;
7460 return ret;
7461 }
7462
7463 /* Verify that DECL is private within CTX. If there's specific information
7464 to the contrary in the innermost scope, generate an error. */
7465
7466 static bool
7467 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7468 {
7469 splay_tree_node n;
7470
7471 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7472 if (n != NULL)
7473 {
7474 if (n->value & GOVD_SHARED)
7475 {
7476 if (ctx == gimplify_omp_ctxp)
7477 {
7478 if (simd)
7479 error ("iteration variable %qE is predetermined linear",
7480 DECL_NAME (decl));
7481 else
7482 error ("iteration variable %qE should be private",
7483 DECL_NAME (decl));
7484 n->value = GOVD_PRIVATE;
7485 return true;
7486 }
7487 else
7488 return false;
7489 }
7490 else if ((n->value & GOVD_EXPLICIT) != 0
7491 && (ctx == gimplify_omp_ctxp
7492 || (ctx->region_type == ORT_COMBINED_PARALLEL
7493 && gimplify_omp_ctxp->outer_context == ctx)))
7494 {
7495 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7496 error ("iteration variable %qE should not be firstprivate",
7497 DECL_NAME (decl));
7498 else if ((n->value & GOVD_REDUCTION) != 0)
7499 error ("iteration variable %qE should not be reduction",
7500 DECL_NAME (decl));
7501 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7502 error ("iteration variable %qE should not be linear",
7503 DECL_NAME (decl));
7504 }
7505 return (ctx == gimplify_omp_ctxp
7506 || (ctx->region_type == ORT_COMBINED_PARALLEL
7507 && gimplify_omp_ctxp->outer_context == ctx));
7508 }
7509
7510 if (ctx->region_type != ORT_WORKSHARE
7511 && ctx->region_type != ORT_TASKGROUP
7512 && ctx->region_type != ORT_SIMD
7513 && ctx->region_type != ORT_ACC)
7514 return false;
7515 else if (ctx->outer_context)
7516 return omp_is_private (ctx->outer_context, decl, simd);
7517 return false;
7518 }
7519
7520 /* Return true if DECL is private within a parallel region
7521 that binds to the current construct's context, or appears in that
7522 parallel region's REDUCTION clause. */
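/* E.g. (illustrative):

     int x;
     #pragma omp parallel private(x)
     #pragma omp for firstprivate(x)

   Here X is private in the outer parallel, so this function returns
   true and the firstprivate clause is diagnosed as referencing a
   variable that is private in the outer context.  */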
7523
7524 static bool
7525 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7526 {
7527 splay_tree_node n;
7528
7529 do
7530 {
7531 ctx = ctx->outer_context;
7532 if (ctx == NULL)
7533 {
7534 if (is_global_var (decl))
7535 return false;
7536
7537 /* References might be private, but they might be shared too;
7538 when checking for copyprivate, assume they might be
7539 private, otherwise assume they might be shared. */
7540 if (copyprivate)
7541 return true;
7542
7543 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7544 return false;
7545
7546 /* Treat C++ privatized non-static data members outside
7547 of the privatization the same. */
7548 if (omp_member_access_dummy_var (decl))
7549 return false;
7550
7551 return true;
7552 }
7553
7554 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7555
7556 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7557 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7558 continue;
7559
7560 if (n != NULL)
7561 {
7562 if ((n->value & GOVD_LOCAL) != 0
7563 && omp_member_access_dummy_var (decl))
7564 return false;
7565 return (n->value & GOVD_SHARED) == 0;
7566 }
7567 }
7568 while (ctx->region_type == ORT_WORKSHARE
7569 || ctx->region_type == ORT_TASKGROUP
7570 || ctx->region_type == ORT_SIMD
7571 || ctx->region_type == ORT_ACC);
7572 return false;
7573 }
7574
7575 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7576
7577 static tree
7578 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7579 {
7580 tree t = *tp;
7581
7582 /* If this is the DECL_EXPR for the DECL we are looking for, return it. */
7583 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7584 return t;
7585
7586 if (IS_TYPE_OR_DECL_P (t))
7587 *walk_subtrees = 0;
7588 return NULL_TREE;
7589 }
7590
7591 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7592 lower all the depend clauses by populating the corresponding depend
7593 array. Return 0 if there are no such depend clauses, 2 if all
7594 depend clauses should be removed, and 1 otherwise. */
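/* An example of such a clause (illustrative only):
     #pragma omp task depend(iterator(i = 0:n), in: a[i])
   depends on a[0] through a[n-1], so the addresses have to be
   collected into a runtime-sized depend array.  */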
7595
7596 static int
7597 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7598 {
7599 tree c;
7600 gimple *g;
7601 size_t n[4] = { 0, 0, 0, 0 };
7602 bool unused[4];
7603 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7604 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7605 size_t i, j;
7606 location_t first_loc = UNKNOWN_LOCATION;
7607
7608 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7609 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7610 {
7611 switch (OMP_CLAUSE_DEPEND_KIND (c))
7612 {
7613 case OMP_CLAUSE_DEPEND_IN:
7614 i = 2;
7615 break;
7616 case OMP_CLAUSE_DEPEND_OUT:
7617 case OMP_CLAUSE_DEPEND_INOUT:
7618 i = 0;
7619 break;
7620 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7621 i = 1;
7622 break;
7623 case OMP_CLAUSE_DEPEND_DEPOBJ:
7624 i = 3;
7625 break;
7626 case OMP_CLAUSE_DEPEND_SOURCE:
7627 case OMP_CLAUSE_DEPEND_SINK:
7628 continue;
7629 default:
7630 gcc_unreachable ();
7631 }
7632 tree t = OMP_CLAUSE_DECL (c);
7633 if (first_loc == UNKNOWN_LOCATION)
7634 first_loc = OMP_CLAUSE_LOCATION (c);
7635 if (TREE_CODE (t) == TREE_LIST
7636 && TREE_PURPOSE (t)
7637 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7638 {
7639 if (TREE_PURPOSE (t) != last_iter)
7640 {
7641 tree tcnt = size_one_node;
7642 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7643 {
7644 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7645 is_gimple_val, fb_rvalue) == GS_ERROR
7646 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7647 is_gimple_val, fb_rvalue) == GS_ERROR
7648 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7649 is_gimple_val, fb_rvalue) == GS_ERROR
7650 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7651 is_gimple_val, fb_rvalue)
7652 == GS_ERROR))
7653 return 2;
7654 tree var = TREE_VEC_ELT (it, 0);
7655 tree begin = TREE_VEC_ELT (it, 1);
7656 tree end = TREE_VEC_ELT (it, 2);
7657 tree step = TREE_VEC_ELT (it, 3);
7658 tree orig_step = TREE_VEC_ELT (it, 4);
7659 tree type = TREE_TYPE (var);
7660 tree stype = TREE_TYPE (step);
7661 location_t loc = DECL_SOURCE_LOCATION (var);
7662 tree endmbegin;
7663 /* Compute count for this iterator as
7664 orig_step > 0
7665 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7666 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7667 and compute product of those for the entire depend
7668 clause. */
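/* E.g. (illustrative numbers) begin = 0, end = 10, step = 3 with
   orig_step > 0 gives (10 - 0 + (3 - 1)) / 3 = 4 iterations
   (0, 3, 6 and 9).  */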
7669 if (POINTER_TYPE_P (type))
7670 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7671 stype, end, begin);
7672 else
7673 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7674 end, begin);
7675 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7676 step,
7677 build_int_cst (stype, 1));
7678 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7679 build_int_cst (stype, 1));
7680 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7681 unshare_expr (endmbegin),
7682 stepm1);
7683 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7684 pos, step);
7685 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7686 endmbegin, stepp1);
7687 if (TYPE_UNSIGNED (stype))
7688 {
7689 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7690 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7691 }
7692 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7693 neg, step);
7694 step = NULL_TREE;
7695 tree cond = fold_build2_loc (loc, LT_EXPR,
7696 boolean_type_node,
7697 begin, end);
7698 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7699 build_int_cst (stype, 0));
7700 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7701 end, begin);
7702 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7703 build_int_cst (stype, 0));
7704 tree osteptype = TREE_TYPE (orig_step);
7705 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7706 orig_step,
7707 build_int_cst (osteptype, 0));
7708 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7709 cond, pos, neg);
7710 cnt = fold_convert_loc (loc, sizetype, cnt);
7711 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7712 fb_rvalue) == GS_ERROR)
7713 return 2;
7714 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7715 }
7716 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7717 fb_rvalue) == GS_ERROR)
7718 return 2;
7719 last_iter = TREE_PURPOSE (t);
7720 last_count = tcnt;
7721 }
7722 if (counts[i] == NULL_TREE)
7723 counts[i] = last_count;
7724 else
7725 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7726 PLUS_EXPR, counts[i], last_count);
7727 }
7728 else
7729 n[i]++;
7730 }
7731 for (i = 0; i < 4; i++)
7732 if (counts[i])
7733 break;
7734 if (i == 4)
7735 return 0;
7736
7737 tree total = size_zero_node;
7738 for (i = 0; i < 4; i++)
7739 {
7740 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7741 if (counts[i] == NULL_TREE)
7742 counts[i] = size_zero_node;
7743 if (n[i])
7744 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7745 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7746 fb_rvalue) == GS_ERROR)
7747 return 2;
7748 total = size_binop (PLUS_EXPR, total, counts[i]);
7749 }
7750
7751 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7752 == GS_ERROR)
7753 return 2;
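/* A sketch of the array layout built below, inferred from the stores
   that follow: in the legacy layout (IS_OLD, only in/out/inout
   dependencies) element 0 holds the total number of addresses,
   element 1 the out/inout count, and the addresses start at element
   2.  In the new layout element 0 is 0, element 1 holds the total,
   elements 2-4 hold the per-kind counts, and the addresses start at
   element 5.  */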
7754 bool is_old = unused[1] && unused[3];
7755 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7756 size_int (is_old ? 1 : 4));
7757 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7758 tree array = create_tmp_var_raw (type);
7759 TREE_ADDRESSABLE (array) = 1;
7760 if (TREE_CODE (totalpx) != INTEGER_CST)
7761 {
7762 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7763 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7764 if (gimplify_omp_ctxp)
7765 {
7766 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7767 while (ctx
7768 && (ctx->region_type == ORT_WORKSHARE
7769 || ctx->region_type == ORT_TASKGROUP
7770 || ctx->region_type == ORT_SIMD
7771 || ctx->region_type == ORT_ACC))
7772 ctx = ctx->outer_context;
7773 if (ctx)
7774 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7775 }
7776 gimplify_vla_decl (array, pre_p);
7777 }
7778 else
7779 gimple_add_tmp_var (array);
7780 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7781 NULL_TREE);
7782 tree tem;
7783 if (!is_old)
7784 {
7785 tem = build2 (MODIFY_EXPR, void_type_node, r,
7786 build_int_cst (ptr_type_node, 0));
7787 gimplify_and_add (tem, pre_p);
7788 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7789 NULL_TREE);
7790 }
7791 tem = build2 (MODIFY_EXPR, void_type_node, r,
7792 fold_convert (ptr_type_node, total));
7793 gimplify_and_add (tem, pre_p);
7794 for (i = 1; i < (is_old ? 2 : 4); i++)
7795 {
7796 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7797 NULL_TREE, NULL_TREE);
7798 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7799 gimplify_and_add (tem, pre_p);
7800 }
7801
7802 tree cnts[4];
7803 for (j = 4; j; j--)
7804 if (!unused[j - 1])
7805 break;
7806 for (i = 0; i < 4; i++)
7807 {
7808 if (i && (i >= j || unused[i - 1]))
7809 {
7810 cnts[i] = cnts[i - 1];
7811 continue;
7812 }
7813 cnts[i] = create_tmp_var (sizetype);
7814 if (i == 0)
7815 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7816 else
7817 {
7818 tree t;
7819 if (is_old)
7820 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7821 else
7822 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7823 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7824 == GS_ERROR)
7825 return 2;
7826 g = gimple_build_assign (cnts[i], t);
7827 }
7828 gimple_seq_add_stmt (pre_p, g);
7829 }
7830
7831 last_iter = NULL_TREE;
7832 tree last_bind = NULL_TREE;
7833 tree *last_body = NULL;
7834 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7835 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7836 {
7837 switch (OMP_CLAUSE_DEPEND_KIND (c))
7838 {
7839 case OMP_CLAUSE_DEPEND_IN:
7840 i = 2;
7841 break;
7842 case OMP_CLAUSE_DEPEND_OUT:
7843 case OMP_CLAUSE_DEPEND_INOUT:
7844 i = 0;
7845 break;
7846 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7847 i = 1;
7848 break;
7849 case OMP_CLAUSE_DEPEND_DEPOBJ:
7850 i = 3;
7851 break;
7852 case OMP_CLAUSE_DEPEND_SOURCE:
7853 case OMP_CLAUSE_DEPEND_SINK:
7854 continue;
7855 default:
7856 gcc_unreachable ();
7857 }
7858 tree t = OMP_CLAUSE_DECL (c);
7859 if (TREE_CODE (t) == TREE_LIST
7860 && TREE_PURPOSE (t)
7861 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7862 {
7863 if (TREE_PURPOSE (t) != last_iter)
7864 {
7865 if (last_bind)
7866 gimplify_and_add (last_bind, pre_p);
7867 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
7868 last_bind = build3 (BIND_EXPR, void_type_node,
7869 BLOCK_VARS (block), NULL, block);
7870 TREE_SIDE_EFFECTS (last_bind) = 1;
7871 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
7872 tree *p = &BIND_EXPR_BODY (last_bind);
7873 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7874 {
7875 tree var = TREE_VEC_ELT (it, 0);
7876 tree begin = TREE_VEC_ELT (it, 1);
7877 tree end = TREE_VEC_ELT (it, 2);
7878 tree step = TREE_VEC_ELT (it, 3);
7879 tree orig_step = TREE_VEC_ELT (it, 4);
7880 tree type = TREE_TYPE (var);
7881 location_t loc = DECL_SOURCE_LOCATION (var);
7882 /* Emit:
7883 var = begin;
7884 goto cond_label;
7885 beg_label:
7886 ...
7887 var = var + step;
7888 cond_label:
7889 if (orig_step > 0) {
7890 if (var < end) goto beg_label;
7891 } else {
7892 if (var > end) goto beg_label;
7893 }
7894 for each iterator, with inner iterators added to
7895 the ... above. */
7896 tree beg_label = create_artificial_label (loc);
7897 tree cond_label = NULL_TREE;
7898 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7899 var, begin);
7900 append_to_statement_list_force (tem, p);
7901 tem = build_and_jump (&cond_label);
7902 append_to_statement_list_force (tem, p);
7903 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
7904 append_to_statement_list (tem, p);
7905 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
7906 NULL_TREE, NULL_TREE);
7907 TREE_SIDE_EFFECTS (bind) = 1;
7908 SET_EXPR_LOCATION (bind, loc);
7909 append_to_statement_list_force (bind, p);
7910 if (POINTER_TYPE_P (type))
7911 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
7912 var, fold_convert_loc (loc, sizetype,
7913 step));
7914 else
7915 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
7916 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7917 var, tem);
7918 append_to_statement_list_force (tem, p);
7919 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
7920 append_to_statement_list (tem, p);
7921 tree cond = fold_build2_loc (loc, LT_EXPR,
7922 boolean_type_node,
7923 var, end);
7924 tree pos
7925 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7926 cond, build_and_jump (&beg_label),
7927 void_node);
7928 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7929 var, end);
7930 tree neg
7931 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7932 cond, build_and_jump (&beg_label),
7933 void_node);
7934 tree osteptype = TREE_TYPE (orig_step);
7935 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7936 orig_step,
7937 build_int_cst (osteptype, 0));
7938 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
7939 cond, pos, neg);
7940 append_to_statement_list_force (tem, p);
7941 p = &BIND_EXPR_BODY (bind);
7942 }
7943 last_body = p;
7944 }
7945 last_iter = TREE_PURPOSE (t);
7946 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
7947 {
7948 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
7949 0), last_body);
7950 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
7951 }
7952 if (error_operand_p (TREE_VALUE (t)))
7953 return 2;
7954 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
7955 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
7956 NULL_TREE, NULL_TREE);
7957 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
7958 void_type_node, r, TREE_VALUE (t));
7959 append_to_statement_list_force (tem, last_body);
7960 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
7961 void_type_node, cnts[i],
7962 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
7963 append_to_statement_list_force (tem, last_body);
7964 TREE_VALUE (t) = null_pointer_node;
7965 }
7966 else
7967 {
7968 if (last_bind)
7969 {
7970 gimplify_and_add (last_bind, pre_p);
7971 last_bind = NULL_TREE;
7972 }
7973 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7974 {
7975 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7976 NULL, is_gimple_val, fb_rvalue);
7977 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7978 }
7979 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7980 return 2;
7981 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7982 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7983 is_gimple_val, fb_rvalue) == GS_ERROR)
7984 return 2;
7985 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
7986 NULL_TREE, NULL_TREE);
7987 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
7988 gimplify_and_add (tem, pre_p);
7989 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
7990 size_int (1)));
7991 gimple_seq_add_stmt (pre_p, g);
7992 }
7993 }
7994 if (last_bind)
7995 gimplify_and_add (last_bind, pre_p);
7996 tree cond = boolean_false_node;
7997 if (is_old)
7998 {
7999 if (!unused[0])
8000 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8001 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8002 size_int (2)));
8003 if (!unused[2])
8004 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8005 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8006 cnts[2],
8007 size_binop_loc (first_loc, PLUS_EXPR,
8008 totalpx,
8009 size_int (1))));
8010 }
8011 else
8012 {
8013 tree prev = size_int (5);
8014 for (i = 0; i < 4; i++)
8015 {
8016 if (unused[i])
8017 continue;
8018 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8019 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8020 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8021 cnts[i], unshare_expr (prev)));
8022 }
8023 }
8024 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8025 build_call_expr_loc (first_loc,
8026 builtin_decl_explicit (BUILT_IN_TRAP),
8027 0), void_node);
8028 gimplify_and_add (tem, pre_p);
8029 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8030 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8031 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8032 OMP_CLAUSE_CHAIN (c) = *list_p;
8033 *list_p = c;
8034 return 1;
8035 }
8036
8037 /* Scan the OMP clauses in *LIST_P, installing mappings into the newly
8038 created omp context and into enclosing omp contexts. */
8039
8040 static void
8041 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8042 enum omp_region_type region_type,
8043 enum tree_code code)
8044 {
8045 struct gimplify_omp_ctx *ctx, *outer_ctx;
8046 tree c;
8047 hash_map<tree, tree> *struct_map_to_clause = NULL;
8048 tree *prev_list_p = NULL;
8049 int handled_depend_iterators = -1;
8050 int nowait = -1;
8051
8052 ctx = new_omp_context (region_type);
8053 outer_ctx = ctx->outer_context;
8054 if (code == OMP_TARGET)
8055 {
8056 if (!lang_GNU_Fortran ())
8057 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8058 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8059 }
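/* I.e. on #pragma omp target a scalar without an explicit clause
   defaults to firstprivate (illustrative):
     int n = 10;
     #pragma omp target
     use (n);
   while for C/C++ bare pointers additionally default to the
   zero-length array section mapping.  */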
8060 if (!lang_GNU_Fortran ())
8061 switch (code)
8062 {
8063 case OMP_TARGET:
8064 case OMP_TARGET_DATA:
8065 case OMP_TARGET_ENTER_DATA:
8066 case OMP_TARGET_EXIT_DATA:
8067 case OACC_DECLARE:
8068 case OACC_HOST_DATA:
8069 case OACC_PARALLEL:
8070 case OACC_KERNELS:
8071 ctx->target_firstprivatize_array_bases = true;
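/* FALLTHRU */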
8072 default:
8073 break;
8074 }
8075
8076 while ((c = *list_p) != NULL)
8077 {
8078 bool remove = false;
8079 bool notice_outer = true;
8080 const char *check_non_private = NULL;
8081 unsigned int flags;
8082 tree decl;
8083
8084 switch (OMP_CLAUSE_CODE (c))
8085 {
8086 case OMP_CLAUSE_PRIVATE:
8087 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8088 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8089 {
8090 flags |= GOVD_PRIVATE_OUTER_REF;
8091 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8092 }
8093 else
8094 notice_outer = false;
8095 goto do_add;
8096 case OMP_CLAUSE_SHARED:
8097 flags = GOVD_SHARED | GOVD_EXPLICIT;
8098 goto do_add;
8099 case OMP_CLAUSE_FIRSTPRIVATE:
8100 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8101 check_non_private = "firstprivate";
8102 goto do_add;
8103 case OMP_CLAUSE_LASTPRIVATE:
8104 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8105 switch (code)
8106 {
8107 case OMP_DISTRIBUTE:
8108 error_at (OMP_CLAUSE_LOCATION (c),
8109 "conditional %<lastprivate%> clause on "
8110 "%<distribute%> construct");
8111 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8112 break;
8113 case OMP_TASKLOOP:
8114 error_at (OMP_CLAUSE_LOCATION (c),
8115 "conditional %<lastprivate%> clause on "
8116 "%<taskloop%> construct");
8117 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8118 break;
8119 default:
8120 break;
8121 }
8122 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8123 check_non_private = "lastprivate";
8124 decl = OMP_CLAUSE_DECL (c);
8125 if (error_operand_p (decl))
8126 goto do_add;
8127 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8128 && !lang_hooks.decls.omp_scalar_p (decl))
8129 {
8130 error_at (OMP_CLAUSE_LOCATION (c),
8131 "non-scalar variable %qD in conditional "
8132 "%<lastprivate%> clause", decl);
8133 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8134 }
8135 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8136 sorry_at (OMP_CLAUSE_LOCATION (c),
8137 "%<conditional%> modifier on %<lastprivate%> clause "
8138 "not supported yet");
8139 if (outer_ctx
8140 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8141 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8142 == ORT_COMBINED_TEAMS))
8143 && splay_tree_lookup (outer_ctx->variables,
8144 (splay_tree_key) decl) == NULL)
8145 {
8146 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8147 if (outer_ctx->outer_context)
8148 omp_notice_variable (outer_ctx->outer_context, decl, true);
8149 }
8150 else if (outer_ctx
8151 && (outer_ctx->region_type & ORT_TASK) != 0
8152 && outer_ctx->combined_loop
8153 && splay_tree_lookup (outer_ctx->variables,
8154 (splay_tree_key) decl) == NULL)
8155 {
8156 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8157 if (outer_ctx->outer_context)
8158 omp_notice_variable (outer_ctx->outer_context, decl, true);
8159 }
8160 else if (outer_ctx
8161 && (outer_ctx->region_type == ORT_WORKSHARE
8162 || outer_ctx->region_type == ORT_ACC)
8163 && outer_ctx->combined_loop
8164 && splay_tree_lookup (outer_ctx->variables,
8165 (splay_tree_key) decl) == NULL
8166 && !omp_check_private (outer_ctx, decl, false))
8167 {
8168 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8169 if (outer_ctx->outer_context
8170 && (outer_ctx->outer_context->region_type
8171 == ORT_COMBINED_PARALLEL)
8172 && splay_tree_lookup (outer_ctx->outer_context->variables,
8173 (splay_tree_key) decl) == NULL)
8174 {
8175 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8176 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8177 if (octx->outer_context)
8178 {
8179 octx = octx->outer_context;
8180 if (octx->region_type == ORT_WORKSHARE
8181 && octx->combined_loop
8182 && splay_tree_lookup (octx->variables,
8183 (splay_tree_key) decl) == NULL
8184 && !omp_check_private (octx, decl, false))
8185 {
8186 omp_add_variable (octx, decl,
8187 GOVD_LASTPRIVATE | GOVD_SEEN);
8188 octx = octx->outer_context;
8189 if (octx
8190 && ((octx->region_type & ORT_COMBINED_TEAMS)
8191 == ORT_COMBINED_TEAMS)
8192 && (splay_tree_lookup (octx->variables,
8193 (splay_tree_key) decl)
8194 == NULL))
8195 {
8196 omp_add_variable (octx, decl,
8197 GOVD_SHARED | GOVD_SEEN);
8198 octx = octx->outer_context;
8199 }
8200 }
8201 if (octx)
8202 omp_notice_variable (octx, decl, true);
8203 }
8204 }
8205 else if (outer_ctx->outer_context)
8206 omp_notice_variable (outer_ctx->outer_context, decl, true);
8207 }
8208 goto do_add;
8209 case OMP_CLAUSE_REDUCTION:
8210 if (OMP_CLAUSE_REDUCTION_TASK (c))
8211 {
8212 if (region_type == ORT_WORKSHARE)
8213 {
8214 if (nowait == -1)
8215 nowait = omp_find_clause (*list_p,
8216 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8217 if (nowait
8218 && (outer_ctx == NULL
8219 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8220 {
8221 error_at (OMP_CLAUSE_LOCATION (c),
8222 "%<task%> reduction modifier on a construct "
8223 "with a %<nowait%> clause");
8224 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8225 }
8226 }
8227 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8228 {
8229 error_at (OMP_CLAUSE_LOCATION (c),
8230 "invalid %<task%> reduction modifier on construct "
8231 "other than %<parallel%>, %<for%> or %<sections%>");
8232 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8233 }
8234 }
8235 /* FALLTHRU */
8236 case OMP_CLAUSE_IN_REDUCTION:
8237 case OMP_CLAUSE_TASK_REDUCTION:
8238 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8239 /* OpenACC permits reductions on private variables. */
8240 if (!(region_type & ORT_ACC)
8241 /* taskgroup is actually not a worksharing region. */
8242 && code != OMP_TASKGROUP)
8243 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8244 decl = OMP_CLAUSE_DECL (c);
8245 if (TREE_CODE (decl) == MEM_REF)
8246 {
8247 tree type = TREE_TYPE (decl);
8248 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8249 NULL, is_gimple_val, fb_rvalue, false)
8250 == GS_ERROR)
8251 {
8252 remove = true;
8253 break;
8254 }
8255 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8256 if (DECL_P (v))
8257 {
8258 omp_firstprivatize_variable (ctx, v);
8259 omp_notice_variable (ctx, v, true);
8260 }
8261 decl = TREE_OPERAND (decl, 0);
8262 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8263 {
8264 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8265 NULL, is_gimple_val, fb_rvalue, false)
8266 == GS_ERROR)
8267 {
8268 remove = true;
8269 break;
8270 }
8271 v = TREE_OPERAND (decl, 1);
8272 if (DECL_P (v))
8273 {
8274 omp_firstprivatize_variable (ctx, v);
8275 omp_notice_variable (ctx, v, true);
8276 }
8277 decl = TREE_OPERAND (decl, 0);
8278 }
8279 if (TREE_CODE (decl) == ADDR_EXPR
8280 || TREE_CODE (decl) == INDIRECT_REF)
8281 decl = TREE_OPERAND (decl, 0);
8282 }
8283 goto do_add_decl;
8284 case OMP_CLAUSE_LINEAR:
8285 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8286 is_gimple_val, fb_rvalue) == GS_ERROR)
8287 {
8288 remove = true;
8289 break;
8290 }
8291 else
8292 {
8293 if (code == OMP_SIMD
8294 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8295 {
8296 struct gimplify_omp_ctx *octx = outer_ctx;
8297 if (octx
8298 && octx->region_type == ORT_WORKSHARE
8299 && octx->combined_loop
8300 && !octx->distribute)
8301 {
8302 if (octx->outer_context
8303 && (octx->outer_context->region_type
8304 == ORT_COMBINED_PARALLEL))
8305 octx = octx->outer_context->outer_context;
8306 else
8307 octx = octx->outer_context;
8308 }
8309 if (octx
8310 && octx->region_type == ORT_WORKSHARE
8311 && octx->combined_loop
8312 && octx->distribute)
8313 {
8314 error_at (OMP_CLAUSE_LOCATION (c),
8315 "%<linear%> clause for variable other than "
8316 "loop iterator specified on construct "
8317 "combined with %<distribute%>");
8318 remove = true;
8319 break;
8320 }
8321 }
8322 /* For a combined #pragma omp parallel for simd, we need to put
8323 lastprivate and perhaps firstprivate too on the
8324 parallel. Similarly for #pragma omp for simd. */
8325 struct gimplify_omp_ctx *octx = outer_ctx;
8326 decl = NULL_TREE;
8327 do
8328 {
8329 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8330 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8331 break;
8332 decl = OMP_CLAUSE_DECL (c);
8333 if (error_operand_p (decl))
8334 {
8335 decl = NULL_TREE;
8336 break;
8337 }
8338 flags = GOVD_SEEN;
8339 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8340 flags |= GOVD_FIRSTPRIVATE;
8341 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8342 flags |= GOVD_LASTPRIVATE;
8343 if (octx
8344 && octx->region_type == ORT_WORKSHARE
8345 && octx->combined_loop)
8346 {
8347 if (octx->outer_context
8348 && (octx->outer_context->region_type
8349 == ORT_COMBINED_PARALLEL))
8350 octx = octx->outer_context;
8351 else if (omp_check_private (octx, decl, false))
8352 break;
8353 }
8354 else if (octx
8355 && (octx->region_type & ORT_TASK) != 0
8356 && octx->combined_loop)
8357 ;
8358 else if (octx
8359 && octx->region_type == ORT_COMBINED_PARALLEL
8360 && ctx->region_type == ORT_WORKSHARE
8361 && octx == outer_ctx)
8362 flags = GOVD_SEEN | GOVD_SHARED;
8363 else if (octx
8364 && ((octx->region_type & ORT_COMBINED_TEAMS)
8365 == ORT_COMBINED_TEAMS))
8366 flags = GOVD_SEEN | GOVD_SHARED;
8367 else if (octx
8368 && octx->region_type == ORT_COMBINED_TARGET)
8369 {
8370 flags &= ~GOVD_LASTPRIVATE;
8371 if (flags == GOVD_SEEN)
8372 break;
8373 }
8374 else
8375 break;
8376 splay_tree_node on
8377 = splay_tree_lookup (octx->variables,
8378 (splay_tree_key) decl);
8379 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8380 {
8381 octx = NULL;
8382 break;
8383 }
8384 omp_add_variable (octx, decl, flags);
8385 if (octx->outer_context == NULL)
8386 break;
8387 octx = octx->outer_context;
8388 }
8389 while (1);
8390 if (octx
8391 && decl
8392 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8393 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8394 omp_notice_variable (octx, decl, true);
8395 }
8396 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8397 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8398 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8399 {
8400 notice_outer = false;
8401 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8402 }
8403 goto do_add;
8404
8405 case OMP_CLAUSE_MAP:
8406 decl = OMP_CLAUSE_DECL (c);
8407 if (error_operand_p (decl))
8408 remove = true;
8409 switch (code)
8410 {
8411 case OMP_TARGET:
8412 break;
8413 case OACC_DATA:
8414 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8415 break;
8416 /* FALLTHRU */
8417 case OMP_TARGET_DATA:
8418 case OMP_TARGET_ENTER_DATA:
8419 case OMP_TARGET_EXIT_DATA:
8420 case OACC_ENTER_DATA:
8421 case OACC_EXIT_DATA:
8422 case OACC_HOST_DATA:
8423 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8424 || (OMP_CLAUSE_MAP_KIND (c)
8425 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8426 /* For target {,enter ,exit }data only the array slice is
8427 mapped, but not the pointer to it. */
8428 remove = true;
8429 break;
8430 default:
8431 break;
8432 }
8433 if (remove)
8434 break;
8435 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8436 {
8437 struct gimplify_omp_ctx *octx;
8438 for (octx = outer_ctx; octx; octx = octx->outer_context)
8439 {
8440 if (octx->region_type != ORT_ACC_HOST_DATA)
8441 break;
8442 splay_tree_node n2
8443 = splay_tree_lookup (octx->variables,
8444 (splay_tree_key) decl);
8445 if (n2)
8446 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8447 "declared in enclosing %<host_data%> region",
8448 DECL_NAME (decl));
8449 }
8450 }
8451 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8452 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8453 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8454 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8455 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8456 {
8457 remove = true;
8458 break;
8459 }
8460 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8461 || (OMP_CLAUSE_MAP_KIND (c)
8462 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8463 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8464 {
8465 OMP_CLAUSE_SIZE (c)
8466 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8467 false);
8468 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8469 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8470 }
8471 if (!DECL_P (decl))
8472 {
8473 tree d = decl, *pd;
8474 if (TREE_CODE (d) == ARRAY_REF)
8475 {
8476 while (TREE_CODE (d) == ARRAY_REF)
8477 d = TREE_OPERAND (d, 0);
8478 if (TREE_CODE (d) == COMPONENT_REF
8479 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8480 decl = d;
8481 }
8482 pd = &OMP_CLAUSE_DECL (c);
8483 if (d == decl
8484 && TREE_CODE (decl) == INDIRECT_REF
8485 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8486 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8487 == REFERENCE_TYPE))
8488 {
8489 pd = &TREE_OPERAND (decl, 0);
8490 decl = TREE_OPERAND (decl, 0);
8491 }
8492 if (TREE_CODE (decl) == COMPONENT_REF)
8493 {
8494 while (TREE_CODE (decl) == COMPONENT_REF)
8495 decl = TREE_OPERAND (decl, 0);
8496 if (TREE_CODE (decl) == INDIRECT_REF
8497 && DECL_P (TREE_OPERAND (decl, 0))
8498 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8499 == REFERENCE_TYPE))
8500 decl = TREE_OPERAND (decl, 0);
8501 }
8502 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
8503 == GS_ERROR)
8504 {
8505 remove = true;
8506 break;
8507 }
8508 if (DECL_P (decl))
8509 {
8510 if (error_operand_p (decl))
8511 {
8512 remove = true;
8513 break;
8514 }
8515
8516 tree stype = TREE_TYPE (decl);
8517 if (TREE_CODE (stype) == REFERENCE_TYPE)
8518 stype = TREE_TYPE (stype);
8519 if (TYPE_SIZE_UNIT (stype) == NULL
8520 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
8521 {
8522 error_at (OMP_CLAUSE_LOCATION (c),
8523 "mapping field %qE of variable length "
8524 "structure", OMP_CLAUSE_DECL (c));
8525 remove = true;
8526 break;
8527 }
8528
8529 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8530 {
8531 /* Error recovery. */
8532 if (prev_list_p == NULL)
8533 {
8534 remove = true;
8535 break;
8536 }
8537 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8538 {
8539 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8540 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8541 {
8542 remove = true;
8543 break;
8544 }
8545 }
8546 }
8547
8548 tree offset;
8549 poly_int64 bitsize, bitpos;
8550 machine_mode mode;
8551 int unsignedp, reversep, volatilep = 0;
8552 tree base = OMP_CLAUSE_DECL (c);
8553 while (TREE_CODE (base) == ARRAY_REF)
8554 base = TREE_OPERAND (base, 0);
8555 if (TREE_CODE (base) == INDIRECT_REF)
8556 base = TREE_OPERAND (base, 0);
8557 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8558 &mode, &unsignedp, &reversep,
8559 &volatilep);
8560 tree orig_base = base;
8561 if ((TREE_CODE (base) == INDIRECT_REF
8562 || (TREE_CODE (base) == MEM_REF
8563 && integer_zerop (TREE_OPERAND (base, 1))))
8564 && DECL_P (TREE_OPERAND (base, 0))
8565 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8566 == REFERENCE_TYPE))
8567 base = TREE_OPERAND (base, 0);
8568 gcc_assert (base == decl
8569 && (offset == NULL_TREE
8570 || poly_int_tree_p (offset)));
8571
8572 splay_tree_node n
8573 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8574 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
8575 == GOMP_MAP_ALWAYS_POINTER);
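/* If this is the first component of DECL mapped on this construct,
   the code below creates a GOMP_MAP_STRUCT clause for the whole
   object and sorts later component maps into it by offset;
   e.g. (illustrative)
     #pragma omp target map(s.x, s.y)
   becomes a struct mapping of S covering two components.  */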
8576 if (n == NULL || (n->value & GOVD_MAP) == 0)
8577 {
8578 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8579 OMP_CLAUSE_MAP);
8580 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
8581 if (orig_base != base)
8582 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
8583 else
8584 OMP_CLAUSE_DECL (l) = decl;
8585 OMP_CLAUSE_SIZE (l) = size_int (1);
8586 if (struct_map_to_clause == NULL)
8587 struct_map_to_clause = new hash_map<tree, tree>;
8588 struct_map_to_clause->put (decl, l);
8589 if (ptr)
8590 {
8591 enum gomp_map_kind mkind
8592 = code == OMP_TARGET_EXIT_DATA
8593 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8594 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8595 OMP_CLAUSE_MAP);
8596 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8597 OMP_CLAUSE_DECL (c2)
8598 = unshare_expr (OMP_CLAUSE_DECL (c));
8599 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
8600 OMP_CLAUSE_SIZE (c2)
8601 = TYPE_SIZE_UNIT (ptr_type_node);
8602 OMP_CLAUSE_CHAIN (l) = c2;
8603 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8604 {
8605 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8606 tree c3
8607 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8608 OMP_CLAUSE_MAP);
8609 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8610 OMP_CLAUSE_DECL (c3)
8611 = unshare_expr (OMP_CLAUSE_DECL (c4));
8612 OMP_CLAUSE_SIZE (c3)
8613 = TYPE_SIZE_UNIT (ptr_type_node);
8614 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8615 OMP_CLAUSE_CHAIN (c2) = c3;
8616 }
8617 *prev_list_p = l;
8618 prev_list_p = NULL;
8619 }
8620 else
8621 {
8622 OMP_CLAUSE_CHAIN (l) = c;
8623 *list_p = l;
8624 list_p = &OMP_CLAUSE_CHAIN (l);
8625 }
8626 if (orig_base != base && code == OMP_TARGET)
8627 {
8628 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8629 OMP_CLAUSE_MAP);
8630 enum gomp_map_kind mkind
8631 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
8632 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8633 OMP_CLAUSE_DECL (c2) = decl;
8634 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8635 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
8636 OMP_CLAUSE_CHAIN (l) = c2;
8637 }
8638 flags = GOVD_MAP | GOVD_EXPLICIT;
8639 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8640 flags |= GOVD_SEEN;
8641 goto do_add_decl;
8642 }
8643 else
8644 {
8645 tree *osc = struct_map_to_clause->get (decl);
8646 tree *sc = NULL, *scp = NULL;
8647 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8648 n->value |= GOVD_SEEN;
8649 poly_offset_int o1, o2;
8650 if (offset)
8651 o1 = wi::to_poly_offset (offset);
8652 else
8653 o1 = 0;
8654 if (maybe_ne (bitpos, 0))
8655 o1 += bits_to_bytes_round_down (bitpos);
8656 sc = &OMP_CLAUSE_CHAIN (*osc);
8657 if (*sc != c
8658 && (OMP_CLAUSE_MAP_KIND (*sc)
8659 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8660 sc = &OMP_CLAUSE_CHAIN (*sc);
8661 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
8662 if (ptr && sc == prev_list_p)
8663 break;
8664 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8665 != COMPONENT_REF
8666 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8667 != INDIRECT_REF)
8668 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8669 != ARRAY_REF))
8670 break;
8671 else
8672 {
8673 tree offset2;
8674 poly_int64 bitsize2, bitpos2;
8675 base = OMP_CLAUSE_DECL (*sc);
8676 if (TREE_CODE (base) == ARRAY_REF)
8677 {
8678 while (TREE_CODE (base) == ARRAY_REF)
8679 base = TREE_OPERAND (base, 0);
8680 if (TREE_CODE (base) != COMPONENT_REF
8681 || (TREE_CODE (TREE_TYPE (base))
8682 != ARRAY_TYPE))
8683 break;
8684 }
8685 else if (TREE_CODE (base) == INDIRECT_REF
8686 && (TREE_CODE (TREE_OPERAND (base, 0))
8687 == COMPONENT_REF)
8688 && (TREE_CODE (TREE_TYPE
8689 (TREE_OPERAND (base, 0)))
8690 == REFERENCE_TYPE))
8691 base = TREE_OPERAND (base, 0);
8692 base = get_inner_reference (base, &bitsize2,
8693 &bitpos2, &offset2,
8694 &mode, &unsignedp,
8695 &reversep, &volatilep);
8696 if ((TREE_CODE (base) == INDIRECT_REF
8697 || (TREE_CODE (base) == MEM_REF
8698 && integer_zerop (TREE_OPERAND (base,
8699 1))))
8700 && DECL_P (TREE_OPERAND (base, 0))
8701 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8702 0)))
8703 == REFERENCE_TYPE))
8704 base = TREE_OPERAND (base, 0);
8705 if (base != decl)
8706 break;
8707 if (scp)
8708 continue;
8709 gcc_assert (offset == NULL_TREE
8710 || poly_int_tree_p (offset));
8711 tree d1 = OMP_CLAUSE_DECL (*sc);
8712 tree d2 = OMP_CLAUSE_DECL (c);
8713 while (TREE_CODE (d1) == ARRAY_REF)
8714 d1 = TREE_OPERAND (d1, 0);
8715 while (TREE_CODE (d2) == ARRAY_REF)
8716 d2 = TREE_OPERAND (d2, 0);
8717 if (TREE_CODE (d1) == INDIRECT_REF)
8718 d1 = TREE_OPERAND (d1, 0);
8719 if (TREE_CODE (d2) == INDIRECT_REF)
8720 d2 = TREE_OPERAND (d2, 0);
8721 while (TREE_CODE (d1) == COMPONENT_REF)
8722 if (TREE_CODE (d2) == COMPONENT_REF
8723 && TREE_OPERAND (d1, 1)
8724 == TREE_OPERAND (d2, 1))
8725 {
8726 d1 = TREE_OPERAND (d1, 0);
8727 d2 = TREE_OPERAND (d2, 0);
8728 }
8729 else
8730 break;
8731 if (d1 == d2)
8732 {
8733 error_at (OMP_CLAUSE_LOCATION (c),
8734 "%qE appears more than once in map "
8735 "clauses", OMP_CLAUSE_DECL (c));
8736 remove = true;
8737 break;
8738 }
8739 if (offset2)
8740 o2 = wi::to_poly_offset (offset2);
8741 else
8742 o2 = 0;
8743 o2 += bits_to_bytes_round_down (bitpos2);
8744 if (maybe_lt (o1, o2)
8745 || (known_eq (o1, o2)
8746 && maybe_lt (bitpos, bitpos2)))
8747 {
8748 if (ptr)
8749 scp = sc;
8750 else
8751 break;
8752 }
8753 }
8754 if (remove)
8755 break;
8756 OMP_CLAUSE_SIZE (*osc)
8757 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8758 size_one_node);
8759 if (ptr)
8760 {
8761 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8762 OMP_CLAUSE_MAP);
8763 tree cl = NULL_TREE;
8764 enum gomp_map_kind mkind
8765 = code == OMP_TARGET_EXIT_DATA
8766 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8767 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8768 OMP_CLAUSE_DECL (c2)
8769 = unshare_expr (OMP_CLAUSE_DECL (c));
8770 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8771 OMP_CLAUSE_SIZE (c2)
8772 = TYPE_SIZE_UNIT (ptr_type_node);
8773 cl = scp ? *prev_list_p : c2;
8774 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8775 {
8776 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8777 tree c3
8778 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8779 OMP_CLAUSE_MAP);
8780 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8781 OMP_CLAUSE_DECL (c3)
8782 = unshare_expr (OMP_CLAUSE_DECL (c4));
8783 OMP_CLAUSE_SIZE (c3)
8784 = TYPE_SIZE_UNIT (ptr_type_node);
8785 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8786 if (!scp)
8787 OMP_CLAUSE_CHAIN (c2) = c3;
8788 else
8789 cl = c3;
8790 }
8791 if (scp)
8792 *scp = c2;
8793 if (sc == prev_list_p)
8794 {
8795 *sc = cl;
8796 prev_list_p = NULL;
8797 }
8798 else
8799 {
8800 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8801 list_p = prev_list_p;
8802 prev_list_p = NULL;
8803 OMP_CLAUSE_CHAIN (c) = *sc;
8804 *sc = cl;
8805 continue;
8806 }
8807 }
8808 else if (*sc != c)
8809 {
8810 *list_p = OMP_CLAUSE_CHAIN (c);
8811 OMP_CLAUSE_CHAIN (c) = *sc;
8812 *sc = c;
8813 continue;
8814 }
8815 }
8816 }
8817 if (!remove
8818 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8819 && OMP_CLAUSE_CHAIN (c)
8820 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8821 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8822 == GOMP_MAP_ALWAYS_POINTER))
8823 prev_list_p = list_p;
8824 break;
8825 }
8826 flags = GOVD_MAP | GOVD_EXPLICIT;
8827 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8828 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8829 flags |= GOVD_MAP_ALWAYS_TO;
8830 goto do_add;
8831
8832 case OMP_CLAUSE_DEPEND:
8833 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8834 {
8835 tree deps = OMP_CLAUSE_DECL (c);
8836 while (deps && TREE_CODE (deps) == TREE_LIST)
8837 {
8838 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8839 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8840 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8841 pre_p, NULL, is_gimple_val, fb_rvalue);
8842 deps = TREE_CHAIN (deps);
8843 }
8844 break;
8845 }
8846 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8847 break;
8848 if (handled_depend_iterators == -1)
8849 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
8850 if (handled_depend_iterators)
8851 {
8852 if (handled_depend_iterators == 2)
8853 remove = true;
8854 break;
8855 }
8856 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8857 {
8858 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8859 NULL, is_gimple_val, fb_rvalue);
8860 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8861 }
8862 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8863 {
8864 remove = true;
8865 break;
8866 }
8867 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8868 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8869 is_gimple_val, fb_rvalue) == GS_ERROR)
8870 {
8871 remove = true;
8872 break;
8873 }
8874 break;
8875
8876 case OMP_CLAUSE_TO:
8877 case OMP_CLAUSE_FROM:
8878 case OMP_CLAUSE__CACHE_:
8879 decl = OMP_CLAUSE_DECL (c);
8880 if (error_operand_p (decl))
8881 {
8882 remove = true;
8883 break;
8884 }
8885 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8886 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8887 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8888 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8889 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8890 {
8891 remove = true;
8892 break;
8893 }
8894 if (!DECL_P (decl))
8895 {
8896 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8897 NULL, is_gimple_lvalue, fb_lvalue)
8898 == GS_ERROR)
8899 {
8900 remove = true;
8901 break;
8902 }
8903 break;
8904 }
8905 goto do_notice;
8906
8907 case OMP_CLAUSE_USE_DEVICE_PTR:
8908 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8909 goto do_add;
8910 case OMP_CLAUSE_IS_DEVICE_PTR:
8911 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8912 goto do_add;
8913
8914 do_add:
8915 decl = OMP_CLAUSE_DECL (c);
8916 do_add_decl:
8917 if (error_operand_p (decl))
8918 {
8919 remove = true;
8920 break;
8921 }
8922 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8923 {
8924 tree t = omp_member_access_dummy_var (decl);
8925 if (t)
8926 {
8927 tree v = DECL_VALUE_EXPR (decl);
8928 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8929 if (outer_ctx)
8930 omp_notice_variable (outer_ctx, t, true);
8931 }
8932 }
8933 if (code == OACC_DATA
8934 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8935 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8936 flags |= GOVD_MAP_0LEN_ARRAY;
8937 omp_add_variable (ctx, decl, flags);
8938 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8939 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
8940 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8941 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8942 {
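	    /* Reductions with a placeholder (e.g. user-defined reductions):
	       register the placeholder decls as local to this context and
	       gimplify the initializer and combiner expressions into the
	       clause's GIMPLE sequences right away.  */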
8943 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8944 GOVD_LOCAL | GOVD_SEEN);
8945 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8946 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8947 find_decl_expr,
8948 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8949 NULL) == NULL_TREE)
8950 omp_add_variable (ctx,
8951 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8952 GOVD_LOCAL | GOVD_SEEN);
8953 gimplify_omp_ctxp = ctx;
8954 push_gimplify_context ();
8955
8956 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8957 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8958
8959 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8960 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8961 pop_gimplify_context
8962 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8963 push_gimplify_context ();
8964 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8965 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8966 pop_gimplify_context
8967 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8968 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8969 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8970
8971 gimplify_omp_ctxp = outer_ctx;
8972 }
8973 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8974 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8975 {
8976 gimplify_omp_ctxp = ctx;
8977 push_gimplify_context ();
8978 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8979 {
8980 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8981 NULL, NULL);
8982 TREE_SIDE_EFFECTS (bind) = 1;
8983 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8984 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8985 }
8986 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8987 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8988 pop_gimplify_context
8989 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8990 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8991
8992 gimplify_omp_ctxp = outer_ctx;
8993 }
8994 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8995 && OMP_CLAUSE_LINEAR_STMT (c))
8996 {
8997 gimplify_omp_ctxp = ctx;
8998 push_gimplify_context ();
8999 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9000 {
9001 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9002 NULL, NULL);
9003 TREE_SIDE_EFFECTS (bind) = 1;
9004 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9005 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9006 }
9007 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9008 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9009 pop_gimplify_context
9010 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9011 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9012
9013 gimplify_omp_ctxp = outer_ctx;
9014 }
9015 if (notice_outer)
9016 goto do_notice;
9017 break;
9018
9019 case OMP_CLAUSE_COPYIN:
9020 case OMP_CLAUSE_COPYPRIVATE:
9021 decl = OMP_CLAUSE_DECL (c);
9022 if (error_operand_p (decl))
9023 {
9024 remove = true;
9025 break;
9026 }
9027 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9028 && !remove
9029 && !omp_check_private (ctx, decl, true))
9030 {
9031 remove = true;
9032 if (is_global_var (decl))
9033 {
9034 if (DECL_THREAD_LOCAL_P (decl))
9035 remove = false;
9036 else if (DECL_HAS_VALUE_EXPR_P (decl))
9037 {
9038 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9039
9040 if (value
9041 && DECL_P (value)
9042 && DECL_THREAD_LOCAL_P (value))
9043 remove = false;
9044 }
9045 }
9046 if (remove)
9047 error_at (OMP_CLAUSE_LOCATION (c),
9048 "copyprivate variable %qE is not threadprivate"
9049 " or private in outer context", DECL_NAME (decl));
9050 }
9051 do_notice:
9052 if ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9053 && outer_ctx
9054 && outer_ctx->region_type == ORT_COMBINED_PARALLEL
9055 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9056 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9057 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE))
9058 {
9059 splay_tree_node on
9060 = splay_tree_lookup (outer_ctx->variables,
9061 (splay_tree_key)decl);
9062 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9063 {
9064 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9065 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9066 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9067 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9068 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9069 == POINTER_TYPE))))
9070 omp_firstprivatize_variable (outer_ctx, decl);
9071 else
9072 omp_add_variable (outer_ctx, decl,
9073 GOVD_SEEN | GOVD_SHARED);
9074 omp_notice_variable (outer_ctx, decl, true);
9075 }
9076 }
9077 if (outer_ctx)
9078 omp_notice_variable (outer_ctx, decl, true);
9079 if (check_non_private
9080 && region_type == ORT_WORKSHARE
9081 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9082 || decl == OMP_CLAUSE_DECL (c)
9083 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9084 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9085 == ADDR_EXPR
9086 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9087 == POINTER_PLUS_EXPR
9088 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9089 (OMP_CLAUSE_DECL (c), 0), 0))
9090 == ADDR_EXPR)))))
9091 && omp_check_private (ctx, decl, false))
9092 {
9093 error ("%s variable %qE is private in outer context",
9094 check_non_private, DECL_NAME (decl));
9095 remove = true;
9096 }
9097 break;
9098
9099 case OMP_CLAUSE_IF:
9100 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9101 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9102 {
9103 const char *p[2];
9104 for (int i = 0; i < 2; i++)
9105 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9106 {
9107 case VOID_CST: p[i] = "cancel"; break;
9108 case OMP_PARALLEL: p[i] = "parallel"; break;
9109 case OMP_SIMD: p[i] = "simd"; break;
9110 case OMP_TASK: p[i] = "task"; break;
9111 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9112 case OMP_TARGET_DATA: p[i] = "target data"; break;
9113 case OMP_TARGET: p[i] = "target"; break;
9114 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9115 case OMP_TARGET_ENTER_DATA:
9116 p[i] = "target enter data"; break;
9117 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9118 default: gcc_unreachable ();
9119 }
9120 error_at (OMP_CLAUSE_LOCATION (c),
9121 "expected %qs %<if%> clause modifier rather than %qs",
9122 p[0], p[1]);
9123 remove = true;
9124 }
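	  /* E.g. a mismatched directive-name modifier, say an
	     "if (task: cond)" clause that ends up on a parallel construct,
	     is diagnosed here, since the modifier must name the directive
	     the clause applies to.  */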
9125 /* Fall through. */
9126
9127 case OMP_CLAUSE_FINAL:
9128 OMP_CLAUSE_OPERAND (c, 0)
9129 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9130 /* Fall through. */
9131
9132 case OMP_CLAUSE_SCHEDULE:
9133 case OMP_CLAUSE_NUM_THREADS:
9134 case OMP_CLAUSE_NUM_TEAMS:
9135 case OMP_CLAUSE_THREAD_LIMIT:
9136 case OMP_CLAUSE_DIST_SCHEDULE:
9137 case OMP_CLAUSE_DEVICE:
9138 case OMP_CLAUSE_PRIORITY:
9139 case OMP_CLAUSE_GRAINSIZE:
9140 case OMP_CLAUSE_NUM_TASKS:
9141 case OMP_CLAUSE_HINT:
9142 case OMP_CLAUSE_ASYNC:
9143 case OMP_CLAUSE_WAIT:
9144 case OMP_CLAUSE_NUM_GANGS:
9145 case OMP_CLAUSE_NUM_WORKERS:
9146 case OMP_CLAUSE_VECTOR_LENGTH:
9147 case OMP_CLAUSE_WORKER:
9148 case OMP_CLAUSE_VECTOR:
9149 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9150 is_gimple_val, fb_rvalue) == GS_ERROR)
9151 remove = true;
9152 break;
9153
9154 case OMP_CLAUSE_GANG:
9155 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9156 is_gimple_val, fb_rvalue) == GS_ERROR)
9157 remove = true;
9158 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9159 is_gimple_val, fb_rvalue) == GS_ERROR)
9160 remove = true;
9161 break;
9162
9163 case OMP_CLAUSE_NOWAIT:
9164 nowait = 1;
9165 break;
9166
9167 case OMP_CLAUSE_ORDERED:
9168 case OMP_CLAUSE_UNTIED:
9169 case OMP_CLAUSE_COLLAPSE:
9170 case OMP_CLAUSE_TILE:
9171 case OMP_CLAUSE_AUTO:
9172 case OMP_CLAUSE_SEQ:
9173 case OMP_CLAUSE_INDEPENDENT:
9174 case OMP_CLAUSE_MERGEABLE:
9175 case OMP_CLAUSE_PROC_BIND:
9176 case OMP_CLAUSE_SAFELEN:
9177 case OMP_CLAUSE_SIMDLEN:
9178 case OMP_CLAUSE_NOGROUP:
9179 case OMP_CLAUSE_THREADS:
9180 case OMP_CLAUSE_SIMD:
9181 case OMP_CLAUSE_IF_PRESENT:
9182 case OMP_CLAUSE_FINALIZE:
9183 break;
9184
9185 case OMP_CLAUSE_DEFAULTMAP:
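	  /* Translate the clause's variable category into a range of
	     GDMK_* indexes, then record the requested implicit-mapping
	     behavior for each category in that range; an unspecified
	     category covers all of them.  */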
9186 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9187 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9188 {
9189 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9190 gdmkmin = GDMK_SCALAR;
9191 gdmkmax = GDMK_POINTER;
9192 break;
9193 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9194 gdmkmin = gdmkmax = GDMK_SCALAR;
9195 break;
9196 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9197 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9198 break;
9199 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9200 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9201 break;
9202 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9203 gdmkmin = gdmkmax = GDMK_POINTER;
9204 break;
9205 default:
9206 gcc_unreachable ();
9207 }
9208 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9209 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9210 {
9211 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9212 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9213 break;
9214 case OMP_CLAUSE_DEFAULTMAP_TO:
9215 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9216 break;
9217 case OMP_CLAUSE_DEFAULTMAP_FROM:
9218 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9219 break;
9220 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9221 ctx->defaultmap[gdmk] = GOVD_MAP;
9222 break;
9223 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9224 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9225 break;
9226 case OMP_CLAUSE_DEFAULTMAP_NONE:
9227 ctx->defaultmap[gdmk] = 0;
9228 break;
9229 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9230 switch (gdmk)
9231 {
9232 case GDMK_SCALAR:
9233 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9234 break;
9235 case GDMK_AGGREGATE:
9236 case GDMK_ALLOCATABLE:
9237 ctx->defaultmap[gdmk] = GOVD_MAP;
9238 break;
9239 case GDMK_POINTER:
9240 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9241 break;
9242 default:
9243 gcc_unreachable ();
9244 }
9245 break;
9246 default:
9247 gcc_unreachable ();
9248 }
9249 break;
9250
9251 case OMP_CLAUSE_ALIGNED:
9252 decl = OMP_CLAUSE_DECL (c);
9253 if (error_operand_p (decl))
9254 {
9255 remove = true;
9256 break;
9257 }
9258 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9259 is_gimple_val, fb_rvalue) == GS_ERROR)
9260 {
9261 remove = true;
9262 break;
9263 }
9264 if (!is_global_var (decl)
9265 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9266 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9267 break;
9268
9269 case OMP_CLAUSE_NONTEMPORAL:
9270 decl = OMP_CLAUSE_DECL (c);
9271 if (error_operand_p (decl))
9272 {
9273 remove = true;
9274 break;
9275 }
9276 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9277 break;
9278
9279 case OMP_CLAUSE_DEFAULT:
9280 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9281 break;
9282
9283 default:
9284 gcc_unreachable ();
9285 }
9286
9287 if (code == OACC_DATA
9288 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9289 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9290 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9291 remove = true;
9292 if (remove)
9293 *list_p = OMP_CLAUSE_CHAIN (c);
9294 else
9295 list_p = &OMP_CLAUSE_CHAIN (c);
9296 }
9297
9298 gimplify_omp_ctxp = ctx;
9299 if (struct_map_to_clause)
9300 delete struct_map_to_clause;
9301 }
9302
9303 /* Return true if DECL is a candidate for the shared-to-firstprivate
9304 optimization. We only consider non-addressable scalars that are
9305 not too big and are not references. */
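/* E.g. a plain "int" or "double" whose address is never taken qualifies,
while an array (not a gimple register type) or a reference does not. */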
9306
9307 static bool
9308 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9309 {
9310 if (TREE_ADDRESSABLE (decl))
9311 return false;
9312 tree type = TREE_TYPE (decl);
9313 if (!is_gimple_reg_type (type)
9314 || TREE_CODE (type) == REFERENCE_TYPE
9315 || TREE_ADDRESSABLE (type))
9316 return false;
9317 /* Don't optimize overly large decls, as each thread/task will have
9318 its own copy. */
9319 HOST_WIDE_INT len = int_size_in_bytes (type);
9320 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9321 return false;
9322 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9323 return false;
9324 return true;
9325 }
9326
9327 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9328 For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
9329 mark it as GOVD_WRITTEN in outer contexts. */
9330
9331 static void
9332 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9333 {
9334 for (; ctx; ctx = ctx->outer_context)
9335 {
9336 splay_tree_node n = splay_tree_lookup (ctx->variables,
9337 (splay_tree_key) decl);
9338 if (n == NULL)
9339 continue;
9340 else if (n->value & GOVD_SHARED)
9341 {
9342 n->value |= GOVD_WRITTEN;
9343 return;
9344 }
9345 else if (n->value & GOVD_DATA_SHARE_CLASS)
9346 return;
9347 }
9348 }
9349
9350 /* Helper callback for walk_gimple_seq to discover possible stores
9351 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9352 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9353 for those. */
9354
9355 static tree
9356 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9357 {
9358 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9359
9360 *walk_subtrees = 0;
9361 if (!wi->is_lhs)
9362 return NULL_TREE;
9363
9364 tree op = *tp;
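  /* Peel off component references and MEM_REFs of an ADDR_EXPR to reach
     the base decl, e.g. from "s.a[3]" down to "s".  */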
9365 do
9366 {
9367 if (handled_component_p (op))
9368 op = TREE_OPERAND (op, 0);
9369 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9370 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9371 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9372 else
9373 break;
9374 }
9375 while (1);
9376 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9377 return NULL_TREE;
9378
9379 omp_mark_stores (gimplify_omp_ctxp, op);
9380 return NULL_TREE;
9381 }
9382
9383 /* Helper callback for walk_gimple_seq to discover possible stores
9384 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9385 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9386 for those. */
9387
9388 static tree
9389 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9390 bool *handled_ops_p,
9391 struct walk_stmt_info *wi)
9392 {
9393 gimple *stmt = gsi_stmt (*gsi_p);
9394 switch (gimple_code (stmt))
9395 {
9396 /* Don't recurse into OpenMP constructs whose bodies
9397 gimplify_adjust_omp_clauses has already handled, except that
9398 gimple_omp_for_pre_body still needs to be walked. */
9399 case GIMPLE_OMP_FOR:
9400 *handled_ops_p = true;
9401 if (gimple_omp_for_pre_body (stmt))
9402 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9403 omp_find_stores_stmt, omp_find_stores_op, wi);
9404 break;
9405 case GIMPLE_OMP_PARALLEL:
9406 case GIMPLE_OMP_TASK:
9407 case GIMPLE_OMP_SECTIONS:
9408 case GIMPLE_OMP_SINGLE:
9409 case GIMPLE_OMP_TARGET:
9410 case GIMPLE_OMP_TEAMS:
9411 case GIMPLE_OMP_CRITICAL:
9412 *handled_ops_p = true;
9413 break;
9414 default:
9415 break;
9416 }
9417 return NULL_TREE;
9418 }
9419
9420 struct gimplify_adjust_omp_clauses_data
9421 {
9422 tree *list_p;
9423 gimple_seq *pre_p;
9424 };
9425
9426 /* Add the implicitly determined data-sharing or mapping clause for
9427 the variable in splay tree node N; unused variables are skipped. */
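/* E.g. a scalar merely referenced inside a parallel region with no
explicit clause receives an implicit OMP_CLAUSE_SHARED here, while on
a target region it would get an implicit firstprivate or map clause. */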
9428
9429 static int
9430 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
9431 {
9432 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
9433 gimple_seq *pre_p
9434 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
9435 tree decl = (tree) n->key;
9436 unsigned flags = n->value;
9437 enum omp_clause_code code;
9438 tree clause;
9439 bool private_debug;
9440
9441 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
9442 return 0;
9443 if ((flags & GOVD_SEEN) == 0)
9444 return 0;
9445 if (flags & GOVD_DEBUG_PRIVATE)
9446 {
9447 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
9448 private_debug = true;
9449 }
9450 else if (flags & GOVD_MAP)
9451 private_debug = false;
9452 else
9453 private_debug
9454 = lang_hooks.decls.omp_private_debug_clause (decl,
9455 !!(flags & GOVD_SHARED));
9456 if (private_debug)
9457 code = OMP_CLAUSE_PRIVATE;
9458 else if (flags & GOVD_MAP)
9459 {
9460 code = OMP_CLAUSE_MAP;
9461 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9462 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9463 {
9464 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
9465 return 0;
9466 }
9467 }
9468 else if (flags & GOVD_SHARED)
9469 {
9470 if (is_global_var (decl))
9471 {
9472 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9473 while (ctx != NULL)
9474 {
9475 splay_tree_node on
9476 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9477 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
9478 | GOVD_PRIVATE | GOVD_REDUCTION
9479 | GOVD_LINEAR | GOVD_MAP)) != 0)
9480 break;
9481 ctx = ctx->outer_context;
9482 }
9483 if (ctx == NULL)
9484 return 0;
9485 }
9486 code = OMP_CLAUSE_SHARED;
9487 }
9488 else if (flags & GOVD_PRIVATE)
9489 code = OMP_CLAUSE_PRIVATE;
9490 else if (flags & GOVD_FIRSTPRIVATE)
9491 {
9492 code = OMP_CLAUSE_FIRSTPRIVATE;
9493 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
9494 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9495 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9496 {
9497 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9498 "%<target%> construct", decl);
9499 return 0;
9500 }
9501 }
9502 else if (flags & GOVD_LASTPRIVATE)
9503 code = OMP_CLAUSE_LASTPRIVATE;
9504 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
9505 return 0;
9506 else
9507 gcc_unreachable ();
9508
9509 if (((flags & GOVD_LASTPRIVATE)
9510 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
9511 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9512 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9513
9514 tree chain = *list_p;
9515 clause = build_omp_clause (input_location, code);
9516 OMP_CLAUSE_DECL (clause) = decl;
9517 OMP_CLAUSE_CHAIN (clause) = chain;
9518 if (private_debug)
9519 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
9520 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
9521 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
9522 else if (code == OMP_CLAUSE_SHARED
9523 && (flags & GOVD_WRITTEN) == 0
9524 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9525 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
9526 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
9527 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
9528 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
9529 {
9530 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
9531 OMP_CLAUSE_DECL (nc) = decl;
9532 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9533 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9534 OMP_CLAUSE_DECL (clause)
9535 = build_simple_mem_ref_loc (input_location, decl);
9536 OMP_CLAUSE_DECL (clause)
9537 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
9538 build_int_cst (build_pointer_type (char_type_node), 0));
9539 OMP_CLAUSE_SIZE (clause) = size_zero_node;
9540 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9541 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
9542 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
9543 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9544 OMP_CLAUSE_CHAIN (nc) = chain;
9545 OMP_CLAUSE_CHAIN (clause) = nc;
9546 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9547 gimplify_omp_ctxp = ctx->outer_context;
9548 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
9549 pre_p, NULL, is_gimple_val, fb_rvalue);
9550 gimplify_omp_ctxp = ctx;
9551 }
9552 else if (code == OMP_CLAUSE_MAP)
9553 {
9554 int kind;
9555 /* Not all combinations of these GOVD_MAP flags are actually valid. */
9556 switch (flags & (GOVD_MAP_TO_ONLY
9557 | GOVD_MAP_FORCE
9558 | GOVD_MAP_FORCE_PRESENT
9559 | GOVD_MAP_ALLOC_ONLY
9560 | GOVD_MAP_FROM_ONLY))
9561 {
9562 case 0:
9563 kind = GOMP_MAP_TOFROM;
9564 break;
9565 case GOVD_MAP_FORCE:
9566 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
9567 break;
9568 case GOVD_MAP_TO_ONLY:
9569 kind = GOMP_MAP_TO;
9570 break;
9571 case GOVD_MAP_FROM_ONLY:
9572 kind = GOMP_MAP_FROM;
9573 break;
9574 case GOVD_MAP_ALLOC_ONLY:
9575 kind = GOMP_MAP_ALLOC;
9576 break;
9577 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
9578 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
9579 break;
9580 case GOVD_MAP_FORCE_PRESENT:
9581 kind = GOMP_MAP_FORCE_PRESENT;
9582 break;
9583 default:
9584 gcc_unreachable ();
9585 }
9586 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
9587 if (DECL_SIZE (decl)
9588 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9589 {
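	  /* A variable-sized decl was rewritten earlier to have a
	     DECL_VALUE_EXPR of the form *ptr; map the pointed-to storage
	     itself here and chain a pointer clause for the base pointer
	     below.  */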
9590 tree decl2 = DECL_VALUE_EXPR (decl);
9591 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9592 decl2 = TREE_OPERAND (decl2, 0);
9593 gcc_assert (DECL_P (decl2));
9594 tree mem = build_simple_mem_ref (decl2);
9595 OMP_CLAUSE_DECL (clause) = mem;
9596 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9597 if (gimplify_omp_ctxp->outer_context)
9598 {
9599 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9600 omp_notice_variable (ctx, decl2, true);
9601 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
9602 }
9603 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9604 OMP_CLAUSE_MAP);
9605 OMP_CLAUSE_DECL (nc) = decl;
9606 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9607 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
9608 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9609 else
9610 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9611 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9612 OMP_CLAUSE_CHAIN (clause) = nc;
9613 }
9614 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
9615 && lang_hooks.decls.omp_privatize_by_reference (decl))
9616 {
9617 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
9618 OMP_CLAUSE_SIZE (clause)
9619 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
9620 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9621 gimplify_omp_ctxp = ctx->outer_context;
9622 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
9623 pre_p, NULL, is_gimple_val, fb_rvalue);
9624 gimplify_omp_ctxp = ctx;
9625 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9626 OMP_CLAUSE_MAP);
9627 OMP_CLAUSE_DECL (nc) = decl;
9628 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9629 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
9630 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9631 OMP_CLAUSE_CHAIN (clause) = nc;
9632 }
9633 else
9634 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
9635 }
9636 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
9637 {
9638 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
9639 OMP_CLAUSE_DECL (nc) = decl;
9640 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
9641 OMP_CLAUSE_CHAIN (nc) = chain;
9642 OMP_CLAUSE_CHAIN (clause) = nc;
9643 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9644 gimplify_omp_ctxp = ctx->outer_context;
9645 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9646 gimplify_omp_ctxp = ctx;
9647 }
9648 *list_p = clause;
9649 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9650 gimplify_omp_ctxp = ctx->outer_context;
9651 lang_hooks.decls.omp_finish_clause (clause, pre_p);
9652 if (gimplify_omp_ctxp)
9653 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
9654 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
9655 && DECL_P (OMP_CLAUSE_SIZE (clause)))
9656 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
9657 true);
9658 gimplify_omp_ctxp = ctx;
9659 return 0;
9660 }
9661
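/* Adjust the clause list *LIST_P for the construct of kind CODE after
   its BODY has been gimplified: mark stores to optimizable shared decls,
   prune clauses for variables the region never uses, and append the
   implicitly determined data-sharing clauses.  */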
9662 static void
9663 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
9664 enum tree_code code)
9665 {
9666 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9667 tree c, decl;
9668
9669 if (body)
9670 {
9671 struct gimplify_omp_ctx *octx;
9672 for (octx = ctx; octx; octx = octx->outer_context)
9673 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
9674 break;
9675 if (octx)
9676 {
9677 struct walk_stmt_info wi;
9678 memset (&wi, 0, sizeof (wi));
9679 walk_gimple_seq (body, omp_find_stores_stmt,
9680 omp_find_stores_op, &wi);
9681 }
9682 }
9683 while ((c = *list_p) != NULL)
9684 {
9685 splay_tree_node n;
9686 bool remove = false;
9687
9688 switch (OMP_CLAUSE_CODE (c))
9689 {
9690 case OMP_CLAUSE_FIRSTPRIVATE:
9691 if ((ctx->region_type & ORT_TARGET)
9692 && (ctx->region_type & ORT_ACC) == 0
9693 && TYPE_ATOMIC (strip_array_types
9694 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
9695 {
9696 error_at (OMP_CLAUSE_LOCATION (c),
9697 "%<_Atomic%> %qD in %<firstprivate%> clause on "
9698 "%<target%> construct", OMP_CLAUSE_DECL (c));
9699 remove = true;
9700 break;
9701 }
9702 /* FALLTHRU */
9703 case OMP_CLAUSE_PRIVATE:
9704 case OMP_CLAUSE_SHARED:
9705 case OMP_CLAUSE_LINEAR:
9706 decl = OMP_CLAUSE_DECL (c);
9707 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9708 remove = !(n->value & GOVD_SEEN);
9709 if (! remove)
9710 {
9711 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
9712 if ((n->value & GOVD_DEBUG_PRIVATE)
9713 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
9714 {
9715 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
9716 || ((n->value & GOVD_DATA_SHARE_CLASS)
9717 == GOVD_SHARED));
9718 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
9719 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
9720 }
9721 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9722 && (n->value & GOVD_WRITTEN) == 0
9723 && DECL_P (decl)
9724 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9725 OMP_CLAUSE_SHARED_READONLY (c) = 1;
9726 else if (DECL_P (decl)
9727 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9728 && (n->value & GOVD_WRITTEN) != 0)
9729 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9730 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
9731 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9732 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9733 }
9734 break;
9735
9736 case OMP_CLAUSE_LASTPRIVATE:
9737 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
9738 accurately reflect the presence of a FIRSTPRIVATE clause. */
9739 decl = OMP_CLAUSE_DECL (c);
9740 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9741 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
9742 = (n->value & GOVD_FIRSTPRIVATE) != 0;
9743 if (code == OMP_DISTRIBUTE
9744 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9745 {
9746 remove = true;
9747 error_at (OMP_CLAUSE_LOCATION (c),
9748 "same variable used in %<firstprivate%> and "
9749 "%<lastprivate%> clauses on %<distribute%> "
9750 "construct");
9751 }
9752 if (!remove
9753 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9754 && DECL_P (decl)
9755 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9756 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9757 break;
9758
9759 case OMP_CLAUSE_ALIGNED:
9760 decl = OMP_CLAUSE_DECL (c);
9761 if (!is_global_var (decl))
9762 {
9763 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9764 remove = n == NULL || !(n->value & GOVD_SEEN);
9765 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9766 {
9767 struct gimplify_omp_ctx *octx;
9768 if (n != NULL
9769 && (n->value & (GOVD_DATA_SHARE_CLASS
9770 & ~GOVD_FIRSTPRIVATE)))
9771 remove = true;
9772 else
9773 for (octx = ctx->outer_context; octx;
9774 octx = octx->outer_context)
9775 {
9776 n = splay_tree_lookup (octx->variables,
9777 (splay_tree_key) decl);
9778 if (n == NULL)
9779 continue;
9780 if (n->value & GOVD_LOCAL)
9781 break;
9782 /* We have to avoid assigning a shared variable
9783 to itself when trying to add
9784 __builtin_assume_aligned. */
9785 if (n->value & GOVD_SHARED)
9786 {
9787 remove = true;
9788 break;
9789 }
9790 }
9791 }
9792 }
9793 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
9794 {
9795 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9796 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9797 remove = true;
9798 }
9799 break;
9800
9801 case OMP_CLAUSE_NONTEMPORAL:
9802 decl = OMP_CLAUSE_DECL (c);
9803 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9804 remove = n == NULL || !(n->value & GOVD_SEEN);
9805 break;
9806
9807 case OMP_CLAUSE_MAP:
9808 if (code == OMP_TARGET_EXIT_DATA
9809 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
9810 {
9811 remove = true;
9812 break;
9813 }
9814 decl = OMP_CLAUSE_DECL (c);
9815 /* Data clauses associated with acc parallel reductions must be
9816 compatible with present_or_copy. Warn and adjust the clause
9817 if that is not the case. */
9818 if (ctx->region_type == ORT_ACC_PARALLEL)
9819 {
9820 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9821 n = NULL;
9822
9823 if (DECL_P (t))
9824 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9825
9826 if (n && (n->value & GOVD_REDUCTION))
9827 {
9828 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9829
9830 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9831 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9832 && kind != GOMP_MAP_FORCE_PRESENT
9833 && kind != GOMP_MAP_POINTER)
9834 {
9835 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9836 "incompatible data clause with reduction "
9837 "on %qE; promoting to present_or_copy",
9838 DECL_NAME (t));
9839 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9840 }
9841 }
9842 }
9843 if (!DECL_P (decl))
9844 {
9845 if ((ctx->region_type & ORT_TARGET) != 0
9846 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9847 {
9848 if (TREE_CODE (decl) == INDIRECT_REF
9849 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9850 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9851 == REFERENCE_TYPE))
9852 decl = TREE_OPERAND (decl, 0);
9853 if (TREE_CODE (decl) == COMPONENT_REF)
9854 {
9855 while (TREE_CODE (decl) == COMPONENT_REF)
9856 decl = TREE_OPERAND (decl, 0);
9857 if (DECL_P (decl))
9858 {
9859 n = splay_tree_lookup (ctx->variables,
9860 (splay_tree_key) decl);
9861 if (!(n->value & GOVD_SEEN))
9862 remove = true;
9863 }
9864 }
9865 }
9866 break;
9867 }
9868 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9869 if ((ctx->region_type & ORT_TARGET) != 0
9870 && !(n->value & GOVD_SEEN)
9871 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9872 && (!is_global_var (decl)
9873 || !lookup_attribute ("omp declare target link",
9874 DECL_ATTRIBUTES (decl))))
9875 {
9876 remove = true;
9877 /* For struct element mappings, if the struct is never referenced
9878 in the target block and none of the mappings has an always
9879 modifier, remove all the struct element mappings, which
9880 immediately follow the GOMP_MAP_STRUCT map clause. */
9881 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9882 {
9883 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9884 while (cnt--)
9885 OMP_CLAUSE_CHAIN (c)
9886 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9887 }
9888 }
9889 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9890 && code == OMP_TARGET_EXIT_DATA)
9891 remove = true;
9892 else if (DECL_SIZE (decl)
9893 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9894 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9895 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9896 && (OMP_CLAUSE_MAP_KIND (c)
9897 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9898 {
9899 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9900 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9901 INTEGER_CST. */
9902 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9903
9904 tree decl2 = DECL_VALUE_EXPR (decl);
9905 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9906 decl2 = TREE_OPERAND (decl2, 0);
9907 gcc_assert (DECL_P (decl2));
9908 tree mem = build_simple_mem_ref (decl2);
9909 OMP_CLAUSE_DECL (c) = mem;
9910 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9911 if (ctx->outer_context)
9912 {
9913 omp_notice_variable (ctx->outer_context, decl2, true);
9914 omp_notice_variable (ctx->outer_context,
9915 OMP_CLAUSE_SIZE (c), true);
9916 }
9917 if (((ctx->region_type & ORT_TARGET) != 0
9918 || !ctx->target_firstprivatize_array_bases)
9919 && ((n->value & GOVD_SEEN) == 0
9920 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9921 {
9922 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9923 OMP_CLAUSE_MAP);
9924 OMP_CLAUSE_DECL (nc) = decl;
9925 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9926 if (ctx->target_firstprivatize_array_bases)
9927 OMP_CLAUSE_SET_MAP_KIND (nc,
9928 GOMP_MAP_FIRSTPRIVATE_POINTER);
9929 else
9930 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9931 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9932 OMP_CLAUSE_CHAIN (c) = nc;
9933 c = nc;
9934 }
9935 }
9936 else
9937 {
9938 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9939 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9940 gcc_assert ((n->value & GOVD_SEEN) == 0
9941 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9942 == 0));
9943 }
9944 break;
9945
9946 case OMP_CLAUSE_TO:
9947 case OMP_CLAUSE_FROM:
9948 case OMP_CLAUSE__CACHE_:
9949 decl = OMP_CLAUSE_DECL (c);
9950 if (!DECL_P (decl))
9951 break;
9952 if (DECL_SIZE (decl)
9953 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9954 {
9955 tree decl2 = DECL_VALUE_EXPR (decl);
9956 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9957 decl2 = TREE_OPERAND (decl2, 0);
9958 gcc_assert (DECL_P (decl2));
9959 tree mem = build_simple_mem_ref (decl2);
9960 OMP_CLAUSE_DECL (c) = mem;
9961 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9962 if (ctx->outer_context)
9963 {
9964 omp_notice_variable (ctx->outer_context, decl2, true);
9965 omp_notice_variable (ctx->outer_context,
9966 OMP_CLAUSE_SIZE (c), true);
9967 }
9968 }
9969 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9970 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9971 break;
9972
9973 case OMP_CLAUSE_REDUCTION:
9974 case OMP_CLAUSE_IN_REDUCTION:
9975 case OMP_CLAUSE_TASK_REDUCTION:
9976 decl = OMP_CLAUSE_DECL (c);
9977 /* OpenACC reductions need a present_or_copy data clause.
9978 Add one if necessary. Emit an error when the reduction is private. */
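	  /* E.g. "#pragma acc parallel reduction (+:sum)" with no data
	     clause for SUM gets an implicit GOMP_MAP_TOFROM map added
	     here.  */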
9979 if (ctx->region_type == ORT_ACC_PARALLEL)
9980 {
9981 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9982 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9983 {
9984 remove = true;
9985 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9986 "reduction on %qE", DECL_NAME (decl));
9987 }
9988 else if ((n->value & GOVD_MAP) == 0)
9989 {
9990 tree next = OMP_CLAUSE_CHAIN (c);
9991 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9992 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9993 OMP_CLAUSE_DECL (nc) = decl;
9994 OMP_CLAUSE_CHAIN (c) = nc;
9995 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9996 while (1)
9997 {
9998 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9999 if (OMP_CLAUSE_CHAIN (nc) == NULL)
10000 break;
10001 nc = OMP_CLAUSE_CHAIN (nc);
10002 }
10003 OMP_CLAUSE_CHAIN (nc) = next;
10004 n->value |= GOVD_MAP;
10005 }
10006 }
10007 if (DECL_P (decl)
10008 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10009 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10010 break;
10011 case OMP_CLAUSE_COPYIN:
10012 case OMP_CLAUSE_COPYPRIVATE:
10013 case OMP_CLAUSE_IF:
10014 case OMP_CLAUSE_NUM_THREADS:
10015 case OMP_CLAUSE_NUM_TEAMS:
10016 case OMP_CLAUSE_THREAD_LIMIT:
10017 case OMP_CLAUSE_DIST_SCHEDULE:
10018 case OMP_CLAUSE_DEVICE:
10019 case OMP_CLAUSE_SCHEDULE:
10020 case OMP_CLAUSE_NOWAIT:
10021 case OMP_CLAUSE_ORDERED:
10022 case OMP_CLAUSE_DEFAULT:
10023 case OMP_CLAUSE_UNTIED:
10024 case OMP_CLAUSE_COLLAPSE:
10025 case OMP_CLAUSE_FINAL:
10026 case OMP_CLAUSE_MERGEABLE:
10027 case OMP_CLAUSE_PROC_BIND:
10028 case OMP_CLAUSE_SAFELEN:
10029 case OMP_CLAUSE_SIMDLEN:
10030 case OMP_CLAUSE_DEPEND:
10031 case OMP_CLAUSE_PRIORITY:
10032 case OMP_CLAUSE_GRAINSIZE:
10033 case OMP_CLAUSE_NUM_TASKS:
10034 case OMP_CLAUSE_NOGROUP:
10035 case OMP_CLAUSE_THREADS:
10036 case OMP_CLAUSE_SIMD:
10037 case OMP_CLAUSE_HINT:
10038 case OMP_CLAUSE_DEFAULTMAP:
10039 case OMP_CLAUSE_USE_DEVICE_PTR:
10040 case OMP_CLAUSE_IS_DEVICE_PTR:
10041 case OMP_CLAUSE_ASYNC:
10042 case OMP_CLAUSE_WAIT:
10043 case OMP_CLAUSE_INDEPENDENT:
10044 case OMP_CLAUSE_NUM_GANGS:
10045 case OMP_CLAUSE_NUM_WORKERS:
10046 case OMP_CLAUSE_VECTOR_LENGTH:
10047 case OMP_CLAUSE_GANG:
10048 case OMP_CLAUSE_WORKER:
10049 case OMP_CLAUSE_VECTOR:
10050 case OMP_CLAUSE_AUTO:
10051 case OMP_CLAUSE_SEQ:
10052 case OMP_CLAUSE_TILE:
10053 case OMP_CLAUSE_IF_PRESENT:
10054 case OMP_CLAUSE_FINALIZE:
10055 break;
10056
10057 default:
10058 gcc_unreachable ();
10059 }
10060
10061 if (remove)
10062 *list_p = OMP_CLAUSE_CHAIN (c);
10063 else
10064 list_p = &OMP_CLAUSE_CHAIN (c);
10065 }
10066
10067 /* Add in any implicit data sharing. */
10068 struct gimplify_adjust_omp_clauses_data data;
10069 data.list_p = list_p;
10070 data.pre_p = pre_p;
10071 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
10072
10073 gimplify_omp_ctxp = ctx->outer_context;
10074 delete_omp_context (ctx);
10075 }
10076
10077 /* Gimplify OACC_CACHE. */
10078
10079 static void
10080 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10081 {
10082 tree expr = *expr_p;
10083
10084 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
10085 OACC_CACHE);
10086 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10087 OACC_CACHE);
10088
10089 /* TODO: Do something sensible with this information. */
10090
10091 *expr_p = NULL_TREE;
10092 }
10093
10094 /* Helper function of gimplify_oacc_declare. If required, translate
10095 the 'kind' in CLAUSE into an 'entry' kind and an 'exit' kind. The
10096 entry kind replaces the one in CLAUSE, while the exit kind is used
10097 in a new omp_clause and returned to the caller. */
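/* E.g. a GOMP_MAP_TOFROM ("copy") mapping is rewritten to GOMP_MAP_TO
   for the entry point, and a new GOMP_MAP_FROM clause is returned for
   the exit point.  */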
10098
10099 static tree
10100 gimplify_oacc_declare_1 (tree clause)
10101 {
10102 HOST_WIDE_INT kind, new_op;
10103 bool ret = false;
10104 tree c = NULL;
10105
10106 kind = OMP_CLAUSE_MAP_KIND (clause);
10107
10108 switch (kind)
10109 {
10110 case GOMP_MAP_ALLOC:
10111 new_op = GOMP_MAP_RELEASE;
10112 ret = true;
10113 break;
10114
10115 case GOMP_MAP_FROM:
10116 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10117 new_op = GOMP_MAP_FROM;
10118 ret = true;
10119 break;
10120
10121 case GOMP_MAP_TOFROM:
10122 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10123 new_op = GOMP_MAP_FROM;
10124 ret = true;
10125 break;
10126
10127 case GOMP_MAP_DEVICE_RESIDENT:
10128 case GOMP_MAP_FORCE_DEVICEPTR:
10129 case GOMP_MAP_FORCE_PRESENT:
10130 case GOMP_MAP_LINK:
10131 case GOMP_MAP_POINTER:
10132 case GOMP_MAP_TO:
10133 break;
10134
10135 default:
10136 gcc_unreachable ();
10137 break;
10138 }
10139
10140 if (ret)
10141 {
10142 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10143 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10144 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10145 }
10146
10147 return c;
10148 }
10149
10150 /* Gimplify OACC_DECLARE. */
10151
10152 static void
10153 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10154 {
10155 tree expr = *expr_p;
10156 gomp_target *stmt;
10157 tree clauses, t, decl;
10158
10159 clauses = OACC_DECLARE_CLAUSES (expr);
10160
10161 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
10162 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
10163
10164 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10165 {
10166 decl = OMP_CLAUSE_DECL (t);
10167
10168 if (TREE_CODE (decl) == MEM_REF)
10169 decl = TREE_OPERAND (decl, 0);
10170
10171 if (VAR_P (decl) && !is_oacc_declared (decl))
10172 {
10173 tree attr = get_identifier ("oacc declare target");
10174 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10175 DECL_ATTRIBUTES (decl));
10176 }
10177
10178 if (VAR_P (decl)
10179 && !is_global_var (decl)
10180 && DECL_CONTEXT (decl) == current_function_decl)
10181 {
10182 tree c = gimplify_oacc_declare_1 (t);
10183 if (c)
10184 {
10185 if (oacc_declare_returns == NULL)
10186 oacc_declare_returns = new hash_map<tree, tree>;
10187
10188 oacc_declare_returns->put (decl, c);
10189 }
10190 }
10191
10192 if (gimplify_omp_ctxp)
10193 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
10194 }
10195
10196 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10197 clauses);
10198
10199 gimplify_seq_add_stmt (pre_p, stmt);
10200
10201 *expr_p = NULL_TREE;
10202 }
10203
10204 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10205 gimplification of the body, as well as scanning the body for used
10206 variables. We need to do this scan now, because variable-sized
10207 decls will be decomposed during gimplification. */
10208
10209 static void
10210 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
10211 {
10212 tree expr = *expr_p;
10213 gimple *g;
10214 gimple_seq body = NULL;
10215
10216 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10217 OMP_PARALLEL_COMBINED (expr)
10218 ? ORT_COMBINED_PARALLEL
10219 : ORT_PARALLEL, OMP_PARALLEL);
10220
10221 push_gimplify_context ();
10222
10223 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10224 if (gimple_code (g) == GIMPLE_BIND)
10225 pop_gimplify_context (g);
10226 else
10227 pop_gimplify_context (NULL);
10228
10229 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
10230 OMP_PARALLEL);
10231
10232 g = gimple_build_omp_parallel (body,
10233 OMP_PARALLEL_CLAUSES (expr),
10234 NULL_TREE, NULL_TREE);
10235 if (OMP_PARALLEL_COMBINED (expr))
10236 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10237 gimplify_seq_add_stmt (pre_p, g);
10238 *expr_p = NULL_TREE;
10239 }
10240
10241 /* Gimplify the contents of an OMP_TASK statement. This involves
10242 gimplification of the body, as well as scanning the body for used
10243 variables. We need to do this scan now, because variable-sized
10244 decls will be decomposed during gimplification. */
10245
10246 static void
10247 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
10248 {
10249 tree expr = *expr_p;
10250 gimple *g;
10251 gimple_seq body = NULL;
10252
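  /* An OMP_TASK without a body represents "#pragma omp taskwait" with
     depend clauses; a mutexinoutset dependence kind is not allowed
     there.  */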
10253 if (OMP_TASK_BODY (expr) == NULL_TREE)
10254 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10255 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10256 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10257 {
10258 error_at (OMP_CLAUSE_LOCATION (c),
10259 "%<mutexinoutset%> kind in %<depend%> clause on a "
10260 "%<taskwait%> construct");
10261 break;
10262 }
10263
10264 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
10265 omp_find_clause (OMP_TASK_CLAUSES (expr),
10266 OMP_CLAUSE_UNTIED)
10267 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
10268
10269 if (OMP_TASK_BODY (expr))
10270 {
10271 push_gimplify_context ();
10272
10273 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10274 if (gimple_code (g) == GIMPLE_BIND)
10275 pop_gimplify_context (g);
10276 else
10277 pop_gimplify_context (NULL);
10278 }
10279
10280 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10281 OMP_TASK);
10282
10283 g = gimple_build_omp_task (body,
10284 OMP_TASK_CLAUSES (expr),
10285 NULL_TREE, NULL_TREE,
10286 NULL_TREE, NULL_TREE, NULL_TREE);
10287 if (OMP_TASK_BODY (expr) == NULL_TREE)
10288 gimple_omp_task_set_taskwait_p (g, true);
10289 gimplify_seq_add_stmt (pre_p, g);
10290 *expr_p = NULL_TREE;
10291 }
10292
10293 /* Helper function of gimplify_omp_for: find the OMP_FOR or OMP_SIMD
10294 with non-NULL OMP_FOR_INIT. Also fill in the pdata array:
10295 pdata[0] is non-NULL if there is anything non-trivial in between,
10296 pdata[1] is the address of the OMP_PARALLEL in between if any,
10297 pdata[2] is the address of the OMP_FOR in between if any, and
10298 pdata[3] is the address of the inner OMP_FOR/OMP_SIMD. */
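/* E.g. for a combined "#pragma omp distribute parallel for", the outer
   OMP_DISTRIBUTE has a NULL OMP_FOR_INIT and this walk finds the inner
   OMP_FOR that carries the actual loop.  */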
10299
10300 static tree
10301 find_combined_omp_for (tree *tp, int *walk_subtrees, void *data)
10302 {
10303 tree **pdata = (tree **) data;
10304 *walk_subtrees = 0;
10305 switch (TREE_CODE (*tp))
10306 {
10307 case OMP_FOR:
10308 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10309 {
10310 pdata[3] = tp;
10311 return *tp;
10312 }
10313 pdata[2] = tp;
10314 *walk_subtrees = 1;
10315 break;
10316 case OMP_SIMD:
10317 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10318 {
10319 pdata[3] = tp;
10320 return *tp;
10321 }
10322 break;
10323 case BIND_EXPR:
10324 if (BIND_EXPR_VARS (*tp)
10325 || (BIND_EXPR_BLOCK (*tp)
10326 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp))))
10327 pdata[0] = tp;
10328 *walk_subtrees = 1;
10329 break;
10330 case STATEMENT_LIST:
10331 if (!tsi_one_before_end_p (tsi_start (*tp)))
10332 pdata[0] = tp;
10333 *walk_subtrees = 1;
10334 break;
10335 case TRY_FINALLY_EXPR:
10336 pdata[0] = tp;
10337 *walk_subtrees = 1;
10338 break;
10339 case OMP_PARALLEL:
10340 pdata[1] = tp;
10341 *walk_subtrees = 1;
10342 break;
10343 default:
10344 break;
10345 }
10346 return NULL_TREE;
10347 }
10348
10349 /* Gimplify the gross structure of an OMP_FOR statement. */
10350
10351 static enum gimplify_status
10352 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
10353 {
10354 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
10355 enum gimplify_status ret = GS_ALL_DONE;
10356 enum gimplify_status tret;
10357 gomp_for *gfor;
10358 gimple_seq for_body, for_pre_body;
10359 int i;
10360 bitmap has_decl_expr = NULL;
10361 enum omp_region_type ort = ORT_WORKSHARE;
10362
10363 orig_for_stmt = for_stmt = *expr_p;
10364
10365 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10366 {
10367 tree *data[4] = { NULL, NULL, NULL, NULL };
10368 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
10369 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
10370 find_combined_omp_for, data, NULL);
10371 if (inner_for_stmt == NULL_TREE)
10372 {
10373 gcc_assert (seen_error ());
10374 *expr_p = NULL_TREE;
10375 return GS_ERROR;
10376 }
10377 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
10378 {
10379 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
10380 &OMP_FOR_PRE_BODY (for_stmt));
10381 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
10382 }
10383 if (OMP_FOR_PRE_BODY (inner_for_stmt))
10384 {
10385 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
10386 &OMP_FOR_PRE_BODY (for_stmt));
10387 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
10388 }
10389
10390 if (data[0])
10391 {
10392 /* We have some statements or variable declarations in between
10393 the composite construct directives. Move them around the
10394 inner_for_stmt. */
10395 data[0] = expr_p;
10396 for (i = 0; i < 3; i++)
10397 if (data[i])
10398 {
10399 tree t = *data[i];
10400 if (i < 2 && data[i + 1] == &OMP_BODY (t))
10401 data[i + 1] = data[i];
10402 *data[i] = OMP_BODY (t);
10403 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
10404 NULL_TREE, make_node (BLOCK));
10405 OMP_BODY (t) = body;
10406 append_to_statement_list_force (inner_for_stmt,
10407 &BIND_EXPR_BODY (body));
10408 *data[3] = t;
10409 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
10410 gcc_assert (*data[3] == inner_for_stmt);
10411 }
10412 return GS_OK;
10413 }
10414
10415 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10416 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10417 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10418 i)) == TREE_LIST
10419 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10420 i)))
10421 {
10422 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10423 /* Class iterators aren't allowed on OMP_SIMD, so the only
10424 case we need to solve is distribute parallel for. */
10425 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
10426 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
10427 && data[1]);
10428 tree orig_decl = TREE_PURPOSE (orig);
10429 tree last = TREE_VALUE (orig);
10430 tree *pc;
10431 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
10432 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
10433 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
10434 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
10435 && OMP_CLAUSE_DECL (*pc) == orig_decl)
10436 break;
10437 if (*pc == NULL_TREE)
10438 ;
10439 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
10440 {
10441 /* A private clause will appear only on inner_for_stmt.
10442 Change it into firstprivate, and add a private clause
10443 on for_stmt. */
10444 tree c = copy_node (*pc);
10445 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10446 OMP_FOR_CLAUSES (for_stmt) = c;
10447 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
10448 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10449 }
10450 else
10451 {
10452 /* A lastprivate clause will appear on both inner_for_stmt
10453 and for_stmt. Add a firstprivate clause to
10454 inner_for_stmt. */
10455 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
10456 OMP_CLAUSE_FIRSTPRIVATE);
10457 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
10458 OMP_CLAUSE_CHAIN (c) = *pc;
10459 *pc = c;
10460 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10461 }
10462 tree c = build_omp_clause (UNKNOWN_LOCATION,
10463 OMP_CLAUSE_FIRSTPRIVATE);
10464 OMP_CLAUSE_DECL (c) = last;
10465 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10466 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10467 c = build_omp_clause (UNKNOWN_LOCATION,
10468 *pc ? OMP_CLAUSE_SHARED
10469 : OMP_CLAUSE_FIRSTPRIVATE);
10470 OMP_CLAUSE_DECL (c) = orig_decl;
10471 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10472 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10473 }
10474 /* Similarly, take care of C++ range-for temporaries; those should
10475 be firstprivate on the OMP_PARALLEL, if any. */
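      /* E.g. a range-based "for (auto &x : vec)" materializes __for_range
	 and __for_end temporaries that must be firstprivate on the
	 enclosing parallel.  */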
10476 if (data[1])
10477 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10478 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10479 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10480 i)) == TREE_LIST
10481 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10482 i)))
10483 {
10484 tree orig
10485 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10486 tree v = TREE_CHAIN (orig);
10487 tree c = build_omp_clause (UNKNOWN_LOCATION,
10488 OMP_CLAUSE_FIRSTPRIVATE);
10489 /* First add firstprivate clause for the __for_end artificial
10490 decl. */
10491 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
10492 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10493 == REFERENCE_TYPE)
10494 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10495 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10496 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10497 if (TREE_VEC_ELT (v, 0))
10498 {
10499 /* And now the same for __for_range artificial decl if it
10500 exists. */
10501 c = build_omp_clause (UNKNOWN_LOCATION,
10502 OMP_CLAUSE_FIRSTPRIVATE);
10503 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
10504 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10505 == REFERENCE_TYPE)
10506 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10507 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10508 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10509 }
10510 }
10511 }
10512
10513 switch (TREE_CODE (for_stmt))
10514 {
10515 case OMP_FOR:
10516 case OMP_DISTRIBUTE:
10517 break;
10518 case OACC_LOOP:
10519 ort = ORT_ACC;
10520 break;
10521 case OMP_TASKLOOP:
10522 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
10523 ort = ORT_UNTIED_TASKLOOP;
10524 else
10525 ort = ORT_TASKLOOP;
10526 break;
10527 case OMP_SIMD:
10528 ort = ORT_SIMD;
10529 break;
10530 default:
10531 gcc_unreachable ();
10532 }
10533
10534 /* Set the OMP_CLAUSE_LINEAR_NO_COPYIN flag on an explicit linear
10535 clause for the IV. */
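  /* E.g. for "#pragma omp simd linear (i)" where I is the single loop
     iterator, copying the outer value of I in on entry is unnecessary,
     since the construct assigns it.  */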
10536 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10537 {
10538 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
10539 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10540 decl = TREE_OPERAND (t, 0);
10541 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10542 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10543 && OMP_CLAUSE_DECL (c) == decl)
10544 {
10545 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10546 break;
10547 }
10548 }
10549
10550 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
10551 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
10552 TREE_CODE (for_stmt));
10553
10554 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
10555 gimplify_omp_ctxp->distribute = true;
10556
10557 /* Handle OMP_FOR_INIT. */
10558 for_pre_body = NULL;
10559 if ((ort == ORT_SIMD
10560 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
10561 && OMP_FOR_PRE_BODY (for_stmt))
10562 {
10563 has_decl_expr = BITMAP_ALLOC (NULL);
10564 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
10565 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
10566 == VAR_DECL)
10567 {
10568 t = OMP_FOR_PRE_BODY (for_stmt);
10569 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10570 }
10571 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
10572 {
10573 tree_stmt_iterator si;
10574 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
10575 tsi_next (&si))
10576 {
10577 t = tsi_stmt (si);
10578 if (TREE_CODE (t) == DECL_EXPR
10579 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
10580 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10581 }
10582 }
10583 }
10584 if (OMP_FOR_PRE_BODY (for_stmt))
10585 {
10586 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
10587 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10588 else
10589 {
10590 struct gimplify_omp_ctx ctx;
10591 memset (&ctx, 0, sizeof (ctx));
10592 ctx.region_type = ORT_NONE;
10593 gimplify_omp_ctxp = &ctx;
10594 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10595 gimplify_omp_ctxp = NULL;
10596 }
10597 }
10598 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
10599
10600 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10601 for_stmt = inner_for_stmt;
10602
10603 /* For taskloop, we need to gimplify the start, end and step before the
10604 taskloop, outside of the taskloop omp context. */
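/* E.g. for "for (i = start; i < end; i += step)", non-constant START,
   END and STEP are evaluated into temporaries here and made
   firstprivate on the taskloop.  */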
10605 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10606 {
10607 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10608 {
10609 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10610 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10611 {
10612 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10613 TREE_OPERAND (t, 1)
10614 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10615 gimple_seq_empty_p (for_pre_body)
10616 ? pre_p : &for_pre_body, NULL,
10617 false);
10618 /* A reference-to-pointer conversion is considered useless,
10619 but is significant for the firstprivate clause. Force it
10620 here. */
10621 if (TREE_CODE (type) == POINTER_TYPE
10622 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10623 == REFERENCE_TYPE))
10624 {
10625 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10626 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10627 TREE_OPERAND (t, 1));
10628 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10629 ? pre_p : &for_pre_body);
10630 TREE_OPERAND (t, 1) = v;
10631 }
10632 tree c = build_omp_clause (input_location,
10633 OMP_CLAUSE_FIRSTPRIVATE);
10634 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10635 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10636 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10637 }
10638
10639 /* Handle OMP_FOR_COND. */
10640 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10641 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10642 {
10643 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10644 TREE_OPERAND (t, 1)
10645 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10646 gimple_seq_empty_p (for_pre_body)
10647 ? pre_p : &for_pre_body, NULL,
10648 false);
10649 /* A reference-to-pointer conversion is considered useless,
10650 but is significant for the firstprivate clause. Force it
10651 here. */
10652 if (TREE_CODE (type) == POINTER_TYPE
10653 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10654 == REFERENCE_TYPE))
10655 {
10656 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10657 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10658 TREE_OPERAND (t, 1));
10659 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10660 ? pre_p : &for_pre_body);
10661 TREE_OPERAND (t, 1) = v;
10662 }
10663 tree c = build_omp_clause (input_location,
10664 OMP_CLAUSE_FIRSTPRIVATE);
10665 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10666 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10667 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10668 }
10669
10670 /* Handle OMP_FOR_INCR. */
10671 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10672 if (TREE_CODE (t) == MODIFY_EXPR)
10673 {
10674 decl = TREE_OPERAND (t, 0);
10675 t = TREE_OPERAND (t, 1);
10676 tree *tp = &TREE_OPERAND (t, 1);
10677 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
10678 tp = &TREE_OPERAND (t, 0);
10679
10680 if (!is_gimple_constant (*tp))
10681 {
10682 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
10683 ? pre_p : &for_pre_body;
10684 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
10685 tree c = build_omp_clause (input_location,
10686 OMP_CLAUSE_FIRSTPRIVATE);
10687 OMP_CLAUSE_DECL (c) = *tp;
10688 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10689 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10690 }
10691 }
10692 }
10693
10694 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
10695 OMP_TASKLOOP);
10696 }
10697
10698 if (orig_for_stmt != for_stmt)
10699 gimplify_omp_ctxp->combined_loop = true;
10700
10701 for_body = NULL;
10702 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10703 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
10704 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10705 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
10706
10707 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
10708 bool is_doacross = false;
10709 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
10710 {
10711 is_doacross = true;
10712 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
10713 (OMP_FOR_INIT (for_stmt))
10714 * 2);
10715 }
10716 int collapse = 1, tile = 0;
10717 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
10718 if (c)
10719 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
10720 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
10721 if (c)
10722 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
10723 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10724 {
10725 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10726 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10727 decl = TREE_OPERAND (t, 0);
10728 gcc_assert (DECL_P (decl));
10729 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
10730 || POINTER_TYPE_P (TREE_TYPE (decl)));
10731 if (is_doacross)
10732 {
10733 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
10734 {
10735 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
10736 if (TREE_CODE (orig_decl) == TREE_LIST)
10737 {
10738 orig_decl = TREE_PURPOSE (orig_decl);
10739 if (!orig_decl)
10740 orig_decl = decl;
10741 }
10742 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
10743 }
10744 else
10745 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10746 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10747 }
10748
10749 /* Make sure the iteration variable is private. */
10750 tree c = NULL_TREE;
10751 tree c2 = NULL_TREE;
10752 if (orig_for_stmt != for_stmt)
10753 {
10754 /* Preserve this information until we gimplify the inner simd. */
10755 if (has_decl_expr
10756 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10757 TREE_PRIVATE (t) = 1;
10758 }
10759 else if (ort == ORT_SIMD)
10760 {
10761 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10762 (splay_tree_key) decl);
10763 omp_is_private (gimplify_omp_ctxp, decl,
10764 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10765 != 1));
10766 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10767 omp_notice_variable (gimplify_omp_ctxp, decl, true);
10768 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10769 {
10770 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
10771 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10772 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
10773 if ((has_decl_expr
10774 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10775 || TREE_PRIVATE (t))
10776 {
10777 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10778 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10779 }
10780 struct gimplify_omp_ctx *outer
10781 = gimplify_omp_ctxp->outer_context;
10782 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
10783 {
10784 if (outer->region_type == ORT_WORKSHARE
10785 && outer->combined_loop)
10786 {
10787 n = splay_tree_lookup (outer->variables,
10788 (splay_tree_key)decl);
10789 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10790 {
10791 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10792 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10793 }
10794 else
10795 {
10796 struct gimplify_omp_ctx *octx = outer->outer_context;
10797 if (octx
10798 && octx->region_type == ORT_COMBINED_PARALLEL
10799 && octx->outer_context
10800 && (octx->outer_context->region_type
10801 == ORT_WORKSHARE)
10802 && octx->outer_context->combined_loop)
10803 {
10804 octx = octx->outer_context;
10805 n = splay_tree_lookup (octx->variables,
10806 (splay_tree_key)decl);
10807 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10808 {
10809 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10810 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10811 }
10812 }
10813 }
10814 }
10815 }
10816
10817 OMP_CLAUSE_DECL (c) = decl;
10818 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10819 OMP_FOR_CLAUSES (for_stmt) = c;
10820 omp_add_variable (gimplify_omp_ctxp, decl, flags);
10821 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
10822 {
10823 if (outer->region_type == ORT_WORKSHARE
10824 && outer->combined_loop)
10825 {
10826 if (outer->outer_context
10827 && (outer->outer_context->region_type
10828 == ORT_COMBINED_PARALLEL))
10829 outer = outer->outer_context;
10830 else if (omp_check_private (outer, decl, false))
10831 outer = NULL;
10832 }
10833 else if (((outer->region_type & ORT_TASKLOOP)
10834 == ORT_TASKLOOP)
10835 && outer->combined_loop
10836 && !omp_check_private (gimplify_omp_ctxp,
10837 decl, false))
10838 ;
10839 else if (outer->region_type != ORT_COMBINED_PARALLEL)
10840 {
10841 omp_notice_variable (outer, decl, true);
10842 outer = NULL;
10843 }
10844 if (outer)
10845 {
10846 n = splay_tree_lookup (outer->variables,
10847 (splay_tree_key)decl);
10848 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10849 {
10850 omp_add_variable (outer, decl,
10851 GOVD_LASTPRIVATE | GOVD_SEEN);
10852 if (outer->region_type == ORT_COMBINED_PARALLEL
10853 && outer->outer_context
10854 && (outer->outer_context->region_type
10855 == ORT_WORKSHARE)
10856 && outer->outer_context->combined_loop)
10857 {
10858 outer = outer->outer_context;
10859 n = splay_tree_lookup (outer->variables,
10860 (splay_tree_key)decl);
10861 if (omp_check_private (outer, decl, false))
10862 outer = NULL;
10863 else if (n == NULL
10864 || ((n->value & GOVD_DATA_SHARE_CLASS)
10865 == 0))
10866 omp_add_variable (outer, decl,
10867 GOVD_LASTPRIVATE
10868 | GOVD_SEEN);
10869 else
10870 outer = NULL;
10871 }
10872 if (outer && outer->outer_context
10873 && ((outer->outer_context->region_type
10874 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
10875 || (((outer->region_type & ORT_TASKLOOP)
10876 == ORT_TASKLOOP)
10877 && (outer->outer_context->region_type
10878 == ORT_COMBINED_PARALLEL))))
10879 {
10880 outer = outer->outer_context;
10881 n = splay_tree_lookup (outer->variables,
10882 (splay_tree_key)decl);
10883 if (n == NULL
10884 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10885 omp_add_variable (outer, decl,
10886 GOVD_SHARED | GOVD_SEEN);
10887 else
10888 outer = NULL;
10889 }
10890 if (outer && outer->outer_context)
10891 omp_notice_variable (outer->outer_context, decl,
10892 true);
10893 }
10894 }
10895 }
10896 }
10897 else
10898 {
10899 bool lastprivate
10900 = (!has_decl_expr
10901 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
10902 if (TREE_PRIVATE (t))
10903 lastprivate = false;
10904 struct gimplify_omp_ctx *outer
10905 = gimplify_omp_ctxp->outer_context;
10906 if (outer && lastprivate)
10907 {
10908 if (outer->region_type == ORT_WORKSHARE
10909 && outer->combined_loop)
10910 {
10911 n = splay_tree_lookup (outer->variables,
10912 (splay_tree_key)decl);
10913 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10914 {
10915 lastprivate = false;
10916 outer = NULL;
10917 }
10918 else if (outer->outer_context
10919 && (outer->outer_context->region_type
10920 == ORT_COMBINED_PARALLEL))
10921 outer = outer->outer_context;
10922 else if (omp_check_private (outer, decl, false))
10923 outer = NULL;
10924 }
10925 else if (((outer->region_type & ORT_TASKLOOP)
10926 == ORT_TASKLOOP)
10927 && outer->combined_loop
10928 && !omp_check_private (gimplify_omp_ctxp,
10929 decl, false))
10930 ;
10931 else if (outer->region_type != ORT_COMBINED_PARALLEL)
10932 {
10933 omp_notice_variable (outer, decl, true);
10934 outer = NULL;
10935 }
10936 if (outer)
10937 {
10938 n = splay_tree_lookup (outer->variables,
10939 (splay_tree_key)decl);
10940 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10941 {
10942 omp_add_variable (outer, decl,
10943 GOVD_LASTPRIVATE | GOVD_SEEN);
10944 if (outer->region_type == ORT_COMBINED_PARALLEL
10945 && outer->outer_context
10946 && (outer->outer_context->region_type
10947 == ORT_WORKSHARE)
10948 && outer->outer_context->combined_loop)
10949 {
10950 outer = outer->outer_context;
10951 n = splay_tree_lookup (outer->variables,
10952 (splay_tree_key)decl);
10953 if (omp_check_private (outer, decl, false))
10954 outer = NULL;
10955 else if (n == NULL
10956 || ((n->value & GOVD_DATA_SHARE_CLASS)
10957 == 0))
10958 omp_add_variable (outer, decl,
10959 GOVD_LASTPRIVATE
10960 | GOVD_SEEN);
10961 else
10962 outer = NULL;
10963 }
10964 if (outer && outer->outer_context
10965 && ((outer->outer_context->region_type
10966 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
10967 || (((outer->region_type & ORT_TASKLOOP)
10968 == ORT_TASKLOOP)
10969 && (outer->outer_context->region_type
10970 == ORT_COMBINED_PARALLEL))))
10971 {
10972 outer = outer->outer_context;
10973 n = splay_tree_lookup (outer->variables,
10974 (splay_tree_key)decl);
10975 if (n == NULL
10976 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10977 omp_add_variable (outer, decl,
10978 GOVD_SHARED | GOVD_SEEN);
10979 else
10980 outer = NULL;
10981 }
10982 if (outer && outer->outer_context)
10983 omp_notice_variable (outer->outer_context, decl,
10984 true);
10985 }
10986 }
10987 }
10988
10989 c = build_omp_clause (input_location,
10990 lastprivate ? OMP_CLAUSE_LASTPRIVATE
10991 : OMP_CLAUSE_PRIVATE);
10992 OMP_CLAUSE_DECL (c) = decl;
10993 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10994 OMP_FOR_CLAUSES (for_stmt) = c;
10995 omp_add_variable (gimplify_omp_ctxp, decl,
10996 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
10997 | GOVD_EXPLICIT | GOVD_SEEN);
10998 c = NULL_TREE;
10999 }
11000 }
11001 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
11002 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11003 else
11004 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
11005
11006 /* If DECL is not a gimple register, create a temporary variable to act
11007 as an iteration counter.  This is valid, since DECL cannot be
11008 modified in the body of the loop.  Similarly for any iteration vars
11009 in a simd with collapse > 1, where the iterator vars must be
11010 lastprivate.  */
11011 if (orig_for_stmt != for_stmt)
11012 var = decl;
11013 else if (!is_gimple_reg (decl)
11014 || (ort == ORT_SIMD
11015 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
11016 {
11017 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11018 /* Make sure omp_add_variable is not called on it prematurely.
11019 We call it ourselves a few lines later. */
11020 gimplify_omp_ctxp = NULL;
11021 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11022 gimplify_omp_ctxp = ctx;
11023 TREE_OPERAND (t, 0) = var;
11024
11025 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
11026
11027 if (ort == ORT_SIMD
11028 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11029 {
11030 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11031 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
11032 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
11033 OMP_CLAUSE_DECL (c2) = var;
11034 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
11035 OMP_FOR_CLAUSES (for_stmt) = c2;
11036 omp_add_variable (gimplify_omp_ctxp, var,
11037 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
11038 if (c == NULL_TREE)
11039 {
11040 c = c2;
11041 c2 = NULL_TREE;
11042 }
11043 }
11044 else
11045 omp_add_variable (gimplify_omp_ctxp, var,
11046 GOVD_PRIVATE | GOVD_SEEN);
11047 }
11048 else
11049 var = decl;
11050
11051 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11052 is_gimple_val, fb_rvalue, false);
11053 ret = MIN (ret, tret);
11054 if (ret == GS_ERROR)
11055 return ret;
11056
11057 /* Handle OMP_FOR_COND. */
11058 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11059 gcc_assert (COMPARISON_CLASS_P (t));
11060 gcc_assert (TREE_OPERAND (t, 0) == decl);
11061
11062 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11063 is_gimple_val, fb_rvalue, false);
11064 ret = MIN (ret, tret);
11065
11066 /* Handle OMP_FOR_INCR. */
11067 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11068 switch (TREE_CODE (t))
11069 {
11070 case PREINCREMENT_EXPR:
11071 case POSTINCREMENT_EXPR:
11072 {
11073 tree decl = TREE_OPERAND (t, 0);
11074 /* c_omp_for_incr_canonicalize_ptr() should have been
11075 called to massage things appropriately. */
11076 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11077
11078 if (orig_for_stmt != for_stmt)
11079 break;
11080 t = build_int_cst (TREE_TYPE (decl), 1);
11081 if (c)
11082 OMP_CLAUSE_LINEAR_STEP (c) = t;
11083 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11084 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11085 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11086 break;
11087 }
11088
11089 case PREDECREMENT_EXPR:
11090 case POSTDECREMENT_EXPR:
11091 /* c_omp_for_incr_canonicalize_ptr() should have been
11092 called to massage things appropriately. */
11093 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11094 if (orig_for_stmt != for_stmt)
11095 break;
11096 t = build_int_cst (TREE_TYPE (decl), -1);
11097 if (c)
11098 OMP_CLAUSE_LINEAR_STEP (c) = t;
11099 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11100 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11101 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11102 break;
11103
11104 case MODIFY_EXPR:
11105 gcc_assert (TREE_OPERAND (t, 0) == decl);
11106 TREE_OPERAND (t, 0) = var;
11107
11108 t = TREE_OPERAND (t, 1);
11109 switch (TREE_CODE (t))
11110 {
11111 case PLUS_EXPR:
11112 if (TREE_OPERAND (t, 1) == decl)
11113 {
11114 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11115 TREE_OPERAND (t, 0) = var;
11116 break;
11117 }
11118
11119 /* Fallthru. */
11120 case MINUS_EXPR:
11121 case POINTER_PLUS_EXPR:
11122 gcc_assert (TREE_OPERAND (t, 0) == decl);
11123 TREE_OPERAND (t, 0) = var;
11124 break;
11125 default:
11126 gcc_unreachable ();
11127 }
11128
11129 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11130 is_gimple_val, fb_rvalue, false);
11131 ret = MIN (ret, tret);
11132 if (c)
11133 {
11134 tree step = TREE_OPERAND (t, 1);
11135 tree stept = TREE_TYPE (decl);
11136 if (POINTER_TYPE_P (stept))
11137 stept = sizetype;
11138 step = fold_convert (stept, step);
11139 if (TREE_CODE (t) == MINUS_EXPR)
11140 step = fold_build1 (NEGATE_EXPR, stept, step);
11141 OMP_CLAUSE_LINEAR_STEP (c) = step;
11142 if (step != TREE_OPERAND (t, 1))
11143 {
11144 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11145 &for_pre_body, NULL,
11146 is_gimple_val, fb_rvalue, false);
11147 ret = MIN (ret, tret);
11148 }
11149 }
11150 break;
11151
11152 default:
11153 gcc_unreachable ();
11154 }
11155
11156 if (c2)
11157 {
11158 gcc_assert (c);
11159 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11160 }
11161
11162 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
11163 {
11164 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
11165 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11166 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11167 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11168 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11169 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11170 && OMP_CLAUSE_DECL (c) == decl)
11171 {
11172 if (is_doacross && (collapse == 1 || i >= collapse))
11173 t = var;
11174 else
11175 {
11176 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11177 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11178 gcc_assert (TREE_OPERAND (t, 0) == var);
11179 t = TREE_OPERAND (t, 1);
11180 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11181 || TREE_CODE (t) == MINUS_EXPR
11182 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11183 gcc_assert (TREE_OPERAND (t, 0) == var);
11184 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11185 is_doacross ? var : decl,
11186 TREE_OPERAND (t, 1));
11187 }
11188 gimple_seq *seq;
11189 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11190 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11191 else
11192 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
11193 push_gimplify_context ();
11194 gimplify_assign (decl, t, seq);
11195 gimple *bind = NULL;
11196 if (gimplify_ctxp->temps)
11197 {
11198 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
11199 *seq = NULL;
11200 gimplify_seq_add_stmt (seq, bind);
11201 }
11202 pop_gimplify_context (bind);
11203 }
11204 }
11205 }
11206
11207 BITMAP_FREE (has_decl_expr);
11208
11209 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11210 {
11211 push_gimplify_context ();
11212 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11213 {
11214 OMP_FOR_BODY (orig_for_stmt)
11215 = build3 (BIND_EXPR, void_type_node, NULL,
11216 OMP_FOR_BODY (orig_for_stmt), NULL);
11217 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11218 }
11219 }
11220
11221 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11222 &for_body);
11223
11224 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11225 {
11226 if (gimple_code (g) == GIMPLE_BIND)
11227 pop_gimplify_context (g);
11228 else
11229 pop_gimplify_context (NULL);
11230 }
11231
11232 if (orig_for_stmt != for_stmt)
11233 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11234 {
11235 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11236 decl = TREE_OPERAND (t, 0);
11237 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11238 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11239 gimplify_omp_ctxp = ctx->outer_context;
11240 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11241 gimplify_omp_ctxp = ctx;
11242 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11243 TREE_OPERAND (t, 0) = var;
11244 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11245 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
11246 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
11247 }
11248
11249 gimplify_adjust_omp_clauses (pre_p, for_body,
11250 &OMP_FOR_CLAUSES (orig_for_stmt),
11251 TREE_CODE (orig_for_stmt));
11252
11253 int kind;
11254 switch (TREE_CODE (orig_for_stmt))
11255 {
11256 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
11257 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
11258 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
11259 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
11260 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
11261 default:
11262 gcc_unreachable ();
11263 }
11264 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
11265 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
11266 for_pre_body);
11267 if (orig_for_stmt != for_stmt)
11268 gimple_omp_for_set_combined_p (gfor, true);
11269 if (gimplify_omp_ctxp
11270 && (gimplify_omp_ctxp->combined_loop
11271 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
11272 && gimplify_omp_ctxp->outer_context
11273 && gimplify_omp_ctxp->outer_context->combined_loop)))
11274 {
11275 gimple_omp_for_set_combined_into_p (gfor, true);
11276 if (gimplify_omp_ctxp->combined_loop)
11277 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
11278 else
11279 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
11280 }
11281
11282 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11283 {
11284 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11285 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
11286 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
11287 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11288 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
11289 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
11290 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11291 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
11292 }
11293
11294 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
11295 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
11296 The outer taskloop computes the number of iterations (the
11297 iteration counts for collapsed loops) and holds the
11298 taskloop-specific clauses.  The task construct represents the
11299 effect of data sharing on the explicit task it creates, and the
11300 inner taskloop expands the static loop inside that explicit task.  */
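/* Schematically (an illustrative sketch, not a verbatim dump), the
   result looks like:

     GIMPLE_OMP_FOR <taskloop, outer clauses: grainsize/num_tasks/
                     nogroup/collapse, privatized IVs>
       GIMPLE_BIND
         GIMPLE_OMP_TASK <shared/firstprivate/default/if/final/...>
           GIMPLE_BIND
             GIMPLE_OMP_FOR <taskloop, inner clauses: lastprivate/
                             collapse>
               loop body  */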
11301 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11302 {
11303 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
11304 tree task_clauses = NULL_TREE;
11305 tree c = *gfor_clauses_ptr;
11306 tree *gtask_clauses_ptr = &task_clauses;
11307 tree outer_for_clauses = NULL_TREE;
11308 tree *gforo_clauses_ptr = &outer_for_clauses;
11309 for (; c; c = OMP_CLAUSE_CHAIN (c))
11310 switch (OMP_CLAUSE_CODE (c))
11311 {
11312 /* These clauses are allowed on task, move them there. */
11313 case OMP_CLAUSE_SHARED:
11314 case OMP_CLAUSE_FIRSTPRIVATE:
11315 case OMP_CLAUSE_DEFAULT:
11316 case OMP_CLAUSE_IF:
11317 case OMP_CLAUSE_UNTIED:
11318 case OMP_CLAUSE_FINAL:
11319 case OMP_CLAUSE_MERGEABLE:
11320 case OMP_CLAUSE_PRIORITY:
11321 case OMP_CLAUSE_REDUCTION:
11322 case OMP_CLAUSE_IN_REDUCTION:
11323 *gtask_clauses_ptr = c;
11324 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11325 break;
11326 case OMP_CLAUSE_PRIVATE:
11327 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
11328 {
11329 /* We want private on outer for and firstprivate
11330 on task. */
11331 *gtask_clauses_ptr
11332 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11333 OMP_CLAUSE_FIRSTPRIVATE);
11334 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11335 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11336 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11337 *gforo_clauses_ptr = c;
11338 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11339 }
11340 else
11341 {
11342 *gtask_clauses_ptr = c;
11343 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11344 }
11345 break;
11346 /* These clauses go into outer taskloop clauses. */
11347 case OMP_CLAUSE_GRAINSIZE:
11348 case OMP_CLAUSE_NUM_TASKS:
11349 case OMP_CLAUSE_NOGROUP:
11350 *gforo_clauses_ptr = c;
11351 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11352 break;
11353 /* A taskloop clause we duplicate on both taskloops.  */
11354 case OMP_CLAUSE_COLLAPSE:
11355 *gfor_clauses_ptr = c;
11356 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11357 *gforo_clauses_ptr = copy_node (c);
11358 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11359 break;
11360 /* For lastprivate, keep the clause on the inner taskloop, and add
11361 a shared clause on the task.  If the same decl is also firstprivate,
11362 also add a firstprivate clause on the inner taskloop.  */
11363 case OMP_CLAUSE_LASTPRIVATE:
11364 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
11365 {
11366 /* For taskloop C++ lastprivate IVs, we want:
11367 1) private on outer taskloop
11368 2) firstprivate and shared on task
11369 3) lastprivate on inner taskloop */
11370 *gtask_clauses_ptr
11371 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11372 OMP_CLAUSE_FIRSTPRIVATE);
11373 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11374 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11375 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11376 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
11377 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11378 OMP_CLAUSE_PRIVATE);
11379 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
11380 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
11381 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
11382 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11383 }
11384 *gfor_clauses_ptr = c;
11385 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11386 *gtask_clauses_ptr
11387 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
11388 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11389 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
11390 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
11391 gtask_clauses_ptr
11392 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11393 break;
11394 default:
11395 gcc_unreachable ();
11396 }
11397 *gfor_clauses_ptr = NULL_TREE;
11398 *gtask_clauses_ptr = NULL_TREE;
11399 *gforo_clauses_ptr = NULL_TREE;
11400 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
11401 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
11402 NULL_TREE, NULL_TREE, NULL_TREE);
11403 gimple_omp_task_set_taskloop_p (g, true);
11404 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
11405 gomp_for *gforo
11406 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
11407 gimple_omp_for_collapse (gfor),
11408 gimple_omp_for_pre_body (gfor));
11409 gimple_omp_for_set_pre_body (gfor, NULL);
11410 gimple_omp_for_set_combined_p (gforo, true);
11411 gimple_omp_for_set_combined_into_p (gfor, true);
11412 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
11413 {
11414 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
11415 tree v = create_tmp_var (type);
11416 gimple_omp_for_set_index (gforo, i, v);
11417 t = unshare_expr (gimple_omp_for_initial (gfor, i));
11418 gimple_omp_for_set_initial (gforo, i, t);
11419 gimple_omp_for_set_cond (gforo, i,
11420 gimple_omp_for_cond (gfor, i));
11421 t = unshare_expr (gimple_omp_for_final (gfor, i));
11422 gimple_omp_for_set_final (gforo, i, t);
11423 t = unshare_expr (gimple_omp_for_incr (gfor, i));
11424 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
11425 TREE_OPERAND (t, 0) = v;
11426 gimple_omp_for_set_incr (gforo, i, t);
11427 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
11428 OMP_CLAUSE_DECL (t) = v;
11429 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
11430 gimple_omp_for_set_clauses (gforo, t);
11431 }
11432 gimplify_seq_add_stmt (pre_p, gforo);
11433 }
11434 else
11435 gimplify_seq_add_stmt (pre_p, gfor);
11436 if (ret != GS_ALL_DONE)
11437 return GS_ERROR;
11438 *expr_p = NULL_TREE;
11439 return GS_ALL_DONE;
11440 }
11441
11442 /* Helper function of optimize_target_teams: find an OMP_TEAMS inside
11443 OMP_TARGET's body.  */
11444
11445 static tree
11446 find_omp_teams (tree *tp, int *walk_subtrees, void *)
11447 {
11448 *walk_subtrees = 0;
11449 switch (TREE_CODE (*tp))
11450 {
11451 case OMP_TEAMS:
11452 return *tp;
11453 case BIND_EXPR:
11454 case STATEMENT_LIST:
11455 *walk_subtrees = 1;
11456 break;
11457 default:
11458 break;
11459 }
11460 return NULL_TREE;
11461 }
11462
11463 /* Helper function of optimize_target_teams: determine if the expression
11464 can be computed safely on the host before the target construct.  */
11465
11466 static tree
11467 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
11468 {
11469 splay_tree_node n;
11470
11471 if (TYPE_P (*tp))
11472 {
11473 *walk_subtrees = 0;
11474 return NULL_TREE;
11475 }
11476 switch (TREE_CODE (*tp))
11477 {
11478 case VAR_DECL:
11479 case PARM_DECL:
11480 case RESULT_DECL:
11481 *walk_subtrees = 0;
11482 if (error_operand_p (*tp)
11483 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
11484 || DECL_HAS_VALUE_EXPR_P (*tp)
11485 || DECL_THREAD_LOCAL_P (*tp)
11486 || TREE_SIDE_EFFECTS (*tp)
11487 || TREE_THIS_VOLATILE (*tp))
11488 return *tp;
11489 if (is_global_var (*tp)
11490 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
11491 || lookup_attribute ("omp declare target link",
11492 DECL_ATTRIBUTES (*tp))))
11493 return *tp;
11494 if (VAR_P (*tp)
11495 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
11496 && !is_global_var (*tp)
11497 && decl_function_context (*tp) == current_function_decl)
11498 return *tp;
11499 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11500 (splay_tree_key) *tp);
11501 if (n == NULL)
11502 {
11503 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
11504 return NULL_TREE;
11505 return *tp;
11506 }
11507 else if (n->value & GOVD_LOCAL)
11508 return *tp;
11509 else if (n->value & GOVD_FIRSTPRIVATE)
11510 return NULL_TREE;
11511 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
11512 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
11513 return NULL_TREE;
11514 return *tp;
11515 case INTEGER_CST:
11516 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
11517 return *tp;
11518 return NULL_TREE;
11519 case TARGET_EXPR:
11520 if (TARGET_EXPR_INITIAL (*tp)
11521 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
11522 return *tp;
11523 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
11524 walk_subtrees, NULL);
11525 /* Allow some reasonable subset of integral arithmetic.  */
11526 case PLUS_EXPR:
11527 case MINUS_EXPR:
11528 case MULT_EXPR:
11529 case TRUNC_DIV_EXPR:
11530 case CEIL_DIV_EXPR:
11531 case FLOOR_DIV_EXPR:
11532 case ROUND_DIV_EXPR:
11533 case TRUNC_MOD_EXPR:
11534 case CEIL_MOD_EXPR:
11535 case FLOOR_MOD_EXPR:
11536 case ROUND_MOD_EXPR:
11537 case RDIV_EXPR:
11538 case EXACT_DIV_EXPR:
11539 case MIN_EXPR:
11540 case MAX_EXPR:
11541 case LSHIFT_EXPR:
11542 case RSHIFT_EXPR:
11543 case BIT_IOR_EXPR:
11544 case BIT_XOR_EXPR:
11545 case BIT_AND_EXPR:
11546 case NEGATE_EXPR:
11547 case ABS_EXPR:
11548 case BIT_NOT_EXPR:
11549 case NON_LVALUE_EXPR:
11550 CASE_CONVERT:
11551 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
11552 return *tp;
11553 return NULL_TREE;
11554 /* And disallow anything else, except for comparisons. */
11555 default:
11556 if (COMPARISON_CLASS_P (*tp))
11557 return NULL_TREE;
11558 return *tp;
11559 }
11560 }
11561
11562 /* Try to determine if the num_teams and/or thread_limit expressions
11563 can have their values determined already before entering the
11564 target construct.
11565 INTEGER_CSTs trivially can; so can integral decls that are
11566 firstprivate (explicitly or implicitly) or explicitly
11567 map(always, to:) or map(always, tofrom:) on the target region,
11568 and expressions involving simple arithmetic on those; function
11569 calls are not OK, nor is dereferencing something, etc.
11570 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
11571 EXPR based on what we find:
11572 0 stands for a clause not specified at all: use the implementation
11573 default; -1 stands for a value that can't be determined easily
11574 before entering the target construct.
11575 If no teams construct is present at all, use 1 for num_teams
11576 and 0 for thread_limit (only one team is involved, and the thread
11577 limit is implementation defined).  */
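/* For instance (an illustrative example), in

     int n = 4;
     #pragma omp target
     #pragma omp teams num_teams (n)

   n is an integral scalar that is firstprivate on the target region,
   so a NUM_TEAMS clause with n is added to the OMP_TARGET clauses,
   whereas num_teams (foo ()) would yield -1.  */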
11578
11579 static void
11580 optimize_target_teams (tree target, gimple_seq *pre_p)
11581 {
11582 tree body = OMP_BODY (target);
11583 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
11584 tree num_teams = integer_zero_node;
11585 tree thread_limit = integer_zero_node;
11586 location_t num_teams_loc = EXPR_LOCATION (target);
11587 location_t thread_limit_loc = EXPR_LOCATION (target);
11588 tree c, *p, expr;
11589 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
11590
11591 if (teams == NULL_TREE)
11592 num_teams = integer_one_node;
11593 else
11594 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
11595 {
11596 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
11597 {
11598 p = &num_teams;
11599 num_teams_loc = OMP_CLAUSE_LOCATION (c);
11600 }
11601 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
11602 {
11603 p = &thread_limit;
11604 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
11605 }
11606 else
11607 continue;
11608 expr = OMP_CLAUSE_OPERAND (c, 0);
11609 if (TREE_CODE (expr) == INTEGER_CST)
11610 {
11611 *p = expr;
11612 continue;
11613 }
11614 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
11615 {
11616 *p = integer_minus_one_node;
11617 continue;
11618 }
11619 *p = expr;
11620 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
11621 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
11622 == GS_ERROR)
11623 {
11624 gimplify_omp_ctxp = target_ctx;
11625 *p = integer_minus_one_node;
11626 continue;
11627 }
11628 gimplify_omp_ctxp = target_ctx;
11629 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
11630 OMP_CLAUSE_OPERAND (c, 0) = *p;
11631 }
11632 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
11633 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
11634 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
11635 OMP_TARGET_CLAUSES (target) = c;
11636 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
11637 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
11638 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
11639 OMP_TARGET_CLAUSES (target) = c;
11640 }
11641
11642 /* Gimplify the gross structure of several OMP constructs. */
11643
11644 static void
11645 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
11646 {
11647 tree expr = *expr_p;
11648 gimple *stmt;
11649 gimple_seq body = NULL;
11650 enum omp_region_type ort;
11651
11652 switch (TREE_CODE (expr))
11653 {
11654 case OMP_SECTIONS:
11655 case OMP_SINGLE:
11656 ort = ORT_WORKSHARE;
11657 break;
11658 case OMP_TARGET:
11659 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
11660 break;
11661 case OACC_KERNELS:
11662 ort = ORT_ACC_KERNELS;
11663 break;
11664 case OACC_PARALLEL:
11665 ort = ORT_ACC_PARALLEL;
11666 break;
11667 case OACC_DATA:
11668 ort = ORT_ACC_DATA;
11669 break;
11670 case OMP_TARGET_DATA:
11671 ort = ORT_TARGET_DATA;
11672 break;
11673 case OMP_TEAMS:
11674 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
11675 if (gimplify_omp_ctxp == NULL
11676 || (gimplify_omp_ctxp->region_type == ORT_TARGET
11677 && gimplify_omp_ctxp->outer_context == NULL
11678 && lookup_attribute ("omp declare target",
11679 DECL_ATTRIBUTES (current_function_decl))))
11680 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
11681 break;
11682 case OACC_HOST_DATA:
11683 ort = ORT_ACC_HOST_DATA;
11684 break;
11685 default:
11686 gcc_unreachable ();
11687 }
11688 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
11689 TREE_CODE (expr));
11690 if (TREE_CODE (expr) == OMP_TARGET)
11691 optimize_target_teams (expr, pre_p);
11692 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
11693 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
11694 {
11695 push_gimplify_context ();
11696 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
11697 if (gimple_code (g) == GIMPLE_BIND)
11698 pop_gimplify_context (g);
11699 else
11700 pop_gimplify_context (NULL);
11701 if ((ort & ORT_TARGET_DATA) != 0)
11702 {
11703 enum built_in_function end_ix;
11704 switch (TREE_CODE (expr))
11705 {
11706 case OACC_DATA:
11707 case OACC_HOST_DATA:
11708 end_ix = BUILT_IN_GOACC_DATA_END;
11709 break;
11710 case OMP_TARGET_DATA:
11711 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
11712 break;
11713 default:
11714 gcc_unreachable ();
11715 }
11716 tree fn = builtin_decl_explicit (end_ix);
11717 g = gimple_build_call (fn, 0);
11718 gimple_seq cleanup = NULL;
11719 gimple_seq_add_stmt (&cleanup, g);
11720 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11721 body = NULL;
11722 gimple_seq_add_stmt (&body, g);
11723 }
11724 }
11725 else
11726 gimplify_and_add (OMP_BODY (expr), &body);
11727 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
11728 TREE_CODE (expr));
11729
11730 switch (TREE_CODE (expr))
11731 {
11732 case OACC_DATA:
11733 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
11734 OMP_CLAUSES (expr));
11735 break;
11736 case OACC_KERNELS:
11737 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
11738 OMP_CLAUSES (expr));
11739 break;
11740 case OACC_HOST_DATA:
11741 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
11742 OMP_CLAUSES (expr));
11743 break;
11744 case OACC_PARALLEL:
11745 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
11746 OMP_CLAUSES (expr));
11747 break;
11748 case OMP_SECTIONS:
11749 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
11750 break;
11751 case OMP_SINGLE:
11752 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
11753 break;
11754 case OMP_TARGET:
11755 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
11756 OMP_CLAUSES (expr));
11757 break;
11758 case OMP_TARGET_DATA:
11759 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
11760 OMP_CLAUSES (expr));
11761 break;
11762 case OMP_TEAMS:
11763 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
11764 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
11765 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
11766 break;
11767 default:
11768 gcc_unreachable ();
11769 }
11770
11771 gimplify_seq_add_stmt (pre_p, stmt);
11772 *expr_p = NULL_TREE;
11773 }
11774
11775 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
11776 target update constructs. */
11777
11778 static void
11779 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
11780 {
11781 tree expr = *expr_p;
11782 int kind;
11783 gomp_target *stmt;
11784 enum omp_region_type ort = ORT_WORKSHARE;
11785
11786 switch (TREE_CODE (expr))
11787 {
11788 case OACC_ENTER_DATA:
11789 case OACC_EXIT_DATA:
11790 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
11791 ort = ORT_ACC;
11792 break;
11793 case OACC_UPDATE:
11794 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
11795 ort = ORT_ACC;
11796 break;
11797 case OMP_TARGET_UPDATE:
11798 kind = GF_OMP_TARGET_KIND_UPDATE;
11799 break;
11800 case OMP_TARGET_ENTER_DATA:
11801 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
11802 break;
11803 case OMP_TARGET_EXIT_DATA:
11804 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
11805 break;
11806 default:
11807 gcc_unreachable ();
11808 }
11809 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
11810 ort, TREE_CODE (expr));
11811 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
11812 TREE_CODE (expr));
11813 if (TREE_CODE (expr) == OACC_UPDATE
11814 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
11815 OMP_CLAUSE_IF_PRESENT))
11816 {
11817 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
11818 clause. */
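/* E.g., for #pragma acc update device (x) if_present, the
   GOMP_MAP_FORCE_TO mapping for x becomes GOMP_MAP_TO below
   (an illustrative example).  */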
11819 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11820 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
11821 switch (OMP_CLAUSE_MAP_KIND (c))
11822 {
11823 case GOMP_MAP_FORCE_TO:
11824 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
11825 break;
11826 case GOMP_MAP_FORCE_FROM:
11827 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
11828 break;
11829 default:
11830 break;
11831 }
11832 }
11833 else if (TREE_CODE (expr) == OACC_EXIT_DATA
11834 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
11835 OMP_CLAUSE_FINALIZE))
11836 {
11837 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote that "finalize"
11838 semantics apply to all mappings of this OpenACC directive. */
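/* E.g., for #pragma acc exit data copyout (x) finalize, the
   GOMP_MAP_FROM mapping for x becomes GOMP_MAP_FORCE_FROM
   (an illustrative example).  */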
11839 bool finalize_marked = false;
11840 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11841 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
11842 switch (OMP_CLAUSE_MAP_KIND (c))
11843 {
11844 case GOMP_MAP_FROM:
11845 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
11846 finalize_marked = true;
11847 break;
11848 case GOMP_MAP_RELEASE:
11849 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
11850 finalize_marked = true;
11851 break;
11852 default:
11853 /* Check consistency: libgomp relies on the very first data
11854 mapping clause being marked, so make sure we did that before
11855 any other mapping clauses. */
11856 gcc_assert (finalize_marked);
11857 break;
11858 }
11859 }
11860 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
11861
11862 gimplify_seq_add_stmt (pre_p, stmt);
11863 *expr_p = NULL_TREE;
11864 }
11865
11866 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
11867 stabilized the lhs of the atomic operation as *ADDR. Return true if
11868 EXPR is this stabilized form. */
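/* For instance, for ADDR == &x, both x itself and *&x (possibly
   wrapped in useless type conversions) are recognized as the
   stabilized lhs (an illustrative example).  */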
11869
11870 static bool
11871 goa_lhs_expr_p (tree expr, tree addr)
11872 {
11873 /* Also include casts to other type variants. The C front end is fond
11874 of adding these for e.g. volatile variables. This is like
11875 STRIP_TYPE_NOPS but includes the main variant lookup. */
11876 STRIP_USELESS_TYPE_CONVERSION (expr);
11877
11878 if (TREE_CODE (expr) == INDIRECT_REF)
11879 {
11880 expr = TREE_OPERAND (expr, 0);
11881 while (expr != addr
11882 && (CONVERT_EXPR_P (expr)
11883 || TREE_CODE (expr) == NON_LVALUE_EXPR)
11884 && TREE_CODE (expr) == TREE_CODE (addr)
11885 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
11886 {
11887 expr = TREE_OPERAND (expr, 0);
11888 addr = TREE_OPERAND (addr, 0);
11889 }
11890 if (expr == addr)
11891 return true;
11892 return (TREE_CODE (addr) == ADDR_EXPR
11893 && TREE_CODE (expr) == ADDR_EXPR
11894 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
11895 }
11896 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
11897 return true;
11898 return false;
11899 }
11900
11901 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
11902 expression does not involve the lhs, evaluate it into a temporary.
11903 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
11904 or -1 if an error was encountered. */
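/* E.g., for an atomic update x = x + foo () (an illustrative
   example), the foo () call does not involve the lhs, so it is
   evaluated into a temporary in PRE_P, the x subexpression is
   replaced by LHS_VAR, and 1 is returned.  */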
11905
11906 static int
11907 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
11908 tree lhs_var)
11909 {
11910 tree expr = *expr_p;
11911 int saw_lhs;
11912
11913 if (goa_lhs_expr_p (expr, lhs_addr))
11914 {
11915 *expr_p = lhs_var;
11916 return 1;
11917 }
11918 if (is_gimple_val (expr))
11919 return 0;
11920
11921 saw_lhs = 0;
11922 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
11923 {
11924 case tcc_binary:
11925 case tcc_comparison:
11926 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
11927 lhs_var);
11928 /* FALLTHRU */
11929 case tcc_unary:
11930 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
11931 lhs_var);
11932 break;
11933 case tcc_expression:
11934 switch (TREE_CODE (expr))
11935 {
11936 case TRUTH_ANDIF_EXPR:
11937 case TRUTH_ORIF_EXPR:
11938 case TRUTH_AND_EXPR:
11939 case TRUTH_OR_EXPR:
11940 case TRUTH_XOR_EXPR:
11941 case BIT_INSERT_EXPR:
11942 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
11943 lhs_addr, lhs_var);
11944 /* FALLTHRU */
11945 case TRUTH_NOT_EXPR:
11946 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
11947 lhs_addr, lhs_var);
11948 break;
11949 case COMPOUND_EXPR:
11950 /* Break out any preevaluations from cp_build_modify_expr. */
11951 for (; TREE_CODE (expr) == COMPOUND_EXPR;
11952 expr = TREE_OPERAND (expr, 1))
11953 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
11954 *expr_p = expr;
11955 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
11956 default:
11957 break;
11958 }
11959 break;
11960 case tcc_reference:
11961 if (TREE_CODE (expr) == BIT_FIELD_REF)
11962 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
11963 lhs_addr, lhs_var);
11964 break;
11965 default:
11966 break;
11967 }
11968
11969 if (saw_lhs == 0)
11970 {
11971 enum gimplify_status gs;
11972 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
11973 if (gs != GS_ALL_DONE)
11974 saw_lhs = -1;
11975 }
11976
11977 return saw_lhs;
11978 }
11979
11980 /* Gimplify an OMP_ATOMIC statement. */
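/* E.g., #pragma omp atomic on x += n is gimplified roughly as

     tmp = GIMPLE_OMP_ATOMIC_LOAD <&x>
     GIMPLE_OMP_ATOMIC_STORE <tmp + n>

   (an illustrative sketch; the exact tuples are built below).  */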
11981
11982 static enum gimplify_status
11983 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
11984 {
11985 tree addr = TREE_OPERAND (*expr_p, 0);
11986 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
11987 ? NULL : TREE_OPERAND (*expr_p, 1);
11988 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
11989 tree tmp_load;
11990 gomp_atomic_load *loadstmt;
11991 gomp_atomic_store *storestmt;
11992
11993 tmp_load = create_tmp_reg (type);
11994 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
11995 return GS_ERROR;
11996
11997 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
11998 != GS_ALL_DONE)
11999 return GS_ERROR;
12000
12001 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
12002 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
12003 gimplify_seq_add_stmt (pre_p, loadstmt);
12004 if (rhs)
12005 {
12006 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
12007 representatives. Use BIT_FIELD_REF on the lhs instead. */
12008 if (TREE_CODE (rhs) == BIT_INSERT_EXPR
12009 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
12010 {
12011 tree bitpos = TREE_OPERAND (rhs, 2);
12012 tree op1 = TREE_OPERAND (rhs, 1);
12013 tree bitsize;
12014 tree tmp_store = tmp_load;
12015 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
12016 tmp_store = get_initialized_tmp_var (tmp_load, pre_p, NULL);
12017 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
12018 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
12019 else
12020 bitsize = TYPE_SIZE (TREE_TYPE (op1));
12021 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
12022 tree t = build2_loc (EXPR_LOCATION (rhs),
12023 MODIFY_EXPR, void_type_node,
12024 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
12025 TREE_TYPE (op1), tmp_store, bitsize,
12026 bitpos), op1);
12027 gimplify_and_add (t, pre_p);
12028 rhs = tmp_store;
12029 }
12030 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
12031 != GS_ALL_DONE)
12032 return GS_ERROR;
12033 }
12034
12035 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
12036 rhs = tmp_load;
12037 storestmt
12038 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
12039 gimplify_seq_add_stmt (pre_p, storestmt);
12040 switch (TREE_CODE (*expr_p))
12041 {
12042 case OMP_ATOMIC_READ:
12043 case OMP_ATOMIC_CAPTURE_OLD:
12044 *expr_p = tmp_load;
12045 gimple_omp_atomic_set_need_value (loadstmt);
12046 break;
12047 case OMP_ATOMIC_CAPTURE_NEW:
12048 *expr_p = rhs;
12049 gimple_omp_atomic_set_need_value (storestmt);
12050 break;
12051 default:
12052 *expr_p = NULL;
12053 break;
12054 }
12055
12056 return GS_ALL_DONE;
12057 }
12058
12059 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
12060 body, and adding some EH bits. */
12061
12062 static enum gimplify_status
12063 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
12064 {
12065 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
12066 gimple *body_stmt;
12067 gtransaction *trans_stmt;
12068 gimple_seq body = NULL;
12069 int subcode = 0;
12070
12071 /* Wrap the transaction body in a BIND_EXPR so we have a context
12072 in which to put decls for OMP.  */
12073 if (TREE_CODE (tbody) != BIND_EXPR)
12074 {
12075 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
12076 TREE_SIDE_EFFECTS (bind) = 1;
12077 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
12078 TRANSACTION_EXPR_BODY (expr) = bind;
12079 }
12080
12081 push_gimplify_context ();
12082 temp = voidify_wrapper_expr (*expr_p, NULL);
12083
12084 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
12085 pop_gimplify_context (body_stmt);
12086
12087 trans_stmt = gimple_build_transaction (body);
12088 if (TRANSACTION_EXPR_OUTER (expr))
12089 subcode = GTMA_IS_OUTER;
12090 else if (TRANSACTION_EXPR_RELAXED (expr))
12091 subcode = GTMA_IS_RELAXED;
12092 gimple_transaction_set_subcode (trans_stmt, subcode);
12093
12094 gimplify_seq_add_stmt (pre_p, trans_stmt);
12095
12096 if (temp)
12097 {
12098 *expr_p = temp;
12099 return GS_OK;
12100 }
12101
12102 *expr_p = NULL_TREE;
12103 return GS_ALL_DONE;
12104 }
12105
12106 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
12107 is the OMP_BODY of the original EXPR (which has already been
12108 gimplified so it's not present in the EXPR).
12109
12110 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
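/* E.g., inside a doacross loop nest with ordered(2) and iterators
   i and j, a depend (sink: i - 1, j) clause is validated here
   against the recorded loop iteration variables, and its decls are
   rewritten to the corresponding internal iteration vars (an
   illustrative example).  */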
12111
12112 static gimple *
12113 gimplify_omp_ordered (tree expr, gimple_seq body)
12114 {
12115 tree c, decls;
12116 int failures = 0;
12117 unsigned int i;
12118 tree source_c = NULL_TREE;
12119 tree sink_c = NULL_TREE;
12120
12121 if (gimplify_omp_ctxp)
12122 {
12123 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12124 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12125 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
12126 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
12127 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
12128 {
12129 error_at (OMP_CLAUSE_LOCATION (c),
12130 "%<ordered%> construct with %<depend%> clause must be "
12131 "closely nested inside a loop with %<ordered%> clause "
12132 "with a parameter");
12133 failures++;
12134 }
12135 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12136 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
12137 {
12138 bool fail = false;
12139 for (decls = OMP_CLAUSE_DECL (c), i = 0;
12140 decls && TREE_CODE (decls) == TREE_LIST;
12141 decls = TREE_CHAIN (decls), ++i)
12142 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
12143 continue;
12144 else if (TREE_VALUE (decls)
12145 != gimplify_omp_ctxp->loop_iter_var[2 * i])
12146 {
12147 error_at (OMP_CLAUSE_LOCATION (c),
12148 "variable %qE is not an iteration "
12149 "of outermost loop %d, expected %qE",
12150 TREE_VALUE (decls), i + 1,
12151 gimplify_omp_ctxp->loop_iter_var[2 * i]);
12152 fail = true;
12153 failures++;
12154 }
12155 else
12156 TREE_VALUE (decls)
12157 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
12158 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
12159 {
12160 error_at (OMP_CLAUSE_LOCATION (c),
12161 "number of variables in %<depend%> clause with "
12162 "%<sink%> modifier does not match number of "
12163 "iteration variables");
12164 failures++;
12165 }
12166 sink_c = c;
12167 }
12168 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12169 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
12170 {
12171 if (source_c)
12172 {
12173 error_at (OMP_CLAUSE_LOCATION (c),
12174 "more than one %<depend%> clause with %<source%> "
12175 "modifier on an %<ordered%> construct");
12176 failures++;
12177 }
12178 else
12179 source_c = c;
12180 }
12181 }
12182 if (source_c && sink_c)
12183 {
12184 error_at (OMP_CLAUSE_LOCATION (source_c),
12185 "%<depend%> clause with %<source%> modifier specified "
12186 "together with %<depend%> clauses with %<sink%> modifier "
12187 "on the same construct");
12188 failures++;
12189 }
12190
12191 if (failures)
12192 return gimple_build_nop ();
12193 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
12194 }
12195
12196 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
12197 expression produces a value to be used as an operand inside a GIMPLE
12198 statement, the value will be stored back in *EXPR_P. This value will
12199 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
12200 an SSA_NAME. The corresponding sequence of GIMPLE statements is
12201 emitted in PRE_P and POST_P.
12202
12203 Additionally, this process may overwrite parts of the input
12204 expression during gimplification. Ideally, it should be
12205 possible to do non-destructive gimplification.
12206
12207 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
12208 the expression needs to evaluate to a value to be used as
12209 an operand in a GIMPLE statement, this value will be stored in
12210 *EXPR_P on exit. This happens when the caller specifies one
12211 of fb_lvalue or fb_rvalue fallback flags.
12212
12213 PRE_P will contain the sequence of GIMPLE statements corresponding
12214 to the evaluation of EXPR and all the side-effects that must
12215 be executed before the main expression. On exit, the last
12216 statement of PRE_P is the core statement being gimplified. For
12217 instance, when gimplifying 'if (++a)' the last statement in
12218 PRE_P will be 'if (t.1)' where t.1 is the result of
12219 pre-incrementing 'a'.
12220
12221 POST_P will contain the sequence of GIMPLE statements corresponding
12222 to the evaluation of all the side-effects that must be executed
12223 after the main expression. If this is NULL, the post
12224 side-effects are stored at the end of PRE_P.
12225
12226 The reason why the output is split in two is to handle post
12227 side-effects explicitly. In some cases, an expression may have
12228 inner and outer post side-effects which need to be emitted in
12229 an order different from the one given by the recursive
12230 traversal. For instance, for the expression (*p--)++ the post
12231 side-effects of '--' must actually occur *after* the post
12232 side-effects of '++'. However, gimplification will first visit
12233 the inner expression, so if a separate POST sequence was not
12234 used, the resulting sequence would be:
12235
12236 1 t.1 = *p
12237 2 p = p - 1
12238 3 t.2 = t.1 + 1
12239 4 *p = t.2
12240
12241 However, the post-decrement operation in line #2 must not be
12242 evaluated until after the store to *p at line #4, so the
12243 correct sequence should be:
12244
12245 1 t.1 = *p
12246 2 t.2 = t.1 + 1
12247 3 *p = t.2
12248 4 p = p - 1
12249
12250 So, by specifying a separate post queue, it is possible
12251 to emit the post side-effects in the correct order.
12252 If POST_P is NULL, an internal queue will be used. Before
12253 returning to the caller, the sequence POST_P is appended to
12254 the main output sequence PRE_P.
12255
12256 GIMPLE_TEST_F points to a function that takes a tree T and
12257 returns nonzero if T is in the GIMPLE form requested by the
12258 caller. The GIMPLE predicates are in gimple.c.
12259
12260 FALLBACK tells the function what sort of a temporary we want if
12261 gimplification cannot produce an expression that complies with
12262 GIMPLE_TEST_F.
12263
12264 fb_none means that no temporary should be generated
12265 fb_rvalue means that an rvalue is OK to generate
12266 fb_lvalue means that an lvalue is OK to generate
12267 fb_either means that either is OK, but an lvalue is preferable.
12268 fb_mayfail means that gimplification may fail (in which case
12269 GS_ERROR will be returned)
12270
12271 The return value is either GS_ERROR or GS_ALL_DONE, since this
12272 function iterates until EXPR is completely gimplified or an error
12273 occurs. */
12274
12275 enum gimplify_status
12276 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12277 bool (*gimple_test_f) (tree), fallback_t fallback)
12278 {
12279 tree tmp;
12280 gimple_seq internal_pre = NULL;
12281 gimple_seq internal_post = NULL;
12282 tree save_expr;
12283 bool is_statement;
12284 location_t saved_location;
12285 enum gimplify_status ret;
12286 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
12287 tree label;
12288
12289 save_expr = *expr_p;
12290 if (save_expr == NULL_TREE)
12291 return GS_ALL_DONE;
12292
12293 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
12294 is_statement = gimple_test_f == is_gimple_stmt;
12295 if (is_statement)
12296 gcc_assert (pre_p);
12297
12298 /* Consistency checks. */
12299 if (gimple_test_f == is_gimple_reg)
12300 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
12301 else if (gimple_test_f == is_gimple_val
12302 || gimple_test_f == is_gimple_call_addr
12303 || gimple_test_f == is_gimple_condexpr
12304 || gimple_test_f == is_gimple_mem_rhs
12305 || gimple_test_f == is_gimple_mem_rhs_or_call
12306 || gimple_test_f == is_gimple_reg_rhs
12307 || gimple_test_f == is_gimple_reg_rhs_or_call
12308 || gimple_test_f == is_gimple_asm_val
12309 || gimple_test_f == is_gimple_mem_ref_addr)
12310 gcc_assert (fallback & fb_rvalue);
12311 else if (gimple_test_f == is_gimple_min_lval
12312 || gimple_test_f == is_gimple_lvalue)
12313 gcc_assert (fallback & fb_lvalue);
12314 else if (gimple_test_f == is_gimple_addressable)
12315 gcc_assert (fallback & fb_either);
12316 else if (gimple_test_f == is_gimple_stmt)
12317 gcc_assert (fallback == fb_none);
12318 else
12319 {
12320 /* We should have recognized the GIMPLE_TEST_F predicate to
12321 know what kind of fallback to use in case a temporary is
12322 needed to hold the value or address of *EXPR_P. */
12323 gcc_unreachable ();
12324 }
12325
12326 /* We used to check the predicate here and return immediately if it
12327 succeeds. This is wrong; the design is for gimplification to be
12328 idempotent, and for the predicates to only test for valid forms, not
12329 whether they are fully simplified. */
12330 if (pre_p == NULL)
12331 pre_p = &internal_pre;
12332
12333 if (post_p == NULL)
12334 post_p = &internal_post;
12335
12336 /* Remember the last statements added to PRE_P and POST_P. Every
12337 new statement added by the gimplification helpers needs to be
12338 annotated with location information. To centralize the
12339 responsibility, we remember the last statement that had been
12340 added to both queues before gimplifying *EXPR_P. If
12341 gimplification produces new statements in PRE_P and POST_P, those
12342 statements will be annotated with the same location information
12343 as *EXPR_P. */
12344 pre_last_gsi = gsi_last (*pre_p);
12345 post_last_gsi = gsi_last (*post_p);
12346
12347 saved_location = input_location;
12348 if (save_expr != error_mark_node
12349 && EXPR_HAS_LOCATION (*expr_p))
12350 input_location = EXPR_LOCATION (*expr_p);
12351
12352 /* Loop over the specific gimplifiers until the toplevel node
12353 no longer changes. */
12354 do
12355 {
12356 /* Strip away as many useless type conversions as possible
12357 at the toplevel. */
12358 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
12359
12360 /* Remember the expr. */
12361 save_expr = *expr_p;
12362
12363 /* Die, die, die, my darling. */
12364 if (error_operand_p (save_expr))
12365 {
12366 ret = GS_ERROR;
12367 break;
12368 }
12369
12370 /* Do any language-specific gimplification. */
12371 ret = ((enum gimplify_status)
12372 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
12373 if (ret == GS_OK)
12374 {
12375 if (*expr_p == NULL_TREE)
12376 break;
12377 if (*expr_p != save_expr)
12378 continue;
12379 }
12380 else if (ret != GS_UNHANDLED)
12381 break;
12382
12383 /* Make sure that all the cases set 'ret' appropriately. */
12384 ret = GS_UNHANDLED;
12385 switch (TREE_CODE (*expr_p))
12386 {
12387 /* First deal with the special cases. */
12388
12389 case POSTINCREMENT_EXPR:
12390 case POSTDECREMENT_EXPR:
12391 case PREINCREMENT_EXPR:
12392 case PREDECREMENT_EXPR:
12393 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
12394 fallback != fb_none,
12395 TREE_TYPE (*expr_p));
12396 break;
12397
12398 case VIEW_CONVERT_EXPR:
12399 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
12400 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
12401 {
12402 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12403 post_p, is_gimple_val, fb_rvalue);
12404 recalculate_side_effects (*expr_p);
12405 break;
12406 }
12407 /* Fallthru. */
12408
12409 case ARRAY_REF:
12410 case ARRAY_RANGE_REF:
12411 case REALPART_EXPR:
12412 case IMAGPART_EXPR:
12413 case COMPONENT_REF:
12414 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
12415 fallback ? fallback : fb_rvalue);
12416 break;
12417
12418 case COND_EXPR:
12419 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
12420
12421 /* C99 code may assign to an array in a structure value of a
12422 conditional expression, and this has undefined behavior
12423 only on execution, so create a temporary if an lvalue is
12424 required. */
12425 if (fallback == fb_lvalue)
12426 {
12427 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12428 mark_addressable (*expr_p);
12429 ret = GS_OK;
12430 }
12431 break;
12432
12433 case CALL_EXPR:
12434 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
12435
12436 /* C99 code may assign to an array in a structure returned
12437 from a function, and this has undefined behavior only on
12438 execution, so create a temporary if an lvalue is
12439 required. */
12440 if (fallback == fb_lvalue)
12441 {
12442 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12443 mark_addressable (*expr_p);
12444 ret = GS_OK;
12445 }
12446 break;
12447
12448 case TREE_LIST:
12449 gcc_unreachable ();
12450
12451 case COMPOUND_EXPR:
12452 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
12453 break;
12454
12455 case COMPOUND_LITERAL_EXPR:
12456 ret = gimplify_compound_literal_expr (expr_p, pre_p,
12457 gimple_test_f, fallback);
12458 break;
12459
12460 case MODIFY_EXPR:
12461 case INIT_EXPR:
12462 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
12463 fallback != fb_none);
12464 break;
12465
12466 case TRUTH_ANDIF_EXPR:
12467 case TRUTH_ORIF_EXPR:
12468 {
12469 /* Preserve the original type of the expression and the
12470 source location of the outer expression. */
12471 tree org_type = TREE_TYPE (*expr_p);
12472 *expr_p = gimple_boolify (*expr_p);
12473 *expr_p = build3_loc (input_location, COND_EXPR,
12474 org_type, *expr_p,
12475 fold_convert_loc
12476 (input_location,
12477 org_type, boolean_true_node),
12478 fold_convert_loc
12479 (input_location,
12480 org_type, boolean_false_node));
12481 ret = GS_OK;
12482 break;
12483 }
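/* For illustration: this turns a short-circuit expression such as
   a && b into the equivalent COND_EXPR

     (a && b) ? (org_type) 1 : (org_type) 0

   which is then lowered to explicit control flow by the COND_EXPR
   handler on the next iteration of the loop.  */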
12484
12485 case TRUTH_NOT_EXPR:
12486 {
12487 tree type = TREE_TYPE (*expr_p);
12488 /* The parsers are careful to generate TRUTH_NOT_EXPR
12489 only with operands that are always zero or one.
12490 We do not fold here but handle the only interesting case
12491 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
12492 *expr_p = gimple_boolify (*expr_p);
12493 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
12494 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
12495 TREE_TYPE (*expr_p),
12496 TREE_OPERAND (*expr_p, 0));
12497 else
12498 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
12499 TREE_TYPE (*expr_p),
12500 TREE_OPERAND (*expr_p, 0),
12501 build_int_cst (TREE_TYPE (*expr_p), 1));
12502 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
12503 *expr_p = fold_convert_loc (input_location, type, *expr_p);
12504 ret = GS_OK;
12505 break;
12506 }
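/* For illustration: given the 0/1 invariant above, !x becomes ~x when
   X has a one-bit type, and x ^ 1 otherwise; both preserve the 0/1
   range without reintroducing a TRUTH_NOT_EXPR.  */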
12507
12508 case ADDR_EXPR:
12509 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
12510 break;
12511
12512 case ANNOTATE_EXPR:
12513 {
12514 tree cond = TREE_OPERAND (*expr_p, 0);
12515 tree kind = TREE_OPERAND (*expr_p, 1);
12516 tree data = TREE_OPERAND (*expr_p, 2);
12517 tree type = TREE_TYPE (cond);
12518 if (!INTEGRAL_TYPE_P (type))
12519 {
12520 *expr_p = cond;
12521 ret = GS_OK;
12522 break;
12523 }
12524 tree tmp = create_tmp_var (type);
12525 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
12526 gcall *call
12527 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
12528 gimple_call_set_lhs (call, tmp);
12529 gimplify_seq_add_stmt (pre_p, call);
12530 *expr_p = tmp;
12531 ret = GS_ALL_DONE;
12532 break;
12533 }
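/* For illustration (an assumption about the usual source of these
   nodes): ANNOTATE_EXPRs typically wrap loop conditions for pragmas
   such as #pragma GCC ivdep or #pragma GCC unroll; the rewrite above
   leaves a call like

     tmp = .ANNOTATE (cond, kind, data);

   for later passes to consume.  */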
12534
12535 case VA_ARG_EXPR:
12536 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
12537 break;
12538
12539 CASE_CONVERT:
12540 if (IS_EMPTY_STMT (*expr_p))
12541 {
12542 ret = GS_ALL_DONE;
12543 break;
12544 }
12545
12546 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
12547 || fallback == fb_none)
12548 {
12549 /* Just strip a conversion to void (or in void context) and
12550 try again. */
12551 *expr_p = TREE_OPERAND (*expr_p, 0);
12552 ret = GS_OK;
12553 break;
12554 }
12555
12556 ret = gimplify_conversion (expr_p);
12557 if (ret == GS_ERROR)
12558 break;
12559 if (*expr_p != save_expr)
12560 break;
12561 /* FALLTHRU */
12562
12563 case FIX_TRUNC_EXPR:
12564 /* unary_expr: ... | '(' cast ')' val | ... */
12565 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12566 is_gimple_val, fb_rvalue);
12567 recalculate_side_effects (*expr_p);
12568 break;
12569
12570 case INDIRECT_REF:
12571 {
12572 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
12573 bool notrap = TREE_THIS_NOTRAP (*expr_p);
12574 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
12575
12576 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
12577 if (*expr_p != save_expr)
12578 {
12579 ret = GS_OK;
12580 break;
12581 }
12582
12583 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12584 is_gimple_reg, fb_rvalue);
12585 if (ret == GS_ERROR)
12586 break;
12587
12588 recalculate_side_effects (*expr_p);
12589 *expr_p = fold_build2_loc (input_location, MEM_REF,
12590 TREE_TYPE (*expr_p),
12591 TREE_OPERAND (*expr_p, 0),
12592 build_int_cst (saved_ptr_type, 0));
12593 TREE_THIS_VOLATILE (*expr_p) = volatilep;
12594 TREE_THIS_NOTRAP (*expr_p) = notrap;
12595 ret = GS_OK;
12596 break;
12597 }
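/* For illustration: if folding does not simplify it, *p is rewritten
   here into the equivalent of

     MEM[(T *) p + 0]

   i.e. a MEM_REF with a zero offset carrying the saved pointer type,
   after P itself has been gimplified into a register.  */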
12598
12599 /* We arrive here through the various re-gimplification paths. */
12600 case MEM_REF:
12601 /* First try re-folding the whole thing. */
12602 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
12603 TREE_OPERAND (*expr_p, 0),
12604 TREE_OPERAND (*expr_p, 1));
12605 if (tmp)
12606 {
12607 REF_REVERSE_STORAGE_ORDER (tmp)
12608 = REF_REVERSE_STORAGE_ORDER (*expr_p);
12609 *expr_p = tmp;
12610 recalculate_side_effects (*expr_p);
12611 ret = GS_OK;
12612 break;
12613 }
12614 /* Avoid re-gimplifying the address operand if it is already
12615 in suitable form. Re-gimplifying would mark the address
12616 operand addressable. Always gimplify when not in SSA form
12617 as we still may have to gimplify decls with value-exprs. */
12618 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
12619 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
12620 {
12621 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12622 is_gimple_mem_ref_addr, fb_rvalue);
12623 if (ret == GS_ERROR)
12624 break;
12625 }
12626 recalculate_side_effects (*expr_p);
12627 ret = GS_ALL_DONE;
12628 break;
12629
12630 /* Constants need not be gimplified. */
12631 case INTEGER_CST:
12632 case REAL_CST:
12633 case FIXED_CST:
12634 case STRING_CST:
12635 case COMPLEX_CST:
12636 case VECTOR_CST:
12637 /* Drop the overflow flag on constants, we do not want
12638 that in the GIMPLE IL. */
12639 if (TREE_OVERFLOW_P (*expr_p))
12640 *expr_p = drop_tree_overflow (*expr_p);
12641 ret = GS_ALL_DONE;
12642 break;
12643
12644 case CONST_DECL:
12645 /* If we require an lvalue, such as for ADDR_EXPR, retain the
12646 CONST_DECL node. Otherwise the decl is replaceable by its
12647 value. */
12648 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
12649 if (fallback & fb_lvalue)
12650 ret = GS_ALL_DONE;
12651 else
12652 {
12653 *expr_p = DECL_INITIAL (*expr_p);
12654 ret = GS_OK;
12655 }
12656 break;
12657
12658 case DECL_EXPR:
12659 ret = gimplify_decl_expr (expr_p, pre_p);
12660 break;
12661
12662 case BIND_EXPR:
12663 ret = gimplify_bind_expr (expr_p, pre_p);
12664 break;
12665
12666 case LOOP_EXPR:
12667 ret = gimplify_loop_expr (expr_p, pre_p);
12668 break;
12669
12670 case SWITCH_EXPR:
12671 ret = gimplify_switch_expr (expr_p, pre_p);
12672 break;
12673
12674 case EXIT_EXPR:
12675 ret = gimplify_exit_expr (expr_p);
12676 break;
12677
12678 case GOTO_EXPR:
12679 /* If the target is not a LABEL_DECL, then it is a computed jump
12680 and the target needs to be gimplified. */
12681 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
12682 {
12683 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
12684 NULL, is_gimple_val, fb_rvalue);
12685 if (ret == GS_ERROR)
12686 break;
12687 }
12688 gimplify_seq_add_stmt (pre_p,
12689 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
12690 ret = GS_ALL_DONE;
12691 break;
12692
12693 case PREDICT_EXPR:
12694 gimplify_seq_add_stmt (pre_p,
12695 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
12696 PREDICT_EXPR_OUTCOME (*expr_p)));
12697 ret = GS_ALL_DONE;
12698 break;
12699
12700 case LABEL_EXPR:
12701 ret = gimplify_label_expr (expr_p, pre_p);
12702 label = LABEL_EXPR_LABEL (*expr_p);
12703 gcc_assert (decl_function_context (label) == current_function_decl);
12704
12705 /* If the label is used in a goto statement, or the address of the label
12706 is taken, we need to unpoison all variables that were seen so far.
12707 Doing so prevents us from reporting false positives. */
12708 if (asan_poisoned_variables
12709 && asan_used_labels != NULL
12710 && asan_used_labels->contains (label))
12711 asan_poison_variables (asan_poisoned_variables, false, pre_p);
12712 break;
12713
12714 case CASE_LABEL_EXPR:
12715 ret = gimplify_case_label_expr (expr_p, pre_p);
12716
12717 if (gimplify_ctxp->live_switch_vars)
12718 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
12719 pre_p);
12720 break;
12721
12722 case RETURN_EXPR:
12723 ret = gimplify_return_expr (*expr_p, pre_p);
12724 break;
12725
12726 case CONSTRUCTOR:
12727 /* Don't reduce this in place; let gimplify_init_constructor work its
12728 magic. But if we're only elaborating this for side effects, just
12729 gimplify any element that has side-effects. */
12730 if (fallback == fb_none)
12731 {
12732 unsigned HOST_WIDE_INT ix;
12733 tree val;
12734 tree temp = NULL_TREE;
12735 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
12736 if (TREE_SIDE_EFFECTS (val))
12737 append_to_statement_list (val, &temp);
12738
12739 *expr_p = temp;
12740 ret = temp ? GS_OK : GS_ALL_DONE;
12741 }
12742 /* C99 code may assign to an array in a constructed
12743 structure or union, and this has undefined behavior only
12744 on execution, so create a temporary if an lvalue is
12745 required. */
12746 else if (fallback == fb_lvalue)
12747 {
12748 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12749 mark_addressable (*expr_p);
12750 ret = GS_OK;
12751 }
12752 else
12753 ret = GS_ALL_DONE;
12754 break;
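/* For illustration: when a CONSTRUCTOR is evaluated purely for effect,
   e.g. (void) (struct S) { 1, f () }, only the side-effecting element
   F () survives as a statement; the remaining elements are dropped.  */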
12755
12756 /* The following are special cases that are not handled by the
12757 original GIMPLE grammar. */
12758
12759 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
12760 eliminated. */
12761 case SAVE_EXPR:
12762 ret = gimplify_save_expr (expr_p, pre_p, post_p);
12763 break;
12764
12765 case BIT_FIELD_REF:
12766 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12767 post_p, is_gimple_lvalue, fb_either);
12768 recalculate_side_effects (*expr_p);
12769 break;
12770
12771 case TARGET_MEM_REF:
12772 {
12773 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
12774
12775 if (TMR_BASE (*expr_p))
12776 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
12777 post_p, is_gimple_mem_ref_addr, fb_either);
12778 if (TMR_INDEX (*expr_p))
12779 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
12780 post_p, is_gimple_val, fb_rvalue);
12781 if (TMR_INDEX2 (*expr_p))
12782 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
12783 post_p, is_gimple_val, fb_rvalue);
12784 /* TMR_STEP and TMR_OFFSET are always integer constants. */
12785 ret = MIN (r0, r1);
12786 }
12787 break;
12788
12789 case NON_LVALUE_EXPR:
12790 /* This should have been stripped above. */
12791 gcc_unreachable ();
12792
12793 case ASM_EXPR:
12794 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
12795 break;
12796
12797 case TRY_FINALLY_EXPR:
12798 case TRY_CATCH_EXPR:
12799 {
12800 gimple_seq eval, cleanup;
12801 gtry *try_;
12802
12803 /* Calls to destructors are generated automatically in FINALLY/CATCH
12804 block. They should have location as UNKNOWN_LOCATION. However,
12805 gimplify_call_expr will reset these call stmts to input_location
12806 if it finds stmt's location is unknown. To prevent resetting for
12807 destructors, we set the input_location to unknown.
12808 Note that this only affects the destructor calls in FINALLY/CATCH
12809 block, and will automatically reset to its original value by the
12810 end of gimplify_expr. */
12811 input_location = UNKNOWN_LOCATION;
12812 eval = cleanup = NULL;
12813 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
12814 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
12815 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
12816 if (gimple_seq_empty_p (cleanup))
12817 {
12818 gimple_seq_add_seq (pre_p, eval);
12819 ret = GS_ALL_DONE;
12820 break;
12821 }
12822 try_ = gimple_build_try (eval, cleanup,
12823 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
12824 ? GIMPLE_TRY_FINALLY
12825 : GIMPLE_TRY_CATCH);
12826 if (EXPR_HAS_LOCATION (save_expr))
12827 gimple_set_location (try_, EXPR_LOCATION (save_expr));
12828 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
12829 gimple_set_location (try_, saved_location);
12830 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
12831 gimple_try_set_catch_is_cleanup (try_,
12832 TRY_CATCH_IS_CLEANUP (*expr_p));
12833 gimplify_seq_add_stmt (pre_p, try_);
12834 ret = GS_ALL_DONE;
12835 break;
12836 }
12837
12838 case CLEANUP_POINT_EXPR:
12839 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
12840 break;
12841
12842 case TARGET_EXPR:
12843 ret = gimplify_target_expr (expr_p, pre_p, post_p);
12844 break;
12845
12846 case CATCH_EXPR:
12847 {
12848 gimple *c;
12849 gimple_seq handler = NULL;
12850 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
12851 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
12852 gimplify_seq_add_stmt (pre_p, c);
12853 ret = GS_ALL_DONE;
12854 break;
12855 }
12856
12857 case EH_FILTER_EXPR:
12858 {
12859 gimple *ehf;
12860 gimple_seq failure = NULL;
12861
12862 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
12863 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
12864 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
12865 gimplify_seq_add_stmt (pre_p, ehf);
12866 ret = GS_ALL_DONE;
12867 break;
12868 }
12869
12870 case OBJ_TYPE_REF:
12871 {
12872 enum gimplify_status r0, r1;
12873 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
12874 post_p, is_gimple_val, fb_rvalue);
12875 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
12876 post_p, is_gimple_val, fb_rvalue);
12877 TREE_SIDE_EFFECTS (*expr_p) = 0;
12878 ret = MIN (r0, r1);
12879 }
12880 break;
12881
12882 case LABEL_DECL:
12883 /* We get here when taking the address of a label. We mark
12884 the label as "forced", meaning it can never be removed and
12885 it is a potential target for any computed goto. */
12886 FORCED_LABEL (*expr_p) = 1;
12887 ret = GS_ALL_DONE;
12888 break;
12889
12890 case STATEMENT_LIST:
12891 ret = gimplify_statement_list (expr_p, pre_p);
12892 break;
12893
12894 case WITH_SIZE_EXPR:
12895 {
12896 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12897 post_p == &internal_post ? NULL : post_p,
12898 gimple_test_f, fallback);
12899 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12900 is_gimple_val, fb_rvalue);
12901 ret = GS_ALL_DONE;
12902 }
12903 break;
12904
12905 case VAR_DECL:
12906 case PARM_DECL:
12907 ret = gimplify_var_or_parm_decl (expr_p);
12908 break;
12909
12910 case RESULT_DECL:
12911 /* When within an OMP context, notice uses of variables. */
12912 if (gimplify_omp_ctxp)
12913 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
12914 ret = GS_ALL_DONE;
12915 break;
12916
12917 case DEBUG_EXPR_DECL:
12918 gcc_unreachable ();
12919
12920 case DEBUG_BEGIN_STMT:
12921 gimplify_seq_add_stmt (pre_p,
12922 gimple_build_debug_begin_stmt
12923 (TREE_BLOCK (*expr_p),
12924 EXPR_LOCATION (*expr_p)));
12925 ret = GS_ALL_DONE;
12926 *expr_p = NULL;
12927 break;
12928
12929 case SSA_NAME:
12930 /* Allow callbacks into the gimplifier during optimization. */
12931 ret = GS_ALL_DONE;
12932 break;
12933
12934 case OMP_PARALLEL:
12935 gimplify_omp_parallel (expr_p, pre_p);
12936 ret = GS_ALL_DONE;
12937 break;
12938
12939 case OMP_TASK:
12940 gimplify_omp_task (expr_p, pre_p);
12941 ret = GS_ALL_DONE;
12942 break;
12943
12944 case OMP_FOR:
12945 case OMP_SIMD:
12946 case OMP_DISTRIBUTE:
12947 case OMP_TASKLOOP:
12948 case OACC_LOOP:
12949 ret = gimplify_omp_for (expr_p, pre_p);
12950 break;
12951
12952 case OACC_CACHE:
12953 gimplify_oacc_cache (expr_p, pre_p);
12954 ret = GS_ALL_DONE;
12955 break;
12956
12957 case OACC_DECLARE:
12958 gimplify_oacc_declare (expr_p, pre_p);
12959 ret = GS_ALL_DONE;
12960 break;
12961
12962 case OACC_HOST_DATA:
12963 case OACC_DATA:
12964 case OACC_KERNELS:
12965 case OACC_PARALLEL:
12966 case OMP_SECTIONS:
12967 case OMP_SINGLE:
12968 case OMP_TARGET:
12969 case OMP_TARGET_DATA:
12970 case OMP_TEAMS:
12971 gimplify_omp_workshare (expr_p, pre_p);
12972 ret = GS_ALL_DONE;
12973 break;
12974
12975 case OACC_ENTER_DATA:
12976 case OACC_EXIT_DATA:
12977 case OACC_UPDATE:
12978 case OMP_TARGET_UPDATE:
12979 case OMP_TARGET_ENTER_DATA:
12980 case OMP_TARGET_EXIT_DATA:
12981 gimplify_omp_target_update (expr_p, pre_p);
12982 ret = GS_ALL_DONE;
12983 break;
12984
12985 case OMP_SECTION:
12986 case OMP_MASTER:
12987 case OMP_ORDERED:
12988 case OMP_CRITICAL:
12989 {
12990 gimple_seq body = NULL;
12991 gimple *g;
12992
12993 gimplify_and_add (OMP_BODY (*expr_p), &body);
12994 switch (TREE_CODE (*expr_p))
12995 {
12996 case OMP_SECTION:
12997 g = gimple_build_omp_section (body);
12998 break;
12999 case OMP_MASTER:
13000 g = gimple_build_omp_master (body);
13001 break;
13002 case OMP_ORDERED:
13003 g = gimplify_omp_ordered (*expr_p, body);
13004 break;
13005 case OMP_CRITICAL:
13006 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
13007 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
13008 gimplify_adjust_omp_clauses (pre_p, body,
13009 &OMP_CRITICAL_CLAUSES (*expr_p),
13010 OMP_CRITICAL);
13011 g = gimple_build_omp_critical (body,
13012 OMP_CRITICAL_NAME (*expr_p),
13013 OMP_CRITICAL_CLAUSES (*expr_p));
13014 break;
13015 default:
13016 gcc_unreachable ();
13017 }
13018 gimplify_seq_add_stmt (pre_p, g);
13019 ret = GS_ALL_DONE;
13020 break;
13021 }
13022
13023 case OMP_TASKGROUP:
13024 {
13025 gimple_seq body = NULL;
13026
13027 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
13028 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
13029 OMP_TASKGROUP);
13030 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
13031 gimplify_and_add (OMP_BODY (*expr_p), &body);
13032 gimple_seq cleanup = NULL;
13033 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
13034 gimple *g = gimple_build_call (fn, 0);
13035 gimple_seq_add_stmt (&cleanup, g);
13036 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
13037 body = NULL;
13038 gimple_seq_add_stmt (&body, g);
13039 g = gimple_build_omp_taskgroup (body, *pclauses);
13040 gimplify_seq_add_stmt (pre_p, g);
13041 ret = GS_ALL_DONE;
13042 break;
13043 }
13044
13045 case OMP_ATOMIC:
13046 case OMP_ATOMIC_READ:
13047 case OMP_ATOMIC_CAPTURE_OLD:
13048 case OMP_ATOMIC_CAPTURE_NEW:
13049 ret = gimplify_omp_atomic (expr_p, pre_p);
13050 break;
13051
13052 case TRANSACTION_EXPR:
13053 ret = gimplify_transaction (expr_p, pre_p);
13054 break;
13055
13056 case TRUTH_AND_EXPR:
13057 case TRUTH_OR_EXPR:
13058 case TRUTH_XOR_EXPR:
13059 {
13060 tree orig_type = TREE_TYPE (*expr_p);
13061 tree new_type, xop0, xop1;
13062 *expr_p = gimple_boolify (*expr_p);
13063 new_type = TREE_TYPE (*expr_p);
13064 if (!useless_type_conversion_p (orig_type, new_type))
13065 {
13066 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
13067 ret = GS_OK;
13068 break;
13069 }
13070
13071 /* Boolified binary truth expressions are semantically equivalent
13072 to bitwise binary expressions. Canonicalize them to the
13073 bitwise variant. */
13074 switch (TREE_CODE (*expr_p))
13075 {
13076 case TRUTH_AND_EXPR:
13077 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
13078 break;
13079 case TRUTH_OR_EXPR:
13080 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
13081 break;
13082 case TRUTH_XOR_EXPR:
13083 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
13084 break;
13085 default:
13086 break;
13087 }
13088 /* Now make sure that operands have compatible type to
13089 expression's new_type. */
13090 xop0 = TREE_OPERAND (*expr_p, 0);
13091 xop1 = TREE_OPERAND (*expr_p, 1);
13092 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
13093 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
13094 new_type,
13095 xop0);
13096 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
13097 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
13098 new_type,
13099 xop1);
13100 /* Continue classified as tcc_binary. */
13101 goto expr_2;
13102 }
13103
13104 case VEC_COND_EXPR:
13105 {
13106 enum gimplify_status r0, r1, r2;
13107
13108 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13109 post_p, is_gimple_condexpr, fb_rvalue);
13110 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13111 post_p, is_gimple_val, fb_rvalue);
13112 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13113 post_p, is_gimple_val, fb_rvalue);
13114
13115 ret = MIN (MIN (r0, r1), r2);
13116 recalculate_side_effects (*expr_p);
13117 }
13118 break;
13119
13120 case VEC_PERM_EXPR:
13121 /* Classified as tcc_expression. */
13122 goto expr_3;
13123
13124 case BIT_INSERT_EXPR:
13125 /* Argument 3 is a constant. */
13126 goto expr_2;
13127
13128 case POINTER_PLUS_EXPR:
13129 {
13130 enum gimplify_status r0, r1;
13131 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13132 post_p, is_gimple_val, fb_rvalue);
13133 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13134 post_p, is_gimple_val, fb_rvalue);
13135 recalculate_side_effects (*expr_p);
13136 ret = MIN (r0, r1);
13137 break;
13138 }
13139
13140 default:
13141 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
13142 {
13143 case tcc_comparison:
13144 /* Handle comparison of objects of non-scalar mode aggregates
13145 with a call to memcmp. It would be nice to only have to do
13146 this for variable-sized objects, but then we'd have to allow
13147 the same nest of reference nodes we allow for MODIFY_EXPR and
13148 that's too complex.
13149
13150 Compare scalar mode aggregates as scalar mode values. Using
13151 memcmp for them would be very inefficient at best, and is
13152 plain wrong if bitfields are involved. */
13153 {
13154 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
13155
13156 /* Vector comparisons need no boolification. */
13157 if (TREE_CODE (type) == VECTOR_TYPE)
13158 goto expr_2;
13159 else if (!AGGREGATE_TYPE_P (type))
13160 {
13161 tree org_type = TREE_TYPE (*expr_p);
13162 *expr_p = gimple_boolify (*expr_p);
13163 if (!useless_type_conversion_p (org_type,
13164 TREE_TYPE (*expr_p)))
13165 {
13166 *expr_p = fold_convert_loc (input_location,
13167 org_type, *expr_p);
13168 ret = GS_OK;
13169 }
13170 else
13171 goto expr_2;
13172 }
13173 else if (TYPE_MODE (type) != BLKmode)
13174 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
13175 else
13176 ret = gimplify_variable_sized_compare (expr_p);
13177
13178 break;
13179 }
13180
13181 /* If *EXPR_P does not need to be special-cased, handle it
13182 according to its class. */
13183 case tcc_unary:
13184 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13185 post_p, is_gimple_val, fb_rvalue);
13186 break;
13187
13188 case tcc_binary:
13189 expr_2:
13190 {
13191 enum gimplify_status r0, r1;
13192
13193 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13194 post_p, is_gimple_val, fb_rvalue);
13195 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13196 post_p, is_gimple_val, fb_rvalue);
13197
13198 ret = MIN (r0, r1);
13199 break;
13200 }
13201
13202 expr_3:
13203 {
13204 enum gimplify_status r0, r1, r2;
13205
13206 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13207 post_p, is_gimple_val, fb_rvalue);
13208 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13209 post_p, is_gimple_val, fb_rvalue);
13210 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13211 post_p, is_gimple_val, fb_rvalue);
13212
13213 ret = MIN (MIN (r0, r1), r2);
13214 break;
13215 }
13216
13217 case tcc_declaration:
13218 case tcc_constant:
13219 ret = GS_ALL_DONE;
13220 goto dont_recalculate;
13221
13222 default:
13223 gcc_unreachable ();
13224 }
13225
13226 recalculate_side_effects (*expr_p);
13227
13228 dont_recalculate:
13229 break;
13230 }
13231
13232 gcc_assert (*expr_p || ret != GS_OK);
13233 }
13234 while (ret == GS_OK);
13235
13236 /* If we encountered an error_mark somewhere nested inside, either
13237 stub out the statement or propagate the error back out. */
13238 if (ret == GS_ERROR)
13239 {
13240 if (is_statement)
13241 *expr_p = NULL;
13242 goto out;
13243 }
13244
13245 /* This was only valid as a return value from the langhook, which
13246 we handled. Make sure it doesn't escape from any other context. */
13247 gcc_assert (ret != GS_UNHANDLED);
13248
13249 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
13250 {
13251 /* We aren't looking for a value, and we don't have a valid
13252 statement. If it doesn't have side-effects, throw it away.
13253 We can also get here with code such as "*&&L;", where L is
13254 a LABEL_DECL that is marked as FORCED_LABEL. */
13255 if (TREE_CODE (*expr_p) == LABEL_DECL
13256 || !TREE_SIDE_EFFECTS (*expr_p))
13257 *expr_p = NULL;
13258 else if (!TREE_THIS_VOLATILE (*expr_p))
13259 {
13260 /* This is probably a _REF that contains something nested that
13261 has side effects. Recurse through the operands to find it. */
13262 enum tree_code code = TREE_CODE (*expr_p);
13263
13264 switch (code)
13265 {
13266 case COMPONENT_REF:
13267 case REALPART_EXPR:
13268 case IMAGPART_EXPR:
13269 case VIEW_CONVERT_EXPR:
13270 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13271 gimple_test_f, fallback);
13272 break;
13273
13274 case ARRAY_REF:
13275 case ARRAY_RANGE_REF:
13276 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13277 gimple_test_f, fallback);
13278 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
13279 gimple_test_f, fallback);
13280 break;
13281
13282 default:
13283 /* Anything else with side-effects must be converted to
13284 a valid statement before we get here. */
13285 gcc_unreachable ();
13286 }
13287
13288 *expr_p = NULL;
13289 }
13290 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
13291 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
13292 {
13293 /* Historically, the compiler has treated a bare reference
13294 to a non-BLKmode volatile lvalue as forcing a load. */
13295 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
13296
13297 /* Normally, we do not want to create a temporary for a
13298 TREE_ADDRESSABLE type because such a type should not be
13299 copied by bitwise-assignment. However, we make an
13300 exception here, as all we are doing here is ensuring that
13301 we read the bytes that make up the type. We use
13302 create_tmp_var_raw because create_tmp_var will abort when
13303 given a TREE_ADDRESSABLE type. */
13304 tree tmp = create_tmp_var_raw (type, "vol");
13305 gimple_add_tmp_var (tmp);
13306 gimplify_assign (tmp, *expr_p, pre_p);
13307 *expr_p = NULL;
13308 }
13309 else
13310 /* We can't do anything useful with a volatile reference to
13311 an incomplete type, so just throw it away. Likewise for
13312 a BLKmode type, since any implicit inner load should
13313 already have been turned into an explicit one by the
13314 gimplification process. */
13315 *expr_p = NULL;
13316 }
13317
13318 /* If we are gimplifying at the statement level, we're done. Tack
13319 everything together and return. */
13320 if (fallback == fb_none || is_statement)
13321 {
13322 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
13323 it out for GC to reclaim it. */
13324 *expr_p = NULL_TREE;
13325
13326 if (!gimple_seq_empty_p (internal_pre)
13327 || !gimple_seq_empty_p (internal_post))
13328 {
13329 gimplify_seq_add_seq (&internal_pre, internal_post);
13330 gimplify_seq_add_seq (pre_p, internal_pre);
13331 }
13332
13333 /* The result of gimplifying *EXPR_P is going to be the last few
13334 statements in *PRE_P and *POST_P. Add location information
13335 to all the statements that were added by the gimplification
13336 helpers. */
13337 if (!gimple_seq_empty_p (*pre_p))
13338 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
13339
13340 if (!gimple_seq_empty_p (*post_p))
13341 annotate_all_with_location_after (*post_p, post_last_gsi,
13342 input_location);
13343
13344 goto out;
13345 }
13346
13347 #ifdef ENABLE_GIMPLE_CHECKING
13348 if (*expr_p)
13349 {
13350 enum tree_code code = TREE_CODE (*expr_p);
13351 /* These expressions should already be in gimple IR form. */
13352 gcc_assert (code != MODIFY_EXPR
13353 && code != ASM_EXPR
13354 && code != BIND_EXPR
13355 && code != CATCH_EXPR
13356 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
13357 && code != EH_FILTER_EXPR
13358 && code != GOTO_EXPR
13359 && code != LABEL_EXPR
13360 && code != LOOP_EXPR
13361 && code != SWITCH_EXPR
13362 && code != TRY_FINALLY_EXPR
13363 && code != OACC_PARALLEL
13364 && code != OACC_KERNELS
13365 && code != OACC_DATA
13366 && code != OACC_HOST_DATA
13367 && code != OACC_DECLARE
13368 && code != OACC_UPDATE
13369 && code != OACC_ENTER_DATA
13370 && code != OACC_EXIT_DATA
13371 && code != OACC_CACHE
13372 && code != OMP_CRITICAL
13373 && code != OMP_FOR
13374 && code != OACC_LOOP
13375 && code != OMP_MASTER
13376 && code != OMP_TASKGROUP
13377 && code != OMP_ORDERED
13378 && code != OMP_PARALLEL
13379 && code != OMP_SECTIONS
13380 && code != OMP_SECTION
13381 && code != OMP_SINGLE);
13382 }
13383 #endif
13384
13385 /* Otherwise we're gimplifying a subexpression, so the resulting
13386 value is interesting. If it's a valid operand that matches
13387 GIMPLE_TEST_F, we're done. Unless we are handling some
13388 post-effects internally; if that's the case, we need to copy into
13389 a temporary before adding the post-effects to POST_P. */
13390 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
13391 goto out;
13392
13393 /* Otherwise, we need to create a new temporary for the gimplified
13394 expression. */
13395
13396 /* We can't return an lvalue if we have an internal postqueue. The
13397 object the lvalue refers to would (probably) be modified by the
13398 postqueue; we need to copy the value out first, which means an
13399 rvalue. */
13400 if ((fallback & fb_lvalue)
13401 && gimple_seq_empty_p (internal_post)
13402 && is_gimple_addressable (*expr_p))
13403 {
13404 /* An lvalue will do. Take the address of the expression, store it
13405 in a temporary, and replace the expression with an INDIRECT_REF of
13406 that temporary. */
13407 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
13408 unsigned int ref_align = get_object_alignment (*expr_p);
13409 tree ref_type = TREE_TYPE (*expr_p);
13410 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
13411 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
13412 if (TYPE_ALIGN (ref_type) != ref_align)
13413 ref_type = build_aligned_type (ref_type, ref_align);
13414 *expr_p = build2 (MEM_REF, ref_type,
13415 tmp, build_zero_cst (ref_alias_type));
13416 }
13417 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
13418 {
13419 /* An rvalue will do. Assign the gimplified expression into a
13420 new temporary TMP and replace the original expression with
13421 TMP. First, make sure that the expression has a type so that
13422 it can be assigned into a temporary. */
13423 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
13424 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
13425 }
13426 else
13427 {
13428 #ifdef ENABLE_GIMPLE_CHECKING
13429 if (!(fallback & fb_mayfail))
13430 {
13431 fprintf (stderr, "gimplification failed:\n");
13432 print_generic_expr (stderr, *expr_p);
13433 debug_tree (*expr_p);
13434 internal_error ("gimplification failed");
13435 }
13436 #endif
13437 gcc_assert (fallback & fb_mayfail);
13438
13439 /* If this is an asm statement, and the user asked for the
13440 impossible, don't die. Fail and let gimplify_asm_expr
13441 issue an error. */
13442 ret = GS_ERROR;
13443 goto out;
13444 }
13445
13446 /* Make sure the temporary matches our predicate. */
13447 gcc_assert ((*gimple_test_f) (*expr_p));
13448
13449 if (!gimple_seq_empty_p (internal_post))
13450 {
13451 annotate_all_with_location (internal_post, input_location);
13452 gimplify_seq_add_seq (pre_p, internal_post);
13453 }
13454
13455 out:
13456 input_location = saved_location;
13457 return ret;
13458 }
13459
13460 /* Like gimplify_expr but make sure the gimplified result is not itself
13461 an SSA name (but a decl if it would otherwise be one). Temporaries
13462 required by evaluating *EXPR_P may still be SSA names. */
13463
13464 static enum gimplify_status
13465 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13466 bool (*gimple_test_f) (tree), fallback_t fallback,
13467 bool allow_ssa)
13468 {
13469 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
13470 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
13471 gimple_test_f, fallback);
13472 if (! allow_ssa
13473 && TREE_CODE (*expr_p) == SSA_NAME)
13474 {
13475 tree name = *expr_p;
13476 if (was_ssa_name_p)
13477 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
13478 else
13479 {
13480 /* Avoid the extra copy if possible. */
13481 *expr_p = create_tmp_reg (TREE_TYPE (name));
13482 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
13483 release_ssa_name (name);
13484 }
13485 }
13486 return ret;
13487 }
13488
13489 /* Look through TYPE for variable-sized objects and gimplify each such
13490 size that we find. Add to LIST_P any statements generated. */
13491
13492 void
13493 gimplify_type_sizes (tree type, gimple_seq *list_p)
13494 {
13495 tree field, t;
13496
13497 if (type == NULL || type == error_mark_node)
13498 return;
13499
13500 /* We first do the main variant, then copy into any other variants. */
13501 type = TYPE_MAIN_VARIANT (type);
13502
13503 /* Avoid infinite recursion. */
13504 if (TYPE_SIZES_GIMPLIFIED (type))
13505 return;
13506
13507 TYPE_SIZES_GIMPLIFIED (type) = 1;
13508
13509 switch (TREE_CODE (type))
13510 {
13511 case INTEGER_TYPE:
13512 case ENUMERAL_TYPE:
13513 case BOOLEAN_TYPE:
13514 case REAL_TYPE:
13515 case FIXED_POINT_TYPE:
13516 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
13517 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
13518
13519 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
13520 {
13521 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
13522 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
13523 }
13524 break;
13525
13526 case ARRAY_TYPE:
13527 /* These types may not have declarations, so handle them here. */
13528 gimplify_type_sizes (TREE_TYPE (type), list_p);
13529 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
13530 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
13531 with assigned stack slots, for -O1+ -g they should be tracked
13532 by VTA. */
13533 if (!(TYPE_NAME (type)
13534 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13535 && DECL_IGNORED_P (TYPE_NAME (type)))
13536 && TYPE_DOMAIN (type)
13537 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
13538 {
13539 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
13540 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
13541 DECL_IGNORED_P (t) = 0;
13542 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
13543 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
13544 DECL_IGNORED_P (t) = 0;
13545 }
13546 break;
13547
13548 case RECORD_TYPE:
13549 case UNION_TYPE:
13550 case QUAL_UNION_TYPE:
13551 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
13552 if (TREE_CODE (field) == FIELD_DECL)
13553 {
13554 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
13555 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
13556 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
13557 gimplify_type_sizes (TREE_TYPE (field), list_p);
13558 }
13559 break;
13560
13561 case POINTER_TYPE:
13562 case REFERENCE_TYPE:
13563 /* We used to recurse on the pointed-to type here, which turned out to
13564 be incorrect because its definition might refer to variables not
13565 yet initialized at this point if a forward declaration is involved.
13566
13567 It was actually useful for anonymous pointed-to types to ensure
13568 that the sizes evaluation dominates every possible later use of the
13569 values. Restricting to such types here would be safe since there
13570 is no possible forward declaration around, but would introduce an
13571 undesirable middle-end semantic to anonymity. We then defer to
13572 front-ends the responsibility of ensuring that the sizes are
13573 evaluated both early and late enough, e.g. by attaching artificial
13574 type declarations to the tree. */
13575 break;
13576
13577 default:
13578 break;
13579 }
13580
13581 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
13582 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
13583
13584 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
13585 {
13586 TYPE_SIZE (t) = TYPE_SIZE (type);
13587 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
13588 TYPE_SIZES_GIMPLIFIED (t) = 1;
13589 }
13590 }
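/* For illustration: for a variably modified type such as the array
   type in

     void f (int n) { char buf[n * 2]; }

   the bound N * 2 flows through gimplify_one_sizepos below, which
   evaluates it once into a temporary so that all later uses of the
   type's size share that single evaluation.  */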
13591
13592 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
13593 a size or position, has had all of its SAVE_EXPRs evaluated.
13594 We add any required statements to *STMT_P. */
13595
13596 void
13597 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
13598 {
13599 tree expr = *expr_p;
13600
13601 /* We don't do anything if the value isn't there, is constant, or contains
13602 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
13603 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
13604 will want to replace it with a new variable, but that will cause problems
13605 if this type is from outside the function. It's OK to have that here. */
13606 if (expr == NULL_TREE
13607 || is_gimple_constant (expr)
13608 || TREE_CODE (expr) == VAR_DECL
13609 || CONTAINS_PLACEHOLDER_P (expr))
13610 return;
13611
13612 *expr_p = unshare_expr (expr);
13613
13614 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
13615 if the def vanishes. */
13616 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
13617
13618 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
13619 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
13620 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
13621 if (is_gimple_constant (*expr_p))
13622 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
13623 }
13624
13625 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
13626 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
13627 is true, also gimplify the parameters. */
13628
13629 gbind *
13630 gimplify_body (tree fndecl, bool do_parms)
13631 {
13632 location_t saved_location = input_location;
13633 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
13634 gimple *outer_stmt;
13635 gbind *outer_bind;
13636
13637 timevar_push (TV_TREE_GIMPLIFY);
13638
13639 init_tree_ssa (cfun);
13640
13641 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
13642 gimplification. */
13643 default_rtl_profile ();
13644
13645 gcc_assert (gimplify_ctxp == NULL);
13646 push_gimplify_context (true);
13647
13648 if (flag_openacc || flag_openmp)
13649 {
13650 gcc_assert (gimplify_omp_ctxp == NULL);
13651 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
13652 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
13653 }
13654
13655 /* Unshare most shared trees in the body and in that of any nested functions.
13656 It would seem we don't have to do this for nested functions because
13657 they are supposed to be output and then the outer function gimplified
13658 first, but the g++ front end doesn't always do it that way. */
13659 unshare_body (fndecl);
13660 unvisit_body (fndecl);
13661
13662 /* Make sure input_location isn't set to something weird. */
13663 input_location = DECL_SOURCE_LOCATION (fndecl);
13664
13665 /* Resolve callee-copies. This has to be done before processing
13666 the body so that DECL_VALUE_EXPR gets processed correctly. */
13667 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
13668
13669 /* Gimplify the function's body. */
13670 seq = NULL;
13671 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
13672 outer_stmt = gimple_seq_first_stmt (seq);
13673 if (!outer_stmt)
13674 {
13675 outer_stmt = gimple_build_nop ();
13676 gimplify_seq_add_stmt (&seq, outer_stmt);
13677 }
13678
13679 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
13680 not the case, wrap everything in a GIMPLE_BIND to make it so. */
13681 if (gimple_code (outer_stmt) == GIMPLE_BIND
13682 && gimple_seq_first (seq) == gimple_seq_last (seq))
13683 outer_bind = as_a <gbind *> (outer_stmt);
13684 else
13685 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
13686
13687 DECL_SAVED_TREE (fndecl) = NULL_TREE;
13688
13689 /* If we had callee-copies statements, insert them at the beginning
13690 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
13691 if (!gimple_seq_empty_p (parm_stmts))
13692 {
13693 tree parm;
13694
13695 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
13696 if (parm_cleanup)
13697 {
13698 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
13699 GIMPLE_TRY_FINALLY);
13700 parm_stmts = NULL;
13701 gimple_seq_add_stmt (&parm_stmts, g);
13702 }
13703 gimple_bind_set_body (outer_bind, parm_stmts);
13704
13705 for (parm = DECL_ARGUMENTS (current_function_decl);
13706 parm; parm = DECL_CHAIN (parm))
13707 if (DECL_HAS_VALUE_EXPR_P (parm))
13708 {
13709 DECL_HAS_VALUE_EXPR_P (parm) = 0;
13710 DECL_IGNORED_P (parm) = 0;
13711 }
13712 }
13713
13714 if ((flag_openacc || flag_openmp || flag_openmp_simd)
13715 && gimplify_omp_ctxp)
13716 {
13717 delete_omp_context (gimplify_omp_ctxp);
13718 gimplify_omp_ctxp = NULL;
13719 }
13720
13721 pop_gimplify_context (outer_bind);
13722 gcc_assert (gimplify_ctxp == NULL);
13723
13724 if (flag_checking && !seen_error ())
13725 verify_gimple_in_seq (gimple_bind_body (outer_bind));
13726
13727 timevar_pop (TV_TREE_GIMPLIFY);
13728 input_location = saved_location;
13729
13730 return outer_bind;
13731 }
13732
13733 typedef char *char_p; /* For DEF_VEC_P. */
13734
13735 /* Return whether we should exclude FNDECL from instrumentation. */
13736
13737 static bool
13738 flag_instrument_functions_exclude_p (tree fndecl)
13739 {
13740 vec<char_p> *v;
13741
13742 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
13743 if (v && v->length () > 0)
13744 {
13745 const char *name;
13746 int i;
13747 char *s;
13748
13749 name = lang_hooks.decl_printable_name (fndecl, 0);
13750 FOR_EACH_VEC_ELT (*v, i, s)
13751 if (strstr (name, s) != NULL)
13752 return true;
13753 }
13754
13755 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
13756 if (v && v->length () > 0)
13757 {
13758 const char *name;
13759 int i;
13760 char *s;
13761
13762 name = DECL_SOURCE_FILE (fndecl);
13763 FOR_EACH_VEC_ELT (*v, i, s)
13764 if (strstr (name, s) != NULL)
13765 return true;
13766 }
13767
13768 return false;
13769 }
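/* For illustration: the two vectors above are populated from options
   such as

     -finstrument-functions
     -finstrument-functions-exclude-function-list=foo,bar
     -finstrument-functions-exclude-file-list=include/sys

   and, per the strstr calls, matching is plain substring matching on
   the printable function name or source file name, not globbing.  */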
13770
13771 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
13772 node for the function we want to gimplify.
13773
13774 Return the sequence of GIMPLE statements corresponding to the body
13775 of FNDECL. */
13776
13777 void
13778 gimplify_function_tree (tree fndecl)
13779 {
13780 tree parm, ret;
13781 gimple_seq seq;
13782 gbind *bind;
13783
13784 gcc_assert (!gimple_body (fndecl));
13785
13786 if (DECL_STRUCT_FUNCTION (fndecl))
13787 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
13788 else
13789 push_struct_function (fndecl);
13790
13791 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
13792 if necessary. */
13793 cfun->curr_properties |= PROP_gimple_lva;
13794
13795 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
13796 {
13797 /* Preliminarily mark non-addressed complex variables as eligible
13798 for promotion to gimple registers. We'll transform their uses
13799 as we find them. */
13800 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
13801 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
13802 && !TREE_THIS_VOLATILE (parm)
13803 && !needs_to_live_in_memory (parm))
13804 DECL_GIMPLE_REG_P (parm) = 1;
13805 }
13806
13807 ret = DECL_RESULT (fndecl);
13808 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
13809 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
13810 && !needs_to_live_in_memory (ret))
13811 DECL_GIMPLE_REG_P (ret) = 1;
13812
13813 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
13814 asan_poisoned_variables = new hash_set<tree> ();
13815 bind = gimplify_body (fndecl, true);
13816 if (asan_poisoned_variables)
13817 {
13818 delete asan_poisoned_variables;
13819 asan_poisoned_variables = NULL;
13820 }
13821
13822 /* The tree body of the function is no longer needed, replace it
13823 with the new GIMPLE body. */
13824 seq = NULL;
13825 gimple_seq_add_stmt (&seq, bind);
13826 gimple_set_body (fndecl, seq);
13827
13828 /* If we're instrumenting function entry/exit, then prepend the call to
13829 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
13830 catch the exit hook. */
13831 /* ??? Add some way to ignore exceptions for this TFE. */
13832 if (flag_instrument_function_entry_exit
13833 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
13834 /* Do not instrument extern inline functions. */
13835 && !(DECL_DECLARED_INLINE_P (fndecl)
13836 && DECL_EXTERNAL (fndecl)
13837 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
13838 && !flag_instrument_functions_exclude_p (fndecl))
13839 {
13840 tree x;
13841 gbind *new_bind;
13842 gimple *tf;
13843 gimple_seq cleanup = NULL, body = NULL;
13844 tree tmp_var, this_fn_addr;
13845 gcall *call;
13846
13847 /* The instrumentation hooks aren't going to call the instrumented
13848 function and the address they receive is expected to be matchable
13849 against symbol addresses. Make sure we don't create a trampoline,
13850 in case the current function is nested. */
13851 this_fn_addr = build_fold_addr_expr (current_function_decl);
13852 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
13853
13854 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
13855 call = gimple_build_call (x, 1, integer_zero_node);
13856 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
13857 gimple_call_set_lhs (call, tmp_var);
13858 gimplify_seq_add_stmt (&cleanup, call);
13859 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
13860 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
13861 gimplify_seq_add_stmt (&cleanup, call);
13862 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
13863
13864 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
13865 call = gimple_build_call (x, 1, integer_zero_node);
13866 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
13867 gimple_call_set_lhs (call, tmp_var);
13868 gimplify_seq_add_stmt (&body, call);
13869 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
13870 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
13871 gimplify_seq_add_stmt (&body, call);
13872 gimplify_seq_add_stmt (&body, tf);
13873 new_bind = gimple_build_bind (NULL, body, NULL);
13874
13875 /* Replace the current function body with the body
13876 wrapped in the try/finally TF. */
13877 seq = NULL;
13878 gimple_seq_add_stmt (&seq, new_bind);
13879 gimple_set_body (fndecl, seq);
13880 bind = new_bind;
13881 }
13882
13883 if (sanitize_flags_p (SANITIZE_THREAD))
13884 {
13885 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
13886 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
13887 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
13888 /* Replace the current function body with the body
13889 wrapped in the try/finally TF. */
13890 seq = NULL;
13891 gimple_seq_add_stmt (&seq, new_bind);
13892 gimple_set_body (fndecl, seq);
13893 }
13894
13895 DECL_SAVED_TREE (fndecl) = NULL_TREE;
13896 cfun->curr_properties |= PROP_gimple_any;
13897
13898 pop_cfun ();
13899
13900 dump_function (TDI_gimple, fndecl);
13901 }
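/* For illustration: with -finstrument-functions the body built above
   is roughly equivalent to

     t = __builtin_return_address (0);
     __cyg_profile_func_enter (&THIS_FN, t);
     try
       { ORIGINAL_BODY }
     finally
       {
	 t = __builtin_return_address (0);
	 __cyg_profile_func_exit (&THIS_FN, t);
       }

   since BUILT_IN_PROFILE_FUNC_ENTER/EXIT expand to the __cyg_profile
   hooks.  */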
13902
13903 /* Return a dummy expression of type TYPE in order to keep going after an
13904 error. */
13905
13906 static tree
13907 dummy_object (tree type)
13908 {
13909 tree t = build_int_cst (build_pointer_type (type), 0);
13910 return build2 (MEM_REF, type, t, t);
13911 }
13912
13913 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
13914 builtin function, but a very special sort of operator. */
13915
13916 enum gimplify_status
13917 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
13918 gimple_seq *post_p ATTRIBUTE_UNUSED)
13919 {
13920 tree promoted_type, have_va_type;
13921 tree valist = TREE_OPERAND (*expr_p, 0);
13922 tree type = TREE_TYPE (*expr_p);
13923 tree t, tag, aptag;
13924 location_t loc = EXPR_LOCATION (*expr_p);
13925
13926 /* Verify that valist is of the proper type. */
13927 have_va_type = TREE_TYPE (valist);
13928 if (have_va_type == error_mark_node)
13929 return GS_ERROR;
13930 have_va_type = targetm.canonical_va_list_type (have_va_type);
13931 if (have_va_type == NULL_TREE
13932 && POINTER_TYPE_P (TREE_TYPE (valist)))
13933 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
13934 have_va_type
13935 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
13936 gcc_assert (have_va_type != NULL_TREE);
13937
13938 /* Generate a diagnostic for requesting data of a type that cannot
13939 be passed through `...' due to type promotion at the call site. */
13940 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
13941 != type)
13942 {
13943 static bool gave_help;
13944 bool warned;
13945 /* Use the expansion point to handle cases such as passing bool (defined
13946 in a system header) through `...'. */
13947 location_t xloc
13948 = expansion_point_location_if_in_system_header (loc);
13949
13950 /* Unfortunately, this is merely undefined, rather than a constraint
13951 violation, so we cannot make this an error. If this call is never
13952 executed, the program is still strictly conforming. */
13953 auto_diagnostic_group d;
13954 warned = warning_at (xloc, 0,
13955 "%qT is promoted to %qT when passed through %<...%>",
13956 type, promoted_type);
13957 if (!gave_help && warned)
13958 {
13959 gave_help = true;
13960 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
13961 promoted_type, type);
13962 }
13963
13964 /* We can, however, treat "undefined" any way we please.
13965 Call abort to encourage the user to fix the program. */
13966 if (warned)
13967 inform (xloc, "if this code is reached, the program will abort");
13968 /* Before the abort, allow the evaluation of the va_list
13969 expression to exit or longjmp. */
13970 gimplify_and_add (valist, pre_p);
13971 t = build_call_expr_loc (loc,
13972 builtin_decl_implicit (BUILT_IN_TRAP), 0);
13973 gimplify_and_add (t, pre_p);
13974
13975 /* This is dead code, but go ahead and finish so that the
13976 mode of the result comes out right. */
13977 *expr_p = dummy_object (type);
13978 return GS_ALL_DONE;
13979 }
13980
13981 tag = build_int_cst (build_pointer_type (type), 0);
13982 aptag = build_int_cst (TREE_TYPE (valist), 0);
13983
13984 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
13985 valist, tag, aptag);
13986
13987 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
13988 needs to be expanded. */
13989 cfun->curr_properties &= ~PROP_gimple_lva;
13990
13991 return GS_OK;
13992 }
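/* For illustration: the promotion diagnostic above triggers for code
   such as

     char c = va_arg (ap, char);

   because a char argument is promoted to int at the call site; the
   conforming form is va_arg (ap, int) followed by a narrowing
   conversion.  */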
13993
13994 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
13995
13996 DST/SRC are the destination and source respectively. You can pass
13997 ungimplified trees in DST or SRC, in which case they will be
13998 converted to a gimple operand if necessary.
13999
14000 This function returns the newly created GIMPLE_ASSIGN tuple. */
14001
14002 gimple *
14003 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
14004 {
14005 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
14006 gimplify_and_add (t, seq_p);
14007 ggc_free (t);
14008 return gimple_seq_last_stmt (*seq_p);
14009 }
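/* For illustration, a minimal sketch of using gimplify_assign from a
   lowering pass (TMP and SRC_EXPR are hypothetical names):

     tree tmp = create_tmp_var (ptr_type_node, "addr");
     gimple *g = gimplify_assign (tmp, src_expr, pre_p);

   Both operands may still be GENERIC; the synthesized MODIFY_EXPR is
   fully gimplified into *PRE_P and the final assignment returned.  */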
14010
14011 inline hashval_t
14012 gimplify_hasher::hash (const elt_t *p)
14013 {
14014 tree t = p->val;
14015 return iterative_hash_expr (t, 0);
14016 }
14017
14018 inline bool
14019 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
14020 {
14021 tree t1 = p1->val;
14022 tree t2 = p2->val;
14023 enum tree_code code = TREE_CODE (t1);
14024
14025 if (TREE_CODE (t2) != code
14026 || TREE_TYPE (t1) != TREE_TYPE (t2))
14027 return false;
14028
14029 if (!operand_equal_p (t1, t2, 0))
14030 return false;
14031
14032 /* Only allow them to compare equal if they also hash equal; otherwise
14033 results are nondeterministic, and we fail the bootstrap comparison. */
14034 gcc_checking_assert (hash (p1) == hash (p2));
14035
14036 return true;
14037 }