/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2013 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-pretty-print.h"
#include "langhooks.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-ssanames.h"
#include "tree-ssa.h"
#include "diagnostic-core.h"
#include "target.h"
#include "splay-tree.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "cilk.h"

#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,
  GOVD_MAP_TO_ONLY = 8192,
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};


enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_TARGET_DATA = 16,
  ORT_TARGET = 32
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  struct pointer_set_t *privatized_types;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
};

struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;


/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);

/* Shorter alias name for the above function for use in gimplify.c
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (struct gimplify_ctx *c)
{
  memset (c, '\0', sizeof (*c));
  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  if (c->temp_htab.is_created ())
    c->temp_htab.dispose ();
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gimple gimple_bind)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gimple
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gimple>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
235
953ff289
DN
236/* A stable comparison routine for use with splay trees and DECLs. */
237
238static int
239splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
240{
241 tree a = (tree) xa;
242 tree b = (tree) xb;
243
244 return DECL_UID (a) - DECL_UID (b);
245}
246
247/* Create a new omp construct that deals with variable remapping. */
248
249static struct gimplify_omp_ctx *
a68ab351 250new_omp_context (enum omp_region_type region_type)
953ff289
DN
251{
252 struct gimplify_omp_ctx *c;
253
254 c = XCNEW (struct gimplify_omp_ctx);
255 c->outer_context = gimplify_omp_ctxp;
256 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
257 c->privatized_types = pointer_set_create ();
258 c->location = input_location;
a68ab351 259 c->region_type = region_type;
f22f4340 260 if ((region_type & ORT_TASK) == 0)
a68ab351
JJ
261 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
262 else
263 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
953ff289
DN
264
265 return c;
266}
267
268/* Destroy an omp construct that deals with variable remapping. */
269
270static void
271delete_omp_context (struct gimplify_omp_ctx *c)
272{
273 splay_tree_delete (c->variables);
274 pointer_set_destroy (c->privatized_types);
275 XDELETE (c);
276}
277
278static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
279static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
280
726a989a
RB
281/* Both gimplify the statement T and append it to *SEQ_P. This function
282 behaves exactly as gimplify_stmt, but you don't have to pass T as a
283 reference. */
cd3ce9b4
JM
284
285void
726a989a
RB
286gimplify_and_add (tree t, gimple_seq *seq_p)
287{
288 gimplify_stmt (&t, seq_p);
289}
290
291/* Gimplify statement T into sequence *SEQ_P, and return the first
292 tuple in the sequence of generated tuples for this statement.
293 Return NULL if gimplifying T produced no tuples. */
294
295static gimple
296gimplify_and_return_first (tree t, gimple_seq *seq_p)
cd3ce9b4 297{
726a989a
RB
298 gimple_stmt_iterator last = gsi_last (*seq_p);
299
300 gimplify_and_add (t, seq_p);
301
302 if (!gsi_end_p (last))
303 {
304 gsi_next (&last);
305 return gsi_stmt (last);
306 }
307 else
308 return gimple_seq_first_stmt (*seq_p);
cd3ce9b4
JM
309}
310
216820a4
RG
311/* Returns true iff T is a valid RHS for an assignment to an un-renamed
312 LHS, or for a call argument. */
313
314static bool
315is_gimple_mem_rhs (tree t)
316{
317 /* If we're dealing with a renamable type, either source or dest must be
318 a renamed variable. */
319 if (is_gimple_reg_type (TREE_TYPE (t)))
320 return is_gimple_val (t);
321 else
322 return is_gimple_val (t) || is_gimple_lvalue (t);
323}
324
726a989a 325/* Return true if T is a CALL_EXPR or an expression that can be
12947319 326 assigned to a temporary. Note that this predicate should only be
726a989a
RB
327 used during gimplification. See the rationale for this in
328 gimplify_modify_expr. */
329
330static bool
ba4d8f9d 331is_gimple_reg_rhs_or_call (tree t)
726a989a 332{
ba4d8f9d
RG
333 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
334 || TREE_CODE (t) == CALL_EXPR);
726a989a
RB
335}
336
337/* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
338 this predicate should only be used during gimplification. See the
339 rationale for this in gimplify_modify_expr. */
340
341static bool
ba4d8f9d 342is_gimple_mem_rhs_or_call (tree t)
726a989a
RB
343{
344 /* If we're dealing with a renamable type, either source or dest must be
050bbfeb
RG
345 a renamed variable. */
346 if (is_gimple_reg_type (TREE_TYPE (t)))
726a989a
RB
347 return is_gimple_val (t);
348 else
ba4d8f9d
RG
349 return (is_gimple_val (t) || is_gimple_lvalue (t)
350 || TREE_CODE (t) == CALL_EXPR);
726a989a
RB
351}
352
2ad728d2
RG
353/* Create a temporary with a name derived from VAL. Subroutine of
354 lookup_tmp_var; nobody else should call this function. */
355
356static inline tree
357create_tmp_from_val (tree val, bool is_formal)
358{
359 /* Drop all qualifiers and address-space information from the value type. */
360 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
361 tree var = create_tmp_var (type, get_name (val));
362 if (is_formal
363 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
364 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
365 DECL_GIMPLE_REG_P (var) = 1;
366 return var;
367}
368
369/* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
370 an existing expression temporary. */
371
372static tree
373lookup_tmp_var (tree val, bool is_formal)
374{
375 tree ret;
376
377 /* If not optimizing, never really reuse a temporary. local-alloc
378 won't allocate any variable that is used in more than one basic
379 block, which means it will go into memory, causing much extra
380 work in reload and final and poorer code generation, outweighing
381 the extra memory allocation here. */
382 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
383 ret = create_tmp_from_val (val, is_formal);
384 else
385 {
386 elt_t elt, *elt_p;
4a8fb1a1 387 elt_t **slot;
2ad728d2
RG
388
389 elt.val = val;
4a8fb1a1
LC
390 if (!gimplify_ctxp->temp_htab.is_created ())
391 gimplify_ctxp->temp_htab.create (1000);
392 slot = gimplify_ctxp->temp_htab.find_slot (&elt, INSERT);
2ad728d2
RG
393 if (*slot == NULL)
394 {
395 elt_p = XNEW (elt_t);
396 elt_p->val = val;
397 elt_p->temp = ret = create_tmp_from_val (val, is_formal);
4a8fb1a1 398 *slot = elt_p;
2ad728d2
RG
399 }
400 else
401 {
4a8fb1a1 402 elt_p = *slot;
2ad728d2
RG
403 ret = elt_p->temp;
404 }
405 }
406
407 return ret;
408}

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
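
/* A hypothetical usage sketch (the trees SUM, A, B and the temporary name
   are invented for illustration, not taken from the sources above): a
   caller holding a GENERIC expression that must become a GIMPLE value
   would typically do

     tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
     tree tmp = get_formal_tmp_var (sum, pre_p);

   which, through internal_get_tmp_var, gimplifies "tmp = a + b" into
   *PRE_P and returns TMP, so later uses can refer to the GIMPLE value TMP
   instead of re-evaluating the expression.  */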

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple scope, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gcc_assert (gimple_code (scope) == GIMPLE_BIND);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
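
/* A hypothetical illustration of the sharing problem (the names SUM, ST1,
   ST2, A, B, X and Y are invented for this sketch): if a front end builds
   one node and links it into two places,

     tree sum = build2 (PLUS_EXPR, integer_type_node, x, y);
     tree st1 = build2 (MODIFY_EXPR, integer_type_node, a, sum);
     tree st2 = build2 (MODIFY_EXPR, integer_type_node, b, sum);

   then SUM is shared.  The walk below marks SUM with TREE_VISITED when it
   is reached through ST1 and, on reaching it again through ST2, replaces
   that second reference with a deep copy, so that in-place gimplification
   of ST1 cannot corrupt ST2.  */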

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  struct pointer_set_t *visited
    = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  if (visited)
    pointer_set_destroy (visited);

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
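
/* A hypothetical example (GENERIC shown schematically, names invented for
   this sketch): for a statement-expression used as an initializer the
   gimplifier may see something like

     D.1 = BIND_EXPR <int i;  { i = foo (); i + 1 }>

   Passing the BIND_EXPR as WRAPPER and the assignment as TEMP pushes the
   assignment down onto the last value-producing expression and voidifies
   every enclosing wrapper:

     BIND_EXPR <int i;  { i = foo (); D.1 = i + 1 }>

   so the wrapper itself no longer carries a value.  */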

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gimple *save, gimple *restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gimple gimple_bind;
  gimple_seq body, cleanup;
  gimple stack_save;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (gimple_bind);

  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (gimple_bind, body);

  cleanup = NULL;
  stack_save = NULL;
  if (gimplify_ctxp->save_stack)
    {
      gimple stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  TREE_THIS_VOLATILE (clobber) = 1;
	  gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
	}
    }

  if (cleanup)
    {
      gimple gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (gimple_bind, new_body);
    }

  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, gimple_bind);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  gimple ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Implicit _Cilk_sync must be inserted right before any return statement
     if there is a _Cilk_spawn in the function.  If the user has provided a
     _Cilk_sync, the optimizer should remove this duplicate one.  */
  if (fn_contains_cilk_spawn_p (cfun))
    {
      tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
      gimplify_and_add (impl_sync, pre_p);
    }

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      gimple ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl), NULL);

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
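
/* A hypothetical illustration: for a function returning int, the GENERIC
   statement

     RETURN_EXPR <MODIFY_EXPR <RESULT_DECL, a + b>>

   has the RESULT_DECL replaced by the shared return temporary, which after
   gimplification yields roughly

     retval.1 = a + b;
     return retval.1;

   The name retval.1 is invented here; the actual temporary is created by
   create_tmp_reg above and reused for every return in the function.  */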

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}
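
/* A hypothetical illustration: a block-scope declaration such as

     int a[n];

   is lowered so that a pointer temporary holds the storage,

     a.1 = __builtin_alloca_with_align (SIZE, ALIGN);

   and DECL_VALUE_EXPR (a) is set to *a.1, so every later use of A is
   rewritten as an indirection.  Setting save_stack above makes the
   enclosing GIMPLE_BIND wrap its body in a try/finally that calls
   __builtin_stack_save / __builtin_stack_restore (see gimplify_bind_expr).
   The names a.1, SIZE and ALIGN are schematic.  */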

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    FORCED_LABEL (*tp) = 1;

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
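
/* A hypothetical illustration: a LOOP_EXPR whose body contains an
   EXIT_EXPR for COND is lowered to roughly

     <start>:
       ... body ...
       if (cond) goto <exit>;
       goto <start>;
     <exit>:

   where <exit> is only emitted when gimplify_exit_expr (below) recorded an
   exit label; an infinite loop with no EXIT_EXPR gets just the start label
   and the back-edge goto.  The label names are schematic.  */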

/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}


/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gimple gimple_switch;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      if (!default_case)
	{
	  gimple new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
					   default_case, labels);
      gimplify_seq_add_stmt (pre_p, gimple_switch);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
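
/* A hypothetical illustration: for

     switch (x) { case 1: f (); }

   the case labels are collected while the body is gimplified (see
   gimplify_case_label_expr below), a default label is invented because
   none was written, and the emitted sequence is roughly

     switch (x) <default: <D.2>, case 1: <D.1>>
     <D.1>:
     f ();
     <D.2>:

   i.e. a GIMPLE_SWITCH followed by the gimplified switch body with the
   generated default label appended.  The label names are schematic.  */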

/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  gimple gimple_label;

  /* Invalid OpenMP programs can play Duff's Device type games with
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
  ctxp->case_labels.safe_push (*expr_p);
  gimplify_seq_add_stmt (pre_p, gimple_label);

  return GS_ALL_DONE;
}

/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
   if necessary.  */

tree
build_and_jump (tree *label_p)
{
  if (label_p == NULL)
    /* If there's nowhere to jump, just fall through.  */
    return NULL_TREE;

  if (*label_p == NULL_TREE)
    {
      tree label = create_artificial_label (UNKNOWN_LOCATION);
      *label_p = label;
    }

  return build1 (GOTO_EXPR, void_type_node, *label_p);
}

/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
   This also involves building a label to jump to and communicating it to
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */

static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
  tree cond = TREE_OPERAND (*expr_p, 0);
  tree expr;

  expr = build_and_jump (&gimplify_ctxp->exit_label);
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  *expr_p = expr;

  return GS_OK;
}

/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}

/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
26d44ae2
RH
1550/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1551 underneath as appropriate. */
6de9cd9a 1552
26d44ae2
RH
1553static enum gimplify_status
1554gimplify_conversion (tree *expr_p)
d3147f64 1555{
db3927fb 1556 location_t loc = EXPR_LOCATION (*expr_p);
1043771b 1557 gcc_assert (CONVERT_EXPR_P (*expr_p));
c2255bc4 1558
0710ccff
NS
1559 /* Then strip away all but the outermost conversion. */
1560 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1561
1562 /* And remove the outermost conversion if it's useless. */
1563 if (tree_ssa_useless_type_conversion (*expr_p))
1564 *expr_p = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1565
26d44ae2
RH
1566 /* If we still have a conversion at the toplevel,
1567 then canonicalize some constructs. */
1043771b 1568 if (CONVERT_EXPR_P (*expr_p))
26d44ae2
RH
1569 {
1570 tree sub = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1571
26d44ae2
RH
1572 /* If a NOP conversion is changing the type of a COMPONENT_REF
1573 expression, then canonicalize its type now in order to expose more
1574 redundant conversions. */
1575 if (TREE_CODE (sub) == COMPONENT_REF)
1576 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
6de9cd9a 1577
26d44ae2
RH
1578 /* If a NOP conversion is changing a pointer to array of foo
1579 to a pointer to foo, embed that change in the ADDR_EXPR. */
1580 else if (TREE_CODE (sub) == ADDR_EXPR)
1581 canonicalize_addr_expr (expr_p);
1582 }
6de9cd9a 1583
8b17cc05
RG
1584 /* If we have a conversion to a non-register type force the
1585 use of a VIEW_CONVERT_EXPR instead. */
4f934809 1586 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
db3927fb 1587 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
4f934809 1588 TREE_OPERAND (*expr_p, 0));
8b17cc05 1589
6de9cd9a
DN
1590 return GS_OK;
1591}
1592
77f2a970
JJ
1593/* Nonlocal VLAs seen in the current function. */
1594static struct pointer_set_t *nonlocal_vlas;
1595
ad19c4be 1596/* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
a9f7c570
RH
1597 DECL_VALUE_EXPR, and it's worth re-examining things. */
1598
1599static enum gimplify_status
1600gimplify_var_or_parm_decl (tree *expr_p)
1601{
1602 tree decl = *expr_p;
1603
1604 /* ??? If this is a local variable, and it has not been seen in any
1605 outer BIND_EXPR, then it's probably the result of a duplicate
1606 declaration, for which we've already issued an error. It would
1607 be really nice if the front end wouldn't leak these at all.
1608 Currently the only known culprit is C++ destructors, as seen
1609 in g++.old-deja/g++.jason/binding.C. */
1610 if (TREE_CODE (decl) == VAR_DECL
1611 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1612 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1613 && decl_function_context (decl) == current_function_decl)
1614 {
1da2ed5f 1615 gcc_assert (seen_error ());
a9f7c570
RH
1616 return GS_ERROR;
1617 }
1618
953ff289
DN
1619 /* When within an OpenMP context, notice uses of variables. */
1620 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1621 return GS_ALL_DONE;
1622
a9f7c570
RH
1623 /* If the decl is an alias for another expression, substitute it now. */
1624 if (DECL_HAS_VALUE_EXPR_P (decl))
1625 {
77f2a970
JJ
1626 tree value_expr = DECL_VALUE_EXPR (decl);
1627
1628 /* For referenced nonlocal VLAs add a decl for debugging purposes
1629 to the current function. */
1630 if (TREE_CODE (decl) == VAR_DECL
1631 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1632 && nonlocal_vlas != NULL
1633 && TREE_CODE (value_expr) == INDIRECT_REF
1634 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1635 && decl_function_context (decl) != current_function_decl)
1636 {
1637 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
74bf76ed
JJ
1638 while (ctx
1639 && (ctx->region_type == ORT_WORKSHARE
1640 || ctx->region_type == ORT_SIMD))
77f2a970
JJ
1641 ctx = ctx->outer_context;
1642 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1643 {
1644 tree copy = copy_node (decl), block;
1645
1646 lang_hooks.dup_lang_specific_decl (copy);
2eb79bbb 1647 SET_DECL_RTL (copy, 0);
77f2a970
JJ
1648 TREE_USED (copy) = 1;
1649 block = DECL_INITIAL (current_function_decl);
910ad8de 1650 DECL_CHAIN (copy) = BLOCK_VARS (block);
77f2a970
JJ
1651 BLOCK_VARS (block) = copy;
1652 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1653 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1654 }
1655 }
1656
1657 *expr_p = unshare_expr (value_expr);
a9f7c570
RH
1658 return GS_OK;
1659 }
1660
1661 return GS_ALL_DONE;
1662}
1663
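/* One way the nonlocal-VLA case above can arise (hypothetical GNU C code
   using a nested function; whether this exact path is taken depends on
   how earlier gimplification exposed the VLA):

     void outer (int n)
     {
       int vla[n];
       void inner (void) { vla[0] = 1; }
       inner ();
     }

   While gimplifying inner, the reference to vla is replaced by its
   DECL_VALUE_EXPR (an indirection through a pointer variable), and a
   copy of the declaration is chained into inner's outermost block so
   that the variable stays visible to the debugger.  */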
6de9cd9a 1664/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
726a989a 1665 node *EXPR_P.
6de9cd9a
DN
1666
1667 compound_lval
1668 : min_lval '[' val ']'
1669 | min_lval '.' ID
1670 | compound_lval '[' val ']'
1671 | compound_lval '.' ID
1672
1673 This is not part of the original SIMPLE definition, which separates
1674 array and member references, but it seems reasonable to handle them
1675 together. Also, this way we don't run into problems with union
1676 aliasing; gcc requires that for accesses through a union to alias, the
1677 union reference must be explicit, which was not always the case when we
1678 were splitting up array and member refs.
1679
726a989a 1680 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
1681 *EXPR_P should be stored.
1682
726a989a 1683 POST_P points to the sequence where side effects that must happen after
6de9cd9a
DN
1684 *EXPR_P should be stored. */
1685
1686static enum gimplify_status
726a989a
RB
1687gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1688 fallback_t fallback)
6de9cd9a
DN
1689{
1690 tree *p;
941f78d1 1691 enum gimplify_status ret = GS_ALL_DONE, tret;
af72267c 1692 int i;
db3927fb 1693 location_t loc = EXPR_LOCATION (*expr_p);
941f78d1 1694 tree expr = *expr_p;
6de9cd9a 1695
6de9cd9a 1696 /* Create a stack of the subexpressions so later we can walk them in
ec234842 1697 order from inner to outer. */
07687835 1698 stack_vec<tree, 10> expr_stack;
6de9cd9a 1699
afe84921 1700 /* We can handle anything that get_inner_reference can deal with. */
6a720599
JM
1701 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1702 {
a9f7c570 1703 restart:
6a720599
JM
1704 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1705 if (TREE_CODE (*p) == INDIRECT_REF)
db3927fb 1706 *p = fold_indirect_ref_loc (loc, *p);
a9f7c570
RH
1707
1708 if (handled_component_p (*p))
1709 ;
1710 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1711 additional COMPONENT_REFs. */
1712 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1713 && gimplify_var_or_parm_decl (p) == GS_OK)
1714 goto restart;
1715 else
6a720599 1716 break;
b8698a0f 1717
9771b263 1718 expr_stack.safe_push (*p);
6a720599 1719 }
6de9cd9a 1720
9771b263 1721 gcc_assert (expr_stack.length ());
9e51aaf5 1722
0823efed
DN
1723 /* Now EXPR_STACK is a stack of pointers to all the refs we've
1724 walked through and P points to the innermost expression.
6de9cd9a 1725
af72267c
RK
1726 Java requires that we elaborate nodes in source order. That
1727 means we must gimplify the inner expression followed by each of
1728 the indices, in order. But we can't gimplify the inner
1729 expression until we deal with any variable bounds, sizes, or
1730 positions in order to deal with PLACEHOLDER_EXPRs.
1731
1732 So we do this in three steps. First we deal with the annotations
1733 for any variables in the components, then we gimplify the base,
1734 then we gimplify any indices, from left to right. */
9771b263 1735 for (i = expr_stack.length () - 1; i >= 0; i--)
6de9cd9a 1736 {
9771b263 1737 tree t = expr_stack[i];
44de5aeb
RK
1738
1739 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6de9cd9a 1740 {
44de5aeb
RK
1741 /* Gimplify the low bound and element type size and put them into
1742 the ARRAY_REF. If these values are set, they have already been
1743 gimplified. */
726a989a 1744 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb 1745 {
a7cc468a
RH
1746 tree low = unshare_expr (array_ref_low_bound (t));
1747 if (!is_gimple_min_invariant (low))
44de5aeb 1748 {
726a989a
RB
1749 TREE_OPERAND (t, 2) = low;
1750 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 1751 post_p, is_gimple_reg,
726a989a 1752 fb_rvalue);
44de5aeb
RK
1753 ret = MIN (ret, tret);
1754 }
1755 }
19c44640
JJ
1756 else
1757 {
1758 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1759 is_gimple_reg, fb_rvalue);
1760 ret = MIN (ret, tret);
1761 }
44de5aeb 1762
19c44640 1763 if (TREE_OPERAND (t, 3) == NULL_TREE)
44de5aeb
RK
1764 {
1765 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1766 tree elmt_size = unshare_expr (array_ref_element_size (t));
a4e9ffe5 1767 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
44de5aeb
RK
1768
1769 /* Divide the element size by the alignment of the element
1770 type (above). */
ad19c4be
EB
1771 elmt_size
1772 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
44de5aeb 1773
a7cc468a 1774 if (!is_gimple_min_invariant (elmt_size))
44de5aeb 1775 {
726a989a
RB
1776 TREE_OPERAND (t, 3) = elmt_size;
1777 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
ba4d8f9d 1778 post_p, is_gimple_reg,
726a989a 1779 fb_rvalue);
44de5aeb
RK
1780 ret = MIN (ret, tret);
1781 }
6de9cd9a 1782 }
19c44640
JJ
1783 else
1784 {
1785 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1786 is_gimple_reg, fb_rvalue);
1787 ret = MIN (ret, tret);
1788 }
6de9cd9a 1789 }
44de5aeb
RK
1790 else if (TREE_CODE (t) == COMPONENT_REF)
1791 {
1792 /* Set the field offset into T and gimplify it. */
19c44640 1793 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb
RK
1794 {
1795 tree offset = unshare_expr (component_ref_field_offset (t));
1796 tree field = TREE_OPERAND (t, 1);
1797 tree factor
1798 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1799
1800 /* Divide the offset by its alignment. */
db3927fb 1801 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
44de5aeb 1802
a7cc468a 1803 if (!is_gimple_min_invariant (offset))
44de5aeb 1804 {
726a989a
RB
1805 TREE_OPERAND (t, 2) = offset;
1806 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 1807 post_p, is_gimple_reg,
726a989a 1808 fb_rvalue);
44de5aeb
RK
1809 ret = MIN (ret, tret);
1810 }
1811 }
19c44640
JJ
1812 else
1813 {
1814 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1815 is_gimple_reg, fb_rvalue);
1816 ret = MIN (ret, tret);
1817 }
44de5aeb 1818 }
af72267c
RK
1819 }
1820
a9f7c570
RH
1821 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1822 so as to match the min_lval predicate. Failure to do so may result
1823 in the creation of large aggregate temporaries. */
1824 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1825 fallback | fb_lvalue);
af72267c
RK
1826 ret = MIN (ret, tret);
1827
ea814c66 1828 /* And finally, the indices and operands of ARRAY_REF. During this
48eb4e53 1829 loop we also remove any useless conversions. */
9771b263 1830 for (; expr_stack.length () > 0; )
af72267c 1831 {
9771b263 1832 tree t = expr_stack.pop ();
af72267c
RK
1833
1834 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1835 {
ba4d8f9d 1836 /* Gimplify the dimension. */
af72267c
RK
1837 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1838 {
1839 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
ba4d8f9d 1840 is_gimple_val, fb_rvalue);
af72267c
RK
1841 ret = MIN (ret, tret);
1842 }
1843 }
48eb4e53
RK
1844
1845 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1846
726a989a
RB
1847 /* The innermost expression P may have originally had
1848 TREE_SIDE_EFFECTS set which would have caused all the outer
1849 expressions in *EXPR_P leading to P to also have had
1850 TREE_SIDE_EFFECTS set. */
6de9cd9a 1851 recalculate_side_effects (t);
6de9cd9a
DN
1852 }
1853
1854 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
90051e16 1855 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
6de9cd9a
DN
1856 {
1857 canonicalize_component_ref (expr_p);
6de9cd9a
DN
1858 }
1859
9771b263 1860 expr_stack.release ();
07724022 1861
941f78d1
JM
1862 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
1863
6de9cd9a
DN
1864 return ret;
1865}
1866
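/* A minimal sketch of step 3 above (hypothetical user code; temporary
   name illustrative): an index with side effects is evaluated into a
   temporary before the reference itself, e.g.

     a[f ()] = 0;

   gimplifies to roughly

     t = f ();
     a[t] = 0;

   whereas constant indices, bounds and element sizes are left in
   place.  */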
206048bd
VR
1867/* Gimplify the self modifying expression pointed to by EXPR_P
1868 (++, --, +=, -=).
6de9cd9a
DN
1869
1870 PRE_P points to the list where side effects that must happen before
1871 *EXPR_P should be stored.
1872
1873 POST_P points to the list where side effects that must happen after
1874 *EXPR_P should be stored.
1875
1876 WANT_VALUE is nonzero iff we want to use the value of this expression
cc3c4f62 1877 in another expression.
6de9cd9a 1878
cc3c4f62
RB
1879 ARITH_TYPE is the type the computation should be performed in. */
1880
1881enum gimplify_status
726a989a 1882gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
cc3c4f62 1883 bool want_value, tree arith_type)
6de9cd9a
DN
1884{
1885 enum tree_code code;
726a989a
RB
1886 tree lhs, lvalue, rhs, t1;
1887 gimple_seq post = NULL, *orig_post_p = post_p;
6de9cd9a
DN
1888 bool postfix;
1889 enum tree_code arith_code;
1890 enum gimplify_status ret;
db3927fb 1891 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
1892
1893 code = TREE_CODE (*expr_p);
1894
282899df
NS
1895 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
1896 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
6de9cd9a
DN
1897
1898 /* Prefix or postfix? */
1899 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
1900 /* Faster to treat as prefix if result is not used. */
1901 postfix = want_value;
1902 else
1903 postfix = false;
1904
82181741
JJ
1905 /* For postfix, make sure the inner expression's post side effects
1906 are executed after side effects from this expression. */
1907 if (postfix)
1908 post_p = &post;
1909
6de9cd9a
DN
1910 /* Add or subtract? */
1911 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
1912 arith_code = PLUS_EXPR;
1913 else
1914 arith_code = MINUS_EXPR;
1915
1916 /* Gimplify the LHS into a GIMPLE lvalue. */
1917 lvalue = TREE_OPERAND (*expr_p, 0);
1918 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
1919 if (ret == GS_ERROR)
1920 return ret;
1921
1922 /* Extract the operands to the arithmetic operation. */
1923 lhs = lvalue;
1924 rhs = TREE_OPERAND (*expr_p, 1);
1925
1926 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
d97c9b22 1927 that as the result value and in the post queue operation. */
6de9cd9a
DN
1928 if (postfix)
1929 {
1930 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
1931 if (ret == GS_ERROR)
1932 return ret;
6de9cd9a 1933
d97c9b22
JJ
1934 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
1935 }
cc3c4f62 1936
5be014d5
AP
1937 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
1938 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
1939 {
0d82a1c8 1940 rhs = convert_to_ptrofftype_loc (loc, rhs);
5be014d5 1941 if (arith_code == MINUS_EXPR)
db3927fb 1942 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
cc3c4f62 1943 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
5be014d5 1944 }
cc3c4f62
RB
1945 else
1946 t1 = fold_convert (TREE_TYPE (*expr_p),
1947 fold_build2 (arith_code, arith_type,
1948 fold_convert (arith_type, lhs),
1949 fold_convert (arith_type, rhs)));
5be014d5 1950
6de9cd9a
DN
1951 if (postfix)
1952 {
cf1867a0 1953 gimplify_assign (lvalue, t1, pre_p);
726a989a 1954 gimplify_seq_add_seq (orig_post_p, post);
cc3c4f62 1955 *expr_p = lhs;
6de9cd9a
DN
1956 return GS_ALL_DONE;
1957 }
1958 else
1959 {
726a989a 1960 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
6de9cd9a
DN
1961 return GS_OK;
1962 }
1963}
1964
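/* A minimal sketch of the postfix case above (hypothetical user code;
   temporary name illustrative):

     y = x++;

   gimplifies to roughly

     x.0 = x;
     x = x.0 + 1;
     y = x.0;

   where the first two statements go to the pre queue and the saved value
   x.0 becomes the value of the expression.  For pointers the addition is
   done with POINTER_PLUS_EXPR, negating the offset for a decrement.  */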
d25cee4d
RH
1965/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
1966
1967static void
1968maybe_with_size_expr (tree *expr_p)
1969{
61025d1b
RK
1970 tree expr = *expr_p;
1971 tree type = TREE_TYPE (expr);
1972 tree size;
d25cee4d 1973
61025d1b
RK
1974 /* If we've already wrapped this or the type is error_mark_node, we can't do
1975 anything. */
1976 if (TREE_CODE (expr) == WITH_SIZE_EXPR
1977 || type == error_mark_node)
d25cee4d
RH
1978 return;
1979
61025d1b 1980 /* If the size isn't known or is a constant, we have nothing to do. */
d25cee4d 1981 size = TYPE_SIZE_UNIT (type);
61025d1b
RK
1982 if (!size || TREE_CODE (size) == INTEGER_CST)
1983 return;
1984
1985 /* Otherwise, make a WITH_SIZE_EXPR. */
1986 size = unshare_expr (size);
1987 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
1988 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
d25cee4d
RH
1989}
1990
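/* Illustrative example: for an expression whose type has a non-constant
   TYPE_SIZE_UNIT (e.g. a value of C99 variable-length array type), the
   expression EXPR becomes

     WITH_SIZE_EXPR <EXPR, size>

   where size is an unshared copy of TYPE_SIZE_UNIT with any
   PLACEHOLDER_EXPRs substituted, so later consumers of the expression
   still know how many bytes are involved.  */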
726a989a 1991/* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
1282697f
AH
1992 Store any side-effects in PRE_P. CALL_LOCATION is the location of
1993 the CALL_EXPR. */
e4f78bd4
JM
1994
1995static enum gimplify_status
1282697f 1996gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
e4f78bd4
JM
1997{
1998 bool (*test) (tree);
1999 fallback_t fb;
2000
2001 /* In general, we allow lvalues for function arguments to avoid
2002 extra overhead of copying large aggregates out of even larger
2003 aggregates into temporaries only to copy the temporaries to
2004 the argument list. Make optimizers happy by pulling out to
2005 temporaries those types that fit in registers. */
726a989a 2006 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
e4f78bd4
JM
2007 test = is_gimple_val, fb = fb_rvalue;
2008 else
b4ef8aac
JM
2009 {
2010 test = is_gimple_lvalue, fb = fb_either;
2011 /* Also strip a TARGET_EXPR that would force an extra copy. */
2012 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2013 {
2014 tree init = TARGET_EXPR_INITIAL (*arg_p);
2015 if (init
2016 && !VOID_TYPE_P (TREE_TYPE (init)))
2017 *arg_p = init;
2018 }
2019 }
e4f78bd4 2020
d25cee4d 2021 /* If this is a variable sized type, we must remember the size. */
726a989a 2022 maybe_with_size_expr (arg_p);
d25cee4d 2023
c2255bc4 2024 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
1282697f
AH
2025 /* Make sure arguments have the same location as the function call
2026 itself. */
2027 protected_set_expr_location (*arg_p, call_location);
2028
e4f78bd4
JM
2029 /* There is a sequence point before a function call. Side effects in
2030 the argument list must occur before the actual call. So, when
2031 gimplifying arguments, force gimplify_expr to use an internal
2032 post queue which is then appended to the end of PRE_P. */
726a989a 2033 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
e4f78bd4
JM
2034}
2035
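/* A minimal sketch of the sequence-point rule above (hypothetical user
   code; temporary names illustrative): side effects in an argument are
   emitted before the call, so

     foo (x++);

   gimplifies to roughly

     x.0 = x;
     x = x.0 + 1;
     foo (x.0);

   while an aggregate argument that already satisfies the lvalue
   predicate is passed as-is to avoid an extra copy.  */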
726a989a 2036/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
90051e16 2037 WANT_VALUE is true if the result of the call is desired. */
6de9cd9a
DN
2038
2039static enum gimplify_status
726a989a 2040gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a 2041{
f20ca725 2042 tree fndecl, parms, p, fnptrtype;
6de9cd9a 2043 enum gimplify_status ret;
5039610b 2044 int i, nargs;
726a989a
RB
2045 gimple call;
2046 bool builtin_va_start_p = FALSE;
db3927fb 2047 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a 2048
282899df 2049 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
6de9cd9a 2050
d3147f64 2051 /* For reliable diagnostics during inlining, it is necessary that
6de9cd9a 2052 every call_expr be annotated with file and line. */
a281759f
PB
2053 if (! EXPR_HAS_LOCATION (*expr_p))
2054 SET_EXPR_LOCATION (*expr_p, input_location);
6de9cd9a 2055
939b37da
BI
2056 if (fn_contains_cilk_spawn_p (cfun)
2057 && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p)
2058 && !seen_error ())
2059 return (enum gimplify_status)
2060 lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p, NULL);
2061
6de9cd9a
DN
2062 /* This may be a call to a builtin function.
2063
2064 Builtin function calls may be transformed into different
2065 (and more efficient) builtin function calls under certain
2066 circumstances. Unfortunately, gimplification can muck things
2067 up enough that the builtin expanders are not aware that certain
2068 transformations are still valid.
2069
2070 So we attempt transformation/gimplification of the call before
2071 we gimplify the CALL_EXPR. At this time we do not manage to
2072 transform all calls in the same manner as the expanders do, but
2073 we do transform most of them. */
726a989a 2074 fndecl = get_callee_fndecl (*expr_p);
3537a0cd
RG
2075 if (fndecl
2076 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2077 switch (DECL_FUNCTION_CODE (fndecl))
2078 {
2079 case BUILT_IN_VA_START:
2efcfa4e 2080 {
726a989a 2081 builtin_va_start_p = TRUE;
5039610b 2082 if (call_expr_nargs (*expr_p) < 2)
2efcfa4e
AP
2083 {
2084 error ("too few arguments to function %<va_start%>");
c2255bc4 2085 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2086 return GS_OK;
2087 }
b8698a0f 2088
5039610b 2089 if (fold_builtin_next_arg (*expr_p, true))
2efcfa4e 2090 {
c2255bc4 2091 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2092 return GS_OK;
2093 }
3537a0cd
RG
2094 break;
2095 }
2096 case BUILT_IN_LINE:
2097 {
2098 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2099 *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
2100 return GS_OK;
2101 }
2102 case BUILT_IN_FILE:
2103 {
2104 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2105 *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
2106 return GS_OK;
2107 }
2108 case BUILT_IN_FUNCTION:
2109 {
2110 const char *function;
2111 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2112 *expr_p = build_string_literal (strlen (function) + 1, function);
2113 return GS_OK;
2114 }
2115 default:
2116 ;
2117 }
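  /* A minimal sketch of the BUILT_IN_LINE/FILE/FUNCTION cases above
     (hypothetical user code): the calls are replaced right here by
     constants taken from the call's location, e.g. in

       void f (void) { int l = __builtin_LINE (); }

     with the call on line 42, the call gimplifies to the integer
     constant 42, while __builtin_FILE () and __builtin_FUNCTION ()
     become string literals holding the file name and "f".  */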
2118 if (fndecl && DECL_BUILT_IN (fndecl))
2119 {
2120 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2121 if (new_tree && new_tree != *expr_p)
2122 {
2123 /* There was a transformation of this call which computes the
2124 same value, but in a more efficient way. Return and try
2125 again. */
2126 *expr_p = new_tree;
2127 return GS_OK;
2efcfa4e 2128 }
6de9cd9a
DN
2129 }
2130
f20ca725
RG
2131 /* Remember the original function pointer type. */
2132 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2133
6de9cd9a
DN
2134 /* There is a sequence point before the call, so any side effects in
2135 the calling expression must occur before the actual call. Force
2136 gimplify_expr to use an internal post queue. */
5039610b 2137 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
0f59171d 2138 is_gimple_call_addr, fb_rvalue);
6de9cd9a 2139
5039610b
SL
2140 nargs = call_expr_nargs (*expr_p);
2141
e36711f3 2142 /* Get argument types for verification. */
726a989a 2143 fndecl = get_callee_fndecl (*expr_p);
e36711f3 2144 parms = NULL_TREE;
726a989a
RB
2145 if (fndecl)
2146 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
e36711f3
RG
2147 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2148 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2149
726a989a 2150 if (fndecl && DECL_ARGUMENTS (fndecl))
f9487002 2151 p = DECL_ARGUMENTS (fndecl);
004e2fa7 2152 else if (parms)
f9487002 2153 p = parms;
6ef5231b 2154 else
498e51ca 2155 p = NULL_TREE;
f9487002
JJ
2156 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2157 ;
6ef5231b
JJ
2158
2159 /* If the last argument is __builtin_va_arg_pack () and it is not
2160 passed as a named argument, decrease the number of CALL_EXPR
2161 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2162 if (!p
2163 && i < nargs
2164 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2165 {
2166 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2167 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2168
2169 if (last_arg_fndecl
2170 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2171 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2172 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2173 {
2174 tree call = *expr_p;
2175
2176 --nargs;
db3927fb
AH
2177 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2178 CALL_EXPR_FN (call),
2179 nargs, CALL_EXPR_ARGP (call));
726a989a
RB
2180
2181 /* Copy all CALL_EXPR flags, location and block, except
6ef5231b
JJ
2182 CALL_EXPR_VA_ARG_PACK flag. */
2183 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2184 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2185 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2186 = CALL_EXPR_RETURN_SLOT_OPT (call);
2187 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
5e278028 2188 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
726a989a 2189
6ef5231b
JJ
2190 /* Set CALL_EXPR_VA_ARG_PACK. */
2191 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2192 }
2193 }
e36711f3
RG
2194
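  /* A minimal sketch of the __builtin_va_arg_pack case above
     (hypothetical user code following the documented always-inline
     wrapper idiom):

       extern int my_printf (const char *, ...);
       static inline __attribute__ ((always_inline)) int
       my_wrap (const char *fmt, ...)
       {
         return my_printf (fmt, __builtin_va_arg_pack ());
       }

     When the wrapper body is gimplified, the trailing
     __builtin_va_arg_pack () is dropped from the argument list and
     CALL_EXPR_VA_ARG_PACK is set on the my_printf call, so that inlining
     can later substitute the caller's actual variadic arguments.  */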
2195 /* Finally, gimplify the function arguments. */
726a989a 2196 if (nargs > 0)
6de9cd9a 2197 {
726a989a
RB
2198 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2199 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2200 PUSH_ARGS_REVERSED ? i-- : i++)
2201 {
2202 enum gimplify_status t;
6de9cd9a 2203
726a989a
RB
2204 /* Avoid gimplifying the second argument to va_start, which needs to
2205 be the plain PARM_DECL. */
2206 if ((i != 1) || !builtin_va_start_p)
2207 {
1282697f
AH
2208 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2209 EXPR_LOCATION (*expr_p));
6de9cd9a 2210
726a989a
RB
2211 if (t == GS_ERROR)
2212 ret = GS_ERROR;
2213 }
2214 }
6de9cd9a 2215 }
6de9cd9a 2216
33922890
RG
2217 /* Verify the function result. */
2218 if (want_value && fndecl
f20ca725 2219 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
33922890
RG
2220 {
2221 error_at (loc, "using result of function returning %<void%>");
2222 ret = GS_ERROR;
2223 }
2224
6de9cd9a 2225 /* Try this again in case gimplification exposed something. */
6f538523 2226 if (ret != GS_ERROR)
6de9cd9a 2227 {
db3927fb 2228 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
6f538523 2229
82d6e6fc 2230 if (new_tree && new_tree != *expr_p)
5039610b
SL
2231 {
2232 /* There was a transformation of this call which computes the
2233 same value, but in a more efficient way. Return and try
2234 again. */
82d6e6fc 2235 *expr_p = new_tree;
5039610b 2236 return GS_OK;
6de9cd9a
DN
2237 }
2238 }
726a989a
RB
2239 else
2240 {
df8fa700 2241 *expr_p = error_mark_node;
726a989a
RB
2242 return GS_ERROR;
2243 }
6de9cd9a
DN
2244
2245 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2246 decl. This allows us to eliminate redundant or useless
2247 calls to "const" functions. */
becfd6e5
KZ
2248 if (TREE_CODE (*expr_p) == CALL_EXPR)
2249 {
2250 int flags = call_expr_flags (*expr_p);
2251 if (flags & (ECF_CONST | ECF_PURE)
2252 /* An infinite loop is considered a side effect. */
2253 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2254 TREE_SIDE_EFFECTS (*expr_p) = 0;
2255 }
726a989a
RB
2256
2257 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2258 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2259 form and delegate the creation of a GIMPLE_CALL to
2260 gimplify_modify_expr. This is always possible because when
2261 WANT_VALUE is true, the caller wants the result of this call into
2262 a temporary, which means that we will emit an INIT_EXPR in
2263 internal_get_tmp_var which will then be handled by
2264 gimplify_modify_expr. */
2265 if (!want_value)
2266 {
2267 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2268 have to do is replicate it as a GIMPLE_CALL tuple. */
64e0f5ff 2269 gimple_stmt_iterator gsi;
726a989a 2270 call = gimple_build_call_from_tree (*expr_p);
f20ca725 2271 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
f6b64c35 2272 notice_special_calls (call);
726a989a 2273 gimplify_seq_add_stmt (pre_p, call);
64e0f5ff 2274 gsi = gsi_last (*pre_p);
acf0174b
JJ
2275 /* Don't fold stmts inside of target construct. We'll do it
2276 during omplower pass instead. */
2277 struct gimplify_omp_ctx *ctx;
2278 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2279 if (ctx->region_type == ORT_TARGET)
2280 break;
2281 if (ctx == NULL)
2282 fold_stmt (&gsi);
726a989a
RB
2283 *expr_p = NULL_TREE;
2284 }
f20ca725
RG
2285 else
2286 /* Remember the original function type. */
2287 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2288 CALL_EXPR_FN (*expr_p));
726a989a 2289
6de9cd9a
DN
2290 return ret;
2291}
2292
2293/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2294 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2295
2296 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2297 condition is true or false, respectively. If null, we should generate
2298 our own to skip over the evaluation of this specific expression.
2299
ca80e52b
EB
2300 LOCUS is the source location of the COND_EXPR.
2301
6de9cd9a
DN
2302 This function is the tree equivalent of do_jump.
2303
2304 shortcut_cond_r should only be called by shortcut_cond_expr. */
2305
2306static tree
ca80e52b
EB
2307shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2308 location_t locus)
6de9cd9a
DN
2309{
2310 tree local_label = NULL_TREE;
2311 tree t, expr = NULL;
2312
2313 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2314 retain the shortcut semantics. Just insert the gotos here;
2315 shortcut_cond_expr will append the real blocks later. */
2316 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2317 {
ca80e52b
EB
2318 location_t new_locus;
2319
6de9cd9a
DN
2320 /* Turn if (a && b) into
2321
2322 if (a); else goto no;
2323 if (b) goto yes; else goto no;
2324 (no:) */
2325
2326 if (false_label_p == NULL)
2327 false_label_p = &local_label;
2328
ca80e52b
EB
2329 /* Keep the original source location on the first 'if'. */
2330 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
6de9cd9a
DN
2331 append_to_statement_list (t, &expr);
2332
ca80e52b
EB
2333 /* Set the source location of the && on the second 'if'. */
2334 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2335 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2336 new_locus);
6de9cd9a
DN
2337 append_to_statement_list (t, &expr);
2338 }
2339 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2340 {
ca80e52b
EB
2341 location_t new_locus;
2342
6de9cd9a
DN
2343 /* Turn if (a || b) into
2344
2345 if (a) goto yes;
2346 if (b) goto yes; else goto no;
2347 (yes:) */
2348
2349 if (true_label_p == NULL)
2350 true_label_p = &local_label;
2351
ca80e52b
EB
2352 /* Keep the original source location on the first 'if'. */
2353 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
6de9cd9a
DN
2354 append_to_statement_list (t, &expr);
2355
ca80e52b
EB
2356 /* Set the source location of the || on the second 'if'. */
2357 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2358 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2359 new_locus);
6de9cd9a
DN
2360 append_to_statement_list (t, &expr);
2361 }
1537737f
JJ
2362 else if (TREE_CODE (pred) == COND_EXPR
2363 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2364 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
6de9cd9a 2365 {
ca80e52b
EB
2366 location_t new_locus;
2367
6de9cd9a
DN
2368 /* As long as we're messing with gotos, turn if (a ? b : c) into
2369 if (a)
2370 if (b) goto yes; else goto no;
2371 else
1537737f
JJ
2372 if (c) goto yes; else goto no;
2373
2374 Don't do this if one of the arms has void type, which can happen
2375 in C++ when the arm is throw. */
ca80e52b
EB
2376
2377 /* Keep the original source location on the first 'if'. Set the source
2378 location of the ? on the second 'if'. */
2379 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
b4257cfc
RG
2380 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2381 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
ca80e52b 2382 false_label_p, locus),
b4257cfc 2383 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
ca80e52b 2384 false_label_p, new_locus));
6de9cd9a
DN
2385 }
2386 else
2387 {
b4257cfc
RG
2388 expr = build3 (COND_EXPR, void_type_node, pred,
2389 build_and_jump (true_label_p),
2390 build_and_jump (false_label_p));
ca80e52b 2391 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2392 }
2393
2394 if (local_label)
2395 {
2396 t = build1 (LABEL_EXPR, void_type_node, local_label);
2397 append_to_statement_list (t, &expr);
2398 }
2399
2400 return expr;
2401}
2402
726a989a
RB
2403/* Given a conditional expression EXPR with short-circuit boolean
2404 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
073a8998 2405 predicate apart into the equivalent sequence of conditionals. */
726a989a 2406
6de9cd9a
DN
2407static tree
2408shortcut_cond_expr (tree expr)
2409{
2410 tree pred = TREE_OPERAND (expr, 0);
2411 tree then_ = TREE_OPERAND (expr, 1);
2412 tree else_ = TREE_OPERAND (expr, 2);
2413 tree true_label, false_label, end_label, t;
2414 tree *true_label_p;
2415 tree *false_label_p;
089efaa4 2416 bool emit_end, emit_false, jump_over_else;
65355d53
RH
2417 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2418 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a
DN
2419
2420 /* First do simple transformations. */
65355d53 2421 if (!else_se)
6de9cd9a 2422 {
ca80e52b
EB
2423 /* If there is no 'else', turn
2424 if (a && b) then c
2425 into
2426 if (a) if (b) then c. */
6de9cd9a
DN
2427 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2428 {
ca80e52b 2429 /* Keep the original source location on the first 'if'. */
ec52b111 2430 location_t locus = EXPR_LOC_OR_HERE (expr);
6de9cd9a 2431 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2432 /* Set the source location of the && on the second 'if'. */
2433 if (EXPR_HAS_LOCATION (pred))
2434 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2435 then_ = shortcut_cond_expr (expr);
4356a1bf 2436 then_se = then_ && TREE_SIDE_EFFECTS (then_);
6de9cd9a 2437 pred = TREE_OPERAND (pred, 0);
b4257cfc 2438 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
ca80e52b 2439 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2440 }
2441 }
726a989a 2442
65355d53 2443 if (!then_se)
6de9cd9a
DN
2444 {
2445 /* If there is no 'then', turn
2446 if (a || b); else d
2447 into
2448 if (a); else if (b); else d. */
2449 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2450 {
ca80e52b 2451 /* Keep the original source location on the first 'if'. */
ec52b111 2452 location_t locus = EXPR_LOC_OR_HERE (expr);
6de9cd9a 2453 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2454 /* Set the source location of the || on the second 'if'. */
2455 if (EXPR_HAS_LOCATION (pred))
2456 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2457 else_ = shortcut_cond_expr (expr);
4356a1bf 2458 else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a 2459 pred = TREE_OPERAND (pred, 0);
b4257cfc 2460 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
ca80e52b 2461 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2462 }
2463 }
2464
2465 /* If we're done, great. */
2466 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2467 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2468 return expr;
2469
2470 /* Otherwise we need to mess with gotos. Change
2471 if (a) c; else d;
2472 to
2473 if (a); else goto no;
2474 c; goto end;
2475 no: d; end:
2476 and recursively gimplify the condition. */
2477
2478 true_label = false_label = end_label = NULL_TREE;
2479
2480 /* If our arms just jump somewhere, hijack those labels so we don't
2481 generate jumps to jumps. */
2482
65355d53
RH
2483 if (then_
2484 && TREE_CODE (then_) == GOTO_EXPR
6de9cd9a
DN
2485 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2486 {
2487 true_label = GOTO_DESTINATION (then_);
65355d53
RH
2488 then_ = NULL;
2489 then_se = false;
6de9cd9a
DN
2490 }
2491
65355d53
RH
2492 if (else_
2493 && TREE_CODE (else_) == GOTO_EXPR
6de9cd9a
DN
2494 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2495 {
2496 false_label = GOTO_DESTINATION (else_);
65355d53
RH
2497 else_ = NULL;
2498 else_se = false;
6de9cd9a
DN
2499 }
2500
9cf737f8 2501 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
6de9cd9a
DN
2502 if (true_label)
2503 true_label_p = &true_label;
2504 else
2505 true_label_p = NULL;
2506
2507 /* The 'else' branch also needs a label if it contains interesting code. */
65355d53 2508 if (false_label || else_se)
6de9cd9a
DN
2509 false_label_p = &false_label;
2510 else
2511 false_label_p = NULL;
2512
2513 /* If there was nothing else in our arms, just forward the label(s). */
65355d53 2514 if (!then_se && !else_se)
ca80e52b 2515 return shortcut_cond_r (pred, true_label_p, false_label_p,
ec52b111 2516 EXPR_LOC_OR_HERE (expr));
6de9cd9a
DN
2517
2518 /* If our last subexpression already has a terminal label, reuse it. */
65355d53 2519 if (else_se)
ca80e52b 2520 t = expr_last (else_);
65355d53 2521 else if (then_se)
ca80e52b 2522 t = expr_last (then_);
65355d53 2523 else
ca80e52b
EB
2524 t = NULL;
2525 if (t && TREE_CODE (t) == LABEL_EXPR)
2526 end_label = LABEL_EXPR_LABEL (t);
6de9cd9a
DN
2527
2528 /* If we don't care about jumping to the 'else' branch, jump to the end
2529 if the condition is false. */
2530 if (!false_label_p)
2531 false_label_p = &end_label;
2532
2533 /* We only want to emit these labels if we aren't hijacking them. */
2534 emit_end = (end_label == NULL_TREE);
2535 emit_false = (false_label == NULL_TREE);
2536
089efaa4
ILT
2537 /* We only emit the jump over the else clause if we have to--if the
2538 then clause may fall through. Otherwise we can wind up with a
2539 useless jump and a useless label at the end of gimplified code,
2540 which will cause us to think that this conditional as a whole
2541 falls through even if it doesn't. If we then inline a function
2542 which ends with such a condition, that can cause us to issue an
2543 inappropriate warning about control reaching the end of a
2544 non-void function. */
2545 jump_over_else = block_may_fallthru (then_);
2546
ca80e52b 2547 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
ec52b111 2548 EXPR_LOC_OR_HERE (expr));
6de9cd9a
DN
2549
2550 expr = NULL;
2551 append_to_statement_list (pred, &expr);
2552
2553 append_to_statement_list (then_, &expr);
65355d53 2554 if (else_se)
6de9cd9a 2555 {
089efaa4
ILT
2556 if (jump_over_else)
2557 {
ca80e52b 2558 tree last = expr_last (expr);
089efaa4 2559 t = build_and_jump (&end_label);
ca80e52b
EB
2560 if (EXPR_HAS_LOCATION (last))
2561 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
089efaa4
ILT
2562 append_to_statement_list (t, &expr);
2563 }
6de9cd9a
DN
2564 if (emit_false)
2565 {
2566 t = build1 (LABEL_EXPR, void_type_node, false_label);
2567 append_to_statement_list (t, &expr);
2568 }
2569 append_to_statement_list (else_, &expr);
2570 }
2571 if (emit_end && end_label)
2572 {
2573 t = build1 (LABEL_EXPR, void_type_node, end_label);
2574 append_to_statement_list (t, &expr);
2575 }
2576
2577 return expr;
2578}
2579
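/* A minimal sketch of the rewrite above (hypothetical user code; label
   names illustrative):

     if (a && b)
       f ();
     else
       g ();

   becomes roughly

     if (a) ; else goto no;
     if (b) ; else goto no;
     f ();
     goto end;
   no:
     g ();
   end: ;

   with the location of the original 'if' kept on the first generated
   'if' and the location of the && placed on the second, as described in
   shortcut_cond_r.  */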
2580/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2581
50674e96 2582tree
6de9cd9a
DN
2583gimple_boolify (tree expr)
2584{
2585 tree type = TREE_TYPE (expr);
db3927fb 2586 location_t loc = EXPR_LOCATION (expr);
6de9cd9a 2587
554cf330
JJ
2588 if (TREE_CODE (expr) == NE_EXPR
2589 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2590 && integer_zerop (TREE_OPERAND (expr, 1)))
2591 {
2592 tree call = TREE_OPERAND (expr, 0);
2593 tree fn = get_callee_fndecl (call);
2594
d53c73e0
JJ
2595 /* For __builtin_expect ((long) (x), y) recurse into x as well
2596 if x is truth_value_p. */
554cf330
JJ
2597 if (fn
2598 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2599 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2600 && call_expr_nargs (call) == 2)
2601 {
2602 tree arg = CALL_EXPR_ARG (call, 0);
2603 if (arg)
2604 {
2605 if (TREE_CODE (arg) == NOP_EXPR
2606 && TREE_TYPE (arg) == TREE_TYPE (call))
2607 arg = TREE_OPERAND (arg, 0);
d53c73e0
JJ
2608 if (truth_value_p (TREE_CODE (arg)))
2609 {
2610 arg = gimple_boolify (arg);
2611 CALL_EXPR_ARG (call, 0)
2612 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2613 }
554cf330
JJ
2614 }
2615 }
2616 }
2617
6de9cd9a
DN
2618 switch (TREE_CODE (expr))
2619 {
2620 case TRUTH_AND_EXPR:
2621 case TRUTH_OR_EXPR:
2622 case TRUTH_XOR_EXPR:
2623 case TRUTH_ANDIF_EXPR:
2624 case TRUTH_ORIF_EXPR:
2625 /* Also boolify the arguments of truth exprs. */
2626 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2627 /* FALLTHRU */
2628
2629 case TRUTH_NOT_EXPR:
2630 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
6de9cd9a 2631
6de9cd9a 2632 /* These expressions always produce boolean results. */
7f3ff782
KT
2633 if (TREE_CODE (type) != BOOLEAN_TYPE)
2634 TREE_TYPE (expr) = boolean_type_node;
6de9cd9a 2635 return expr;
d3147f64 2636
8170608b
TB
2637 case ANNOTATE_EXPR:
2638 if ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1))
2639 == annot_expr_ivdep_kind)
2640 {
2641 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2642 if (TREE_CODE (type) != BOOLEAN_TYPE)
2643 TREE_TYPE (expr) = boolean_type_node;
2644 return expr;
2645 }
2646 /* FALLTHRU */
2647
6de9cd9a 2648 default:
7f3ff782
KT
2649 if (COMPARISON_CLASS_P (expr))
2650 {
2651 /* These expressions always produce boolean results. */
2652 if (TREE_CODE (type) != BOOLEAN_TYPE)
2653 TREE_TYPE (expr) = boolean_type_node;
2654 return expr;
2655 }
6de9cd9a
DN
2656 /* Other expressions that get here must have boolean values, but
2657 might need to be converted to the appropriate mode. */
7f3ff782 2658 if (TREE_CODE (type) == BOOLEAN_TYPE)
1d15f620 2659 return expr;
db3927fb 2660 return fold_convert_loc (loc, boolean_type_node, expr);
6de9cd9a
DN
2661 }
2662}
2663
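/* Illustrative example: in C the condition of

     if (a < b && !c) ...

   has type int in GENERIC.  gimple_boolify gives the comparison and the
   TRUTH_* operators BOOLEAN_TYPE and converts the plain integer c to
   boolean_type_node (possibly folded further), so the gimplifier sees a
   properly typed boolean predicate.  */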
aea74440
JJ
2664/* Given a conditional expression *EXPR_P without side effects, gimplify
2665 its operands. New statements are inserted to PRE_P. */
2666
2667static enum gimplify_status
726a989a 2668gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
aea74440
JJ
2669{
2670 tree expr = *expr_p, cond;
2671 enum gimplify_status ret, tret;
2672 enum tree_code code;
2673
2674 cond = gimple_boolify (COND_EXPR_COND (expr));
2675
2676 /* We need to handle && and || specially, as their gimplification
2677 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2678 code = TREE_CODE (cond);
2679 if (code == TRUTH_ANDIF_EXPR)
2680 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2681 else if (code == TRUTH_ORIF_EXPR)
2682 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
726a989a 2683 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
aea74440
JJ
2684 COND_EXPR_COND (*expr_p) = cond;
2685
2686 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2687 is_gimple_val, fb_rvalue);
2688 ret = MIN (ret, tret);
2689 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2690 is_gimple_val, fb_rvalue);
2691
2692 return MIN (ret, tret);
2693}
2694
ad19c4be 2695/* Return true if evaluating EXPR could trap.
aea74440
JJ
2696 EXPR is GENERIC, while tree_could_trap_p can be called
2697 only on GIMPLE. */
2698
2699static bool
2700generic_expr_could_trap_p (tree expr)
2701{
2702 unsigned i, n;
2703
2704 if (!expr || is_gimple_val (expr))
2705 return false;
2706
2707 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2708 return true;
2709
2710 n = TREE_OPERAND_LENGTH (expr);
2711 for (i = 0; i < n; i++)
2712 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2713 return true;
2714
2715 return false;
2716}
2717
206048bd 2718/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
6de9cd9a
DN
2719 into
2720
2721 if (p) if (p)
2722 t1 = a; a;
2723 else or else
2724 t1 = b; b;
2725 t1;
2726
2727 The second form is used when *EXPR_P is of type void.
2728
2729 PRE_P points to the list where side effects that must happen before
dae7ec87 2730 *EXPR_P should be stored. */
6de9cd9a
DN
2731
2732static enum gimplify_status
726a989a 2733gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
6de9cd9a
DN
2734{
2735 tree expr = *expr_p;
06ec59e6
EB
2736 tree type = TREE_TYPE (expr);
2737 location_t loc = EXPR_LOCATION (expr);
2738 tree tmp, arm1, arm2;
6de9cd9a 2739 enum gimplify_status ret;
726a989a
RB
2740 tree label_true, label_false, label_cont;
2741 bool have_then_clause_p, have_else_clause_p;
2742 gimple gimple_cond;
2743 enum tree_code pred_code;
2744 gimple_seq seq = NULL;
26d44ae2
RH
2745
2746 /* If this COND_EXPR has a value, copy the values into a temporary within
2747 the arms. */
06ec59e6 2748 if (!VOID_TYPE_P (type))
26d44ae2 2749 {
06ec59e6 2750 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
aff98faf
AO
2751 tree result;
2752
06ec59e6
EB
2753 /* If either an rvalue is ok or we do not require an lvalue, create the
2754 temporary. But we cannot do that if the type is addressable. */
2755 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
c3e203cf 2756 && !TREE_ADDRESSABLE (type))
aff98faf 2757 {
aea74440
JJ
2758 if (gimplify_ctxp->allow_rhs_cond_expr
2759 /* If either branch has side effects or could trap, it can't be
2760 evaluated unconditionally. */
06ec59e6
EB
2761 && !TREE_SIDE_EFFECTS (then_)
2762 && !generic_expr_could_trap_p (then_)
2763 && !TREE_SIDE_EFFECTS (else_)
2764 && !generic_expr_could_trap_p (else_))
aea74440
JJ
2765 return gimplify_pure_cond_expr (expr_p, pre_p);
2766
06ec59e6
EB
2767 tmp = create_tmp_var (type, "iftmp");
2768 result = tmp;
aff98faf 2769 }
06ec59e6
EB
2770
2771 /* Otherwise, only create and copy references to the values. */
26d44ae2
RH
2772 else
2773 {
06ec59e6 2774 type = build_pointer_type (type);
aff98faf 2775
06ec59e6
EB
2776 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2777 then_ = build_fold_addr_expr_loc (loc, then_);
aff98faf 2778
06ec59e6
EB
2779 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2780 else_ = build_fold_addr_expr_loc (loc, else_);
2781
2782 expr
2783 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
aea74440 2784
726a989a 2785 tmp = create_tmp_var (type, "iftmp");
70f34814 2786 result = build_simple_mem_ref_loc (loc, tmp);
26d44ae2
RH
2787 }
2788
06ec59e6
EB
2789 /* Build the new then clause, `tmp = then_;'. But don't build the
2790 assignment if the value is void; in C++ it can be if it's a throw. */
2791 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2792 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
26d44ae2 2793
06ec59e6
EB
2794 /* Similarly, build the new else clause, `tmp = else_;'. */
2795 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2796 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
26d44ae2
RH
2797
2798 TREE_TYPE (expr) = void_type_node;
2799 recalculate_side_effects (expr);
2800
d91ba7b0 2801 /* Move the COND_EXPR to the prequeue. */
726a989a 2802 gimplify_stmt (&expr, pre_p);
26d44ae2 2803
aff98faf 2804 *expr_p = result;
726a989a 2805 return GS_ALL_DONE;
26d44ae2
RH
2806 }
2807
f2f81d57
EB
2808 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
2809 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
2810 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
2811 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
2812
26d44ae2
RH
2813 /* Make sure the condition has BOOLEAN_TYPE. */
2814 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2815
2816 /* Break apart && and || conditions. */
2817 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2818 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2819 {
2820 expr = shortcut_cond_expr (expr);
2821
2822 if (expr != *expr_p)
2823 {
2824 *expr_p = expr;
2825
2826 /* We can't rely on gimplify_expr to re-gimplify the expanded
2827 form properly, as cleanups might cause the target labels to be
2828 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2829 set up a conditional context. */
2830 gimple_push_condition ();
726a989a 2831 gimplify_stmt (expr_p, &seq);
26d44ae2 2832 gimple_pop_condition (pre_p);
726a989a 2833 gimple_seq_add_seq (pre_p, seq);
26d44ae2
RH
2834
2835 return GS_ALL_DONE;
2836 }
2837 }
2838
2839 /* Now do the normal gimplification. */
26d44ae2 2840
726a989a
RB
2841 /* Gimplify condition. */
2842 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2843 fb_rvalue);
26d44ae2 2844 if (ret == GS_ERROR)
726a989a
RB
2845 return GS_ERROR;
2846 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2847
2848 gimple_push_condition ();
26d44ae2 2849
726a989a
RB
2850 have_then_clause_p = have_else_clause_p = false;
2851 if (TREE_OPERAND (expr, 1) != NULL
2852 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2853 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2854 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2855 == current_function_decl)
2856 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2857 have different locations, otherwise we end up with incorrect
2858 location information on the branches. */
2859 && (optimize
2860 || !EXPR_HAS_LOCATION (expr)
2861 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
2862 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
2863 {
2864 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
2865 have_then_clause_p = true;
26d44ae2
RH
2866 }
2867 else
c2255bc4 2868 label_true = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
2869 if (TREE_OPERAND (expr, 2) != NULL
2870 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
2871 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
2872 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
2873 == current_function_decl)
2874 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2875 have different locations, otherwise we end up with incorrect
2876 location information on the branches. */
2877 && (optimize
2878 || !EXPR_HAS_LOCATION (expr)
2879 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
2880 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
2881 {
2882 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
2883 have_else_clause_p = true;
2884 }
2885 else
c2255bc4 2886 label_false = create_artificial_label (UNKNOWN_LOCATION);
26d44ae2 2887
726a989a
RB
2888 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
2889 &arm2);
26d44ae2 2890
726a989a
RB
2891 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
2892 label_false);
26d44ae2 2893
726a989a
RB
2894 gimplify_seq_add_stmt (&seq, gimple_cond);
2895 label_cont = NULL_TREE;
2896 if (!have_then_clause_p)
2897 {
2898 /* For if (...) {} else { code; } put label_true after
2899 the else block. */
2900 if (TREE_OPERAND (expr, 1) == NULL_TREE
2901 && !have_else_clause_p
2902 && TREE_OPERAND (expr, 2) != NULL_TREE)
2903 label_cont = label_true;
2904 else
2905 {
2906 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
2907 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
2908 /* For if (...) { code; } else {} or
2909 if (...) { code; } else goto label; or
2910 if (...) { code; return; } else { ... }
2911 label_cont isn't needed. */
2912 if (!have_else_clause_p
2913 && TREE_OPERAND (expr, 2) != NULL_TREE
2914 && gimple_seq_may_fallthru (seq))
2915 {
2916 gimple g;
c2255bc4 2917 label_cont = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
2918
2919 g = gimple_build_goto (label_cont);
2920
2921 /* GIMPLE_COND's are very low level; they have embedded
2922 gotos. This particular embedded goto should not be marked
2923 with the location of the original COND_EXPR, as it would
2924 correspond to the COND_EXPR's condition, not the ELSE or the
2925 THEN arms. To avoid marking it with the wrong location, flag
2926 it as "no location". */
2927 gimple_set_do_not_emit_location (g);
2928
2929 gimplify_seq_add_stmt (&seq, g);
2930 }
2931 }
2932 }
2933 if (!have_else_clause_p)
2934 {
2935 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
2936 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
2937 }
2938 if (label_cont)
2939 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
2940
2941 gimple_pop_condition (pre_p);
2942 gimple_seq_add_seq (pre_p, seq);
2943
2944 if (ret == GS_ERROR)
2945 ; /* Do nothing. */
2946 else if (have_then_clause_p || have_else_clause_p)
2947 ret = GS_ALL_DONE;
2948 else
2949 {
2950 /* Both arms are empty; replace the COND_EXPR with its predicate. */
2951 expr = TREE_OPERAND (expr, 0);
2952 gimplify_stmt (&expr, pre_p);
2953 }
2954
2955 *expr_p = NULL;
2956 return ret;
2957}
2958
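/* A minimal sketch of the statement form above (hypothetical user code;
   label names illustrative):

     if (p)
       f ();
     else
       g ();

   is lowered to roughly

     if (p != 0) goto L1; else goto L2;
     L1: f (); goto L3;
     L2: g ();
     L3: ;

   while a value-producing '(p) ? a : b' first has its arms rewritten
   into assignments to a temporary, as described at the top of the
   function.  */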
f76d6e6f
EB
2959/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
2960 to be marked addressable.
2961
2962 We cannot rely on such an expression being directly markable if a temporary
2963 has been created by the gimplification. In this case, we create another
2964 temporary and initialize it with a copy, which will become a store after we
2965 mark it addressable. This can happen if the front-end passed us something
2966 that it could not mark addressable yet, like a Fortran pass-by-reference
2967 parameter (int) floatvar. */
2968
2969static void
2970prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
2971{
2972 while (handled_component_p (*expr_p))
2973 expr_p = &TREE_OPERAND (*expr_p, 0);
2974 if (is_gimple_reg (*expr_p))
2975 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
2976}
2977
726a989a
RB
2978/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2979 a call to __builtin_memcpy. */
2980
2981static enum gimplify_status
2982gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
2983 gimple_seq *seq_p)
26d44ae2 2984{
5039610b 2985 tree t, to, to_ptr, from, from_ptr;
726a989a 2986 gimple gs;
db3927fb 2987 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 2988
726a989a
RB
2989 to = TREE_OPERAND (*expr_p, 0);
2990 from = TREE_OPERAND (*expr_p, 1);
26d44ae2 2991
f76d6e6f
EB
2992 /* Mark the RHS addressable. Beware that it may not be possible to do so
2993 directly if a temporary has been created by the gimplification. */
2994 prepare_gimple_addressable (&from, seq_p);
2995
628c189e 2996 mark_addressable (from);
db3927fb
AH
2997 from_ptr = build_fold_addr_expr_loc (loc, from);
2998 gimplify_arg (&from_ptr, seq_p, loc);
26d44ae2 2999
628c189e 3000 mark_addressable (to);
db3927fb
AH
3001 to_ptr = build_fold_addr_expr_loc (loc, to);
3002 gimplify_arg (&to_ptr, seq_p, loc);
726a989a 3003
e79983f4 3004 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
726a989a
RB
3005
3006 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
26d44ae2
RH
3007
3008 if (want_value)
3009 {
726a989a
RB
3010 /* tmp = memcpy() */
3011 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3012 gimple_call_set_lhs (gs, t);
3013 gimplify_seq_add_stmt (seq_p, gs);
3014
70f34814 3015 *expr_p = build_simple_mem_ref (t);
726a989a 3016 return GS_ALL_DONE;
26d44ae2
RH
3017 }
3018
726a989a
RB
3019 gimplify_seq_add_stmt (seq_p, gs);
3020 *expr_p = NULL;
3021 return GS_ALL_DONE;
26d44ae2
RH
3022}
3023
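/* Illustrative example (hypothetical user code): an assignment of a
   variable-sized object, e.g. with 'char (*dst)[n], (*src)[n]',

     *dst = *src;

   is emitted as roughly

     __builtin_memcpy (dst, src, <number of bytes>);

   and the temporary holding memcpy's return value is only created when
   the value of the assignment itself is used.  */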
3024/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3025 a call to __builtin_memset. In this case we know that the RHS is
3026 a CONSTRUCTOR with an empty element list. */
3027
3028static enum gimplify_status
726a989a
RB
3029gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3030 gimple_seq *seq_p)
26d44ae2 3031{
1a13360e 3032 tree t, from, to, to_ptr;
726a989a 3033 gimple gs;
db3927fb 3034 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3035
1a13360e
OH
3036 /* Assert our assumptions, to abort instead of producing wrong code
3037 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3038 not be immediately exposed. */
b8698a0f 3039 from = TREE_OPERAND (*expr_p, 1);
1a13360e
OH
3040 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3041 from = TREE_OPERAND (from, 0);
3042
3043 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
9771b263 3044 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
1a13360e
OH
3045
3046 /* Now proceed. */
726a989a 3047 to = TREE_OPERAND (*expr_p, 0);
26d44ae2 3048
db3927fb
AH
3049 to_ptr = build_fold_addr_expr_loc (loc, to);
3050 gimplify_arg (&to_ptr, seq_p, loc);
e79983f4 3051 t = builtin_decl_implicit (BUILT_IN_MEMSET);
726a989a
RB
3052
3053 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
26d44ae2
RH
3054
3055 if (want_value)
3056 {
726a989a
RB
3057 /* tmp = memset() */
3058 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3059 gimple_call_set_lhs (gs, t);
3060 gimplify_seq_add_stmt (seq_p, gs);
3061
3062 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3063 return GS_ALL_DONE;
26d44ae2
RH
3064 }
3065
726a989a
RB
3066 gimplify_seq_add_stmt (seq_p, gs);
3067 *expr_p = NULL;
3068 return GS_ALL_DONE;
26d44ae2
RH
3069}
3070
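/* Illustrative example: when the store being gimplified has a
   non-constant size and its RHS is an empty CONSTRUCTOR, e.g. the
   zero-initialization of an object TO of variable-sized type, the
   assignment is emitted as roughly

     __builtin_memset (&TO, 0, <number of bytes>);

   mirroring the memcpy case above, with the temporary for the return
   value only created when the assignment's value is used.  */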
57d1dd87
RH
3071/* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3072 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
ad19c4be 3073 assignment. Return non-null if we detect a potential overlap. */
57d1dd87
RH
3074
3075struct gimplify_init_ctor_preeval_data
3076{
3077 /* The base decl of the lhs object. May be NULL, in which case we
3078 have to assume the lhs is indirect. */
3079 tree lhs_base_decl;
3080
3081 /* The alias set of the lhs object. */
4862826d 3082 alias_set_type lhs_alias_set;
57d1dd87
RH
3083};
3084
3085static tree
3086gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3087{
3088 struct gimplify_init_ctor_preeval_data *data
3089 = (struct gimplify_init_ctor_preeval_data *) xdata;
3090 tree t = *tp;
3091
3092 /* If we find the base object, obviously we have overlap. */
3093 if (data->lhs_base_decl == t)
3094 return t;
3095
3096 /* If the constructor component is indirect, determine if we have a
3097 potential overlap with the lhs. The only bits of information we
3098 have to go on at this point are addressability and alias sets. */
70f34814
RG
3099 if ((INDIRECT_REF_P (t)
3100 || TREE_CODE (t) == MEM_REF)
57d1dd87
RH
3101 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3102 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3103 return t;
3104
df10ee2a 3105 /* If the constructor component is a call, determine if it can hide a
70f34814
RG
3106 potential overlap with the lhs through an INDIRECT_REF like above.
3107 ??? Ugh - this is completely broken. In fact this whole analysis
3108 doesn't look conservative. */
df10ee2a
EB
3109 if (TREE_CODE (t) == CALL_EXPR)
3110 {
3111 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3112
3113 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3114 if (POINTER_TYPE_P (TREE_VALUE (type))
3115 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3116 && alias_sets_conflict_p (data->lhs_alias_set,
3117 get_alias_set
3118 (TREE_TYPE (TREE_VALUE (type)))))
3119 return t;
3120 }
3121
6615c446 3122 if (IS_TYPE_OR_DECL_P (t))
3123 *walk_subtrees = 0;
3124 return NULL;
3125}
3126
726a989a 3127/* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3128 force values that overlap with the lhs (as described by *DATA)
3129 into temporaries. */
3130
3131static void
726a989a 3132gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3133 struct gimplify_init_ctor_preeval_data *data)
3134{
3135 enum gimplify_status one;
3136
3137 /* If the value is constant, then there's nothing to pre-evaluate. */
3138 if (TREE_CONSTANT (*expr_p))
3139 {
3140 /* Ensure it does not have side effects, it might contain a reference to
3141 the object we're initializing. */
3142 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3143 return;
3144 }
3145
3146 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3147 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3148 return;
3149
3150 /* Recurse for nested constructors. */
3151 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3152 {
3153 unsigned HOST_WIDE_INT ix;
3154 constructor_elt *ce;
9771b263 3155 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4038c495 3156
9771b263 3157 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4038c495 3158 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
726a989a 3159
3160 return;
3161 }
3162
3163 /* If this is a variable sized type, we must remember the size. */
3164 maybe_with_size_expr (expr_p);
3165
3166 /* Gimplify the constructor element to something appropriate for the rhs
726a989a 3167 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
d3147f64 3168 the gimplifier will consider this a store to memory. Doing this
3169 gimplification now means that we won't have to deal with complicated
3170 language-specific trees, nor trees like SAVE_EXPR that can induce
b01d837f 3171 exponential search behavior. */
3172 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3173 if (one == GS_ERROR)
3174 {
3175 *expr_p = NULL;
3176 return;
3177 }
3178
3179 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3180 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3181 always be true for all scalars, since is_gimple_mem_rhs insists on a
3182 temporary variable for them. */
3183 if (DECL_P (*expr_p))
3184 return;
3185
3186 /* If this is of variable size, we have no choice but to assume it doesn't
3187 overlap since we can't make a temporary for it. */
4c923c28 3188 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3189 return;
3190
3191 /* Otherwise, we must search for overlap ... */
3192 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3193 return;
3194
3195 /* ... and if found, force the value into a temporary. */
3196 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3197}
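
/* Illustrative sketch (hypothetical source, approximate result): if a
   CONSTRUCTOR element reads the object being assigned, as in something
   like

       a = (struct A) { .x = a.y, .y = 1 };

   the walk above finds the overlap with the lhs "a" and forces that
   element into a temporary first, roughly

       D.1 = a.y;
       a.x = D.1;
       a.y = 1;

   so the old value is fetched before any store to (or clearing of) "a".
   The name D.1 is made up here.  */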
3198
3199/* A subroutine of gimplify_init_ctor_eval. Create a loop for
3200 a RANGE_EXPR in a CONSTRUCTOR for an array.
3201
3202 var = lower;
3203 loop_entry:
3204 object[var] = value;
3205 if (var == upper)
3206 goto loop_exit;
3207 var = var + 1;
3208 goto loop_entry;
3209 loop_exit:
3210
3211 We increment var _after_ the loop exit check because we might otherwise
3212 fail if upper == TYPE_MAX_VALUE (type for upper).
3213
3214 Note that we never have to deal with SAVE_EXPRs here, because this has
3215 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3216
9771b263 3217static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
726a989a 3218 gimple_seq *, bool);
3219
3220static void
3221gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3222 tree value, tree array_elt_type,
726a989a 3223 gimple_seq *pre_p, bool cleared)
6fa91b48 3224{
726a989a 3225 tree loop_entry_label, loop_exit_label, fall_thru_label;
b56b9fe3 3226 tree var, var_type, cref, tmp;
6fa91b48 3227
3228 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3229 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3230 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3231
3232 /* Create and initialize the index variable. */
3233 var_type = TREE_TYPE (upper);
3234 var = create_tmp_var (var_type, NULL);
726a989a 3235 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3236
3237 /* Add the loop entry label. */
726a989a 3238 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3239
3240 /* Build the reference. */
3241 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3242 var, NULL_TREE, NULL_TREE);
3243
3244 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3245 the store. Otherwise just assign value to the reference. */
3246
3247 if (TREE_CODE (value) == CONSTRUCTOR)
3248 /* NB we might have to call ourselves recursively through
3249 gimplify_init_ctor_eval if the value is a constructor. */
3250 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3251 pre_p, cleared);
3252 else
726a989a 3253 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3254
3255 /* We exit the loop when the index var is equal to the upper bound. */
3256 gimplify_seq_add_stmt (pre_p,
3257 gimple_build_cond (EQ_EXPR, var, upper,
3258 loop_exit_label, fall_thru_label));
3259
3260 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3261
3262 /* Otherwise, increment the index var... */
3263 tmp = build2 (PLUS_EXPR, var_type, var,
3264 fold_convert (var_type, integer_one_node));
726a989a 3265 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3266
3267 /* ...and jump back to the loop entry. */
726a989a 3268 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3269
3270 /* Add the loop exit label. */
726a989a 3271 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3272}
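
/* Illustrative sketch (GNU C range designator; GIMPLE approximate): when
   the front end keeps the range as a RANGE_EXPR, an initializer such as

       int a[10] = { [2 ... 7] = 5 };

   is lowered by the function above along the scheme in its comment:

       var = 2;
     loop_entry:
       a[var] = 5;
       if (var == 7) goto loop_exit; else goto fall_thru;
     fall_thru:
       var = var + 1;
       goto loop_entry;
     loop_exit:;  */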
3273
292a398f 3274/* Return true if FDECL is accessing a field that is zero sized. */
b8698a0f 3275
292a398f 3276static bool
22ea9ec0 3277zero_sized_field_decl (const_tree fdecl)
292a398f 3278{
b8698a0f 3279 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3280 && integer_zerop (DECL_SIZE (fdecl)))
3281 return true;
3282 return false;
3283}
3284
d06526b7 3285/* Return true if TYPE is zero sized. */
b8698a0f 3286
d06526b7 3287static bool
22ea9ec0 3288zero_sized_type (const_tree type)
3289{
3290 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3291 && integer_zerop (TYPE_SIZE (type)))
3292 return true;
3293 return false;
3294}
3295
3296/* A subroutine of gimplify_init_constructor. Generate individual
3297 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4038c495 3298 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3299 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3300 zeroed first. */
3301
3302static void
9771b263 3303gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
726a989a 3304 gimple_seq *pre_p, bool cleared)
3305{
3306 tree array_elt_type = NULL;
3307 unsigned HOST_WIDE_INT ix;
3308 tree purpose, value;
3309
3310 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3311 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3312
4038c495 3313 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
57d1dd87 3314 {
726a989a 3315 tree cref;
3316
3317 /* NULL values are created above for gimplification errors. */
3318 if (value == NULL)
3319 continue;
3320
3321 if (cleared && initializer_zerop (value))
3322 continue;
3323
3324 /* ??? Here's to hoping the front end fills in all of the indices,
3325 so we don't have to figure out what's missing ourselves. */
3326 gcc_assert (purpose);
3327
3328 /* Skip zero-sized fields, unless value has side-effects. This can
3329 happen with calls to functions returning a zero-sized type, which
3330 we shouldn't discard. As a number of downstream passes don't
3331 expect sets of zero-sized fields, we rely on the gimplification of
3332 the MODIFY_EXPR we make below to drop the assignment statement. */
3333 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3334 continue;
3335
3336 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3337 whole range. */
3338 if (TREE_CODE (purpose) == RANGE_EXPR)
57d1dd87 3339 {
3340 tree lower = TREE_OPERAND (purpose, 0);
3341 tree upper = TREE_OPERAND (purpose, 1);
3342
3343 /* If the lower bound is equal to upper, just treat it as if
3344 upper was the index. */
3345 if (simple_cst_equal (lower, upper))
3346 purpose = upper;
3347 else
3348 {
3349 gimplify_init_ctor_eval_range (object, lower, upper, value,
3350 array_elt_type, pre_p, cleared);
3351 continue;
3352 }
3353 }
57d1dd87 3354
3355 if (array_elt_type)
3356 {
3357 /* Do not use bitsizetype for ARRAY_REF indices. */
3358 if (TYPE_DOMAIN (TREE_TYPE (object)))
3359 purpose
3360 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3361 purpose);
3362 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3363 purpose, NULL_TREE, NULL_TREE);
3364 }
3365 else
3366 {
3367 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3368 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3369 unshare_expr (object), purpose, NULL_TREE);
cf0efa6a 3370 }
57d1dd87 3371
3372 if (TREE_CODE (value) == CONSTRUCTOR
3373 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3374 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3375 pre_p, cleared);
3376 else
3377 {
726a989a 3378 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
57d1dd87 3379 gimplify_and_add (init, pre_p);
726a989a 3380 ggc_free (init);
3381 }
3382 }
3383}
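
/* Illustrative sketch (hypothetical source, approximate result): with
   CLEARED false, an aggregate initializer such as

       struct P { int x, y; } p = { 1, 2 };

   is broken by the loop above into per-element stores

       p.x = 1;
       p.y = 2;

   whereas with CLEARED true the zero elements are simply skipped, because
   the whole object has already been zeroed.  */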
3384
ad19c4be 3385/* Return the appropriate RHS predicate for this LHS. */
726a989a 3386
18f429e2 3387gimple_predicate
3388rhs_predicate_for (tree lhs)
3389{
3390 if (is_gimple_reg (lhs))
3391 return is_gimple_reg_rhs_or_call;
726a989a 3392 else
ba4d8f9d 3393 return is_gimple_mem_rhs_or_call;
3394}
3395
3396/* Gimplify a C99 compound literal expression. This just means adding
3397 the DECL_EXPR before the current statement and using its anonymous
3398 decl instead. */
3399
3400static enum gimplify_status
a845a7f5 3401gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4c53d183 3402 bool (*gimple_test_f) (tree),
a845a7f5 3403 fallback_t fallback)
3404{
3405 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3406 tree decl = DECL_EXPR_DECL (decl_s);
4c53d183 3407 tree init = DECL_INITIAL (decl);
2ec5deb5
PB
3408 /* Mark the decl as addressable if the compound literal
3409 expression is addressable now, otherwise it is marked too late
3410 after we gimplify the initialization expression. */
3411 if (TREE_ADDRESSABLE (*expr_p))
3412 TREE_ADDRESSABLE (decl) = 1;
3413 /* Otherwise, if we don't need an lvalue and have a literal directly
3414 substitute it. Check if it matches the gimple predicate, as
3415 otherwise we'd generate a new temporary, and we can as well just
3416 use the decl we already have. */
3417 else if (!TREE_ADDRESSABLE (decl)
3418 && init
3419 && (fallback & fb_lvalue) == 0
3420 && gimple_test_f (init))
3421 {
3422 *expr_p = init;
3423 return GS_OK;
3424 }
3425
3426 /* Preliminarily mark non-addressed complex variables as eligible
3427 for promotion to gimple registers. We'll transform their uses
3428 as we find them. */
3429 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3430 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3431 && !TREE_THIS_VOLATILE (decl)
3432 && !needs_to_live_in_memory (decl))
3433 DECL_GIMPLE_REG_P (decl) = 1;
3434
3435 /* If the decl is not addressable, then it is being used in some
3436 expression or on the right hand side of a statement, and it can
3437 be put into a readonly data section. */
3438 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3439 TREE_READONLY (decl) = 1;
3440
3441 /* This decl isn't mentioned in the enclosing block, so add it to the
3442 list of temps. FIXME it seems a bit of a kludge to say that
3443 anonymous artificial vars aren't pushed, but everything else is. */
3444 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3445 gimple_add_tmp_var (decl);
3446
3447 gimplify_and_add (decl_s, pre_p);
3448 *expr_p = decl;
3449 return GS_OK;
3450}
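
/* Illustrative sketch (hypothetical C): for

       int *p = (int []) { 1, 2, 3 };

   the DECL_EXPR of the literal's anonymous decl, call it D.1, is
   gimplified into PRE_P and the COMPOUND_LITERAL_EXPR itself is replaced
   by D.1, so the result is roughly

       int D.1[3] = { 1, 2, 3 };
       p = &D.1[0];

   When no lvalue is required and the initializer already satisfies the
   gimple predicate, the initializer is substituted directly instead.  */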
3451
3452/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3453 return a new CONSTRUCTOR if something changed. */
3454
3455static tree
3456optimize_compound_literals_in_ctor (tree orig_ctor)
3457{
3458 tree ctor = orig_ctor;
3459 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3460 unsigned int idx, num = vec_safe_length (elts);
3461
3462 for (idx = 0; idx < num; idx++)
3463 {
9771b263 3464 tree value = (*elts)[idx].value;
3465 tree newval = value;
3466 if (TREE_CODE (value) == CONSTRUCTOR)
3467 newval = optimize_compound_literals_in_ctor (value);
3468 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3469 {
3470 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3471 tree decl = DECL_EXPR_DECL (decl_s);
3472 tree init = DECL_INITIAL (decl);
3473
3474 if (!TREE_ADDRESSABLE (value)
3475 && !TREE_ADDRESSABLE (decl)
3476 && init
3477 && TREE_CODE (init) == CONSTRUCTOR)
3478 newval = optimize_compound_literals_in_ctor (init);
3479 }
3480 if (newval == value)
3481 continue;
3482
3483 if (ctor == orig_ctor)
3484 {
3485 ctor = copy_node (orig_ctor);
9771b263 3486 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3487 elts = CONSTRUCTOR_ELTS (ctor);
3488 }
9771b263 3489 (*elts)[idx].value = newval;
3490 }
3491 return ctor;
3492}
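
/* Illustrative sketch (hypothetical C): in

       struct Q { int x, y; };
       struct S { struct Q q; int n; } s = { (struct Q) { 1, 2 }, 3 };

   the embedded COMPOUND_LITERAL_EXPR is replaced by its CONSTRUCTOR, as if

       struct S s = { { 1, 2 }, 3 };

   had been written, provided neither the literal nor its decl is
   addressable; this keeps the outer initializer a single CONSTRUCTOR.  */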
3493
3494/* A subroutine of gimplify_modify_expr. Break out elements of a
3495 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3496
3497 Note that we still need to clear any elements that don't have explicit
3498 initializers, so if not all elements are initialized we keep the
3499 original MODIFY_EXPR, we just remove all of the constructor elements.
3500
3501 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3502 GS_ERROR if we would have to create a temporary when gimplifying
3503 this constructor. Otherwise, return GS_OK.
3504
3505 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3506
3507static enum gimplify_status
3508gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3509 bool want_value, bool notify_temp_creation)
26d44ae2 3510{
f5a1f0d0 3511 tree object, ctor, type;
26d44ae2 3512 enum gimplify_status ret;
9771b263 3513 vec<constructor_elt, va_gc> *elts;
26d44ae2 3514
f5a1f0d0 3515 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
26d44ae2 3516
3517 if (!notify_temp_creation)
3518 {
726a989a 3519 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
ffed8a01
AH
3520 is_gimple_lvalue, fb_lvalue);
3521 if (ret == GS_ERROR)
3522 return ret;
3523 }
57d1dd87 3524
726a989a 3525 object = TREE_OPERAND (*expr_p, 0);
3526 ctor = TREE_OPERAND (*expr_p, 1) =
3527 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3528 type = TREE_TYPE (ctor);
3529 elts = CONSTRUCTOR_ELTS (ctor);
26d44ae2 3530 ret = GS_ALL_DONE;
726a989a 3531
3532 switch (TREE_CODE (type))
3533 {
3534 case RECORD_TYPE:
3535 case UNION_TYPE:
3536 case QUAL_UNION_TYPE:
3537 case ARRAY_TYPE:
3538 {
57d1dd87 3539 struct gimplify_init_ctor_preeval_data preeval_data;
3540 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3541 bool cleared, complete_p, valid_const_initializer;
3542
3543 /* Aggregate types must lower constructors to initialization of
3544 individual elements. The exception is that a CONSTRUCTOR node
3545 with no elements indicates zero-initialization of the whole. */
9771b263 3546 if (vec_safe_is_empty (elts))
3547 {
3548 if (notify_temp_creation)
3549 return GS_OK;
3550 break;
3551 }
b8698a0f 3552
3553 /* Fetch information about the constructor to direct later processing.
3554 We might want to make static versions of it in various cases, and
3555 can only do so if it is known to be a valid constant initializer. */
3556 valid_const_initializer
3557 = categorize_ctor_elements (ctor, &num_nonzero_elements,
953d0c90 3558 &num_ctor_elements, &complete_p);
3559
3560 /* If a const aggregate variable is being initialized, then it
3561 should never be a loss to promote the variable to be static. */
fe24d485 3562 if (valid_const_initializer
6f642f98 3563 && num_nonzero_elements > 1
26d44ae2 3564 && TREE_READONLY (object)
3565 && TREE_CODE (object) == VAR_DECL
3566 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
26d44ae2 3567 {
3568 if (notify_temp_creation)
3569 return GS_ERROR;
3570 DECL_INITIAL (object) = ctor;
3571 TREE_STATIC (object) = 1;
3572 if (!DECL_NAME (object))
3573 DECL_NAME (object) = create_tmp_var_name ("C");
3574 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3575
3576 /* ??? C++ doesn't automatically append a .<number> to the
6bdf3519 3577 assembler name, and even when it does, it looks at FE private
3578 data structures to figure out what that number should be,
3579 which are not set for this variable. I suppose this is
3580 important for local statics for inline functions, which aren't
3581 "local" in the object file sense. So in order to get a unique
3582 TU-local symbol, we must invoke the lhd version now. */
3583 lhd_set_decl_assembler_name (object);
3584
3585 *expr_p = NULL_TREE;
3586 break;
3587 }
3588
3589 /* If there are "lots" of initialized elements, even discounting
3590 those that are not address constants (and thus *must* be
3591 computed at runtime), then partition the constructor into
3592 constant and non-constant parts. Block copy the constant
3593 parts in, then generate code for the non-constant parts. */
3594 /* TODO. There's code in cp/typeck.c to do this. */
3595
3596 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3597 /* store_constructor will ignore the clearing of variable-sized
3598 objects. Initializers for such objects must explicitly set
3599 every field that needs to be set. */
3600 cleared = false;
d368135f 3601 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
953d0c90 3602 /* If the constructor isn't complete, clear the whole object
d368135f 3603 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3604
3605 ??? This ought not to be needed. For any element not present
3606 in the initializer, we should simply set them to zero. Except
3607 we'd need to *find* the elements that are not present, and that
3608 requires trickery to avoid quadratic compile-time behavior in
3609 large cases or excessive memory use in small cases. */
73ed17ff 3610 cleared = true;
953d0c90 3611 else if (num_ctor_elements - num_nonzero_elements
e04ad03d 3612 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3613 && num_nonzero_elements < num_ctor_elements / 4)
3614 /* If there are "lots" of zeros, it's more efficient to clear
3615 the memory and then set the nonzero elements. */
cce70747 3616 cleared = true;
3617 else
3618 cleared = false;
cce70747 3619
3620 /* If there are "lots" of initialized elements, and all of them
3621 are valid address constants, then the entire initializer can
3622 be dropped to memory, and then memcpy'd out. Don't do this
3623 for sparse arrays, though, as it's more efficient to follow
3624 the standard CONSTRUCTOR behavior of memset followed by
3625 individual element initialization. Also don't do this for small
3626 all-zero initializers (which aren't big enough to merit
3627 clearing), and don't try to make bitwise copies of
3628 TREE_ADDRESSABLE types.
3629
3630 We cannot apply such transformation when compiling chkp static
3631 initializer because creation of initializer image in the memory
3632 will require static initialization of bounds for it. It should
3633 result in another gimplification of similar initializer and we
3634 may fall into infinite loop. */
3635 if (valid_const_initializer
3636 && !(cleared || num_nonzero_elements == 0)
3637 && !TREE_ADDRESSABLE (type)
3638 && (!current_function_decl
3639 || !lookup_attribute ("chkp ctor",
3640 DECL_ATTRIBUTES (current_function_decl))))
3641 {
3642 HOST_WIDE_INT size = int_size_in_bytes (type);
3643 unsigned int align;
3644
3645 /* ??? We can still get unbounded array types, at least
3646 from the C++ front end. This seems wrong, but attempt
3647 to work around it for now. */
3648 if (size < 0)
3649 {
3650 size = int_size_in_bytes (TREE_TYPE (object));
3651 if (size >= 0)
3652 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3653 }
3654
3655 /* Find the maximum alignment we can assume for the object. */
3656 /* ??? Make use of DECL_OFFSET_ALIGN. */
3657 if (DECL_P (object))
3658 align = DECL_ALIGN (object);
3659 else
3660 align = TYPE_ALIGN (type);
3661
3662 /* Do a block move either if the size is so small as to make
3663 each individual move a sub-unit move on average, or if it
3664 is so large as to make individual moves inefficient. */
3665 if (size > 0
3666 && num_nonzero_elements > 1
3667 && (size < num_nonzero_elements
3668 || !can_move_by_pieces (size, align)))
26d44ae2 3669 {
3670 if (notify_temp_creation)
3671 return GS_ERROR;
3672
3673 walk_tree (&ctor, force_labels_r, NULL, NULL);
3674 ctor = tree_output_constant_def (ctor);
3675 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3676 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3677 TREE_OPERAND (*expr_p, 1) = ctor;
3678
3679 /* This is no longer an assignment of a CONSTRUCTOR, but
3680 we still may have processing to do on the LHS. So
3681 pretend we didn't do anything here to let that happen. */
3682 return GS_UNHANDLED;
3683 }
3684 }
3685
3686 /* If the target is volatile, we have non-zero elements and more than
3687 one field to assign, initialize the target from a temporary. */
3688 if (TREE_THIS_VOLATILE (object)
3689 && !TREE_ADDRESSABLE (type)
558af7ca 3690 && num_nonzero_elements > 0
9771b263 3691 && vec_safe_length (elts) > 1)
3692 {
3693 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3694 TREE_OPERAND (*expr_p, 0) = temp;
3695 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3696 *expr_p,
3697 build2 (MODIFY_EXPR, void_type_node,
3698 object, temp));
3699 return GS_OK;
3700 }
3701
3702 if (notify_temp_creation)
3703 return GS_OK;
3704
3705 /* If there are nonzero elements and if needed, pre-evaluate to capture
3706 elements overlapping with the lhs into temporaries. We must do this
3707 before clearing to fetch the values before they are zeroed-out. */
3708 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3709 {
3710 preeval_data.lhs_base_decl = get_base_address (object);
3711 if (!DECL_P (preeval_data.lhs_base_decl))
3712 preeval_data.lhs_base_decl = NULL;
3713 preeval_data.lhs_alias_set = get_alias_set (object);
3714
726a989a 3715 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3716 pre_p, post_p, &preeval_data);
3717 }
3718
3719 if (cleared)
3720 {
3721 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3722 Note that we still have to gimplify, in order to handle the
57d1dd87 3723 case of variable sized types. Avoid shared tree structures. */
4038c495 3724 CONSTRUCTOR_ELTS (ctor) = NULL;
726a989a 3725 TREE_SIDE_EFFECTS (ctor) = 0;
57d1dd87 3726 object = unshare_expr (object);
726a989a 3727 gimplify_stmt (expr_p, pre_p);
3728 }
3729
3730 /* If we have not block cleared the object, or if there are nonzero
3731 elements in the constructor, add assignments to the individual
3732 scalar fields of the object. */
3733 if (!cleared || num_nonzero_elements > 0)
85d89e76 3734 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3735
3736 *expr_p = NULL_TREE;
3737 }
3738 break;
3739
3740 case COMPLEX_TYPE:
3741 {
3742 tree r, i;
3743
3744 if (notify_temp_creation)
3745 return GS_OK;
3746
26d44ae2 3747 /* Extract the real and imaginary parts out of the ctor. */
3748 gcc_assert (elts->length () == 2);
3749 r = (*elts)[0].value;
3750 i = (*elts)[1].value;
3751 if (r == NULL || i == NULL)
3752 {
e8160c9a 3753 tree zero = build_zero_cst (TREE_TYPE (type));
3754 if (r == NULL)
3755 r = zero;
3756 if (i == NULL)
3757 i = zero;
3758 }
3759
3760 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3761 represent creation of a complex value. */
3762 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3763 {
3764 ctor = build_complex (type, r, i);
3765 TREE_OPERAND (*expr_p, 1) = ctor;
3766 }
3767 else
3768 {
b4257cfc 3769 ctor = build2 (COMPLEX_EXPR, type, r, i);
26d44ae2 3770 TREE_OPERAND (*expr_p, 1) = ctor;
3771 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3772 pre_p,
3773 post_p,
3774 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3775 fb_rvalue);
3776 }
3777 }
3778 break;
506e2710 3779
26d44ae2 3780 case VECTOR_TYPE:
3781 {
3782 unsigned HOST_WIDE_INT ix;
3783 constructor_elt *ce;
e89be13b 3784
3785 if (notify_temp_creation)
3786 return GS_OK;
3787
3788 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3789 if (TREE_CONSTANT (ctor))
3790 {
3791 bool constant_p = true;
3792 tree value;
3793
3794 /* Even when ctor is constant, it might contain non-*_CST
3795 elements, such as addresses or trapping values like
3796 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3797 in VECTOR_CST nodes. */
3798 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3799 if (!CONSTANT_CLASS_P (value))
3800 {
3801 constant_p = false;
3802 break;
3803 }
e89be13b 3804
3805 if (constant_p)
3806 {
3807 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3808 break;
3809 }
84816907 3810
9f1da821 3811 /* Don't reduce an initializer constant even if we can't
3812 make a VECTOR_CST. It won't do anything for us, and it'll
3813 prevent us from representing it as a single constant. */
3814 if (initializer_constant_valid_p (ctor, type))
3815 break;
3816
3817 TREE_CONSTANT (ctor) = 0;
4038c495 3818 }
e89be13b 3819
3820 /* Vector types use CONSTRUCTOR all the way through gimple
3821 compilation as a general initializer. */
9771b263 3822 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
3823 {
3824 enum gimplify_status tret;
3825 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3826 fb_rvalue);
3827 if (tret == GS_ERROR)
3828 ret = GS_ERROR;
3829 }
3830 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3831 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4038c495 3832 }
26d44ae2 3833 break;
6de9cd9a 3834
3835 default:
3836 /* So how did we get a CONSTRUCTOR for a scalar type? */
282899df 3837 gcc_unreachable ();
26d44ae2 3838 }
6de9cd9a 3839
3840 if (ret == GS_ERROR)
3841 return GS_ERROR;
3842 else if (want_value)
3843 {
3844 *expr_p = object;
3845 return GS_OK;
6de9cd9a 3846 }
26d44ae2 3847 else
3848 {
3849 /* If we have gimplified both sides of the initializer but have
3850 not emitted an assignment, do so now. */
3851 if (*expr_p)
3852 {
3853 tree lhs = TREE_OPERAND (*expr_p, 0);
3854 tree rhs = TREE_OPERAND (*expr_p, 1);
3855 gimple init = gimple_build_assign (lhs, rhs);
3856 gimplify_seq_add_stmt (pre_p, init);
3857 *expr_p = NULL;
3858 }
3859
3860 return GS_ALL_DONE;
3861 }
26d44ae2 3862}
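
/* Illustrative sketch (hypothetical sources, outcomes approximate): the
   aggregate cases above choose among several strategies, for example

       const int tbl[4] = { 1, 2, 3, 4 };   may be promoted to a TREE_STATIC
                                            variable whose DECL_INITIAL is
                                            kept as the constant table;
       struct P p = { f (), 1 };            becomes per-field MODIFY_EXPRs,
                                            with f () gimplified first.

   An initializer that is mostly zeros may instead have the object cleared
   as a block and only the nonzero elements stored, while complex and
   vector constructors are folded to COMPLEX_EXPR / VECTOR_CST where
   possible.  */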
6de9cd9a 3863
3864/* Given a pointer value OP0, return a simplified version of an
3865 indirection through OP0, or NULL_TREE if no simplification is
3866 possible. This may only be applied to a rhs of an expression.
3867 Note that the resulting type may be different from the type pointed
3868 to in the sense that it is still compatible from the langhooks
3869 point of view. */
3870
3871static tree
3872gimple_fold_indirect_ref_rhs (tree t)
3873{
3874 return gimple_fold_indirect_ref (t);
3875}
3876
3877/* Subroutine of gimplify_modify_expr to do simplifications of
3878 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
3879 something changes. */
6de9cd9a 3880
26d44ae2 3881static enum gimplify_status
3882gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
3883 gimple_seq *pre_p, gimple_seq *post_p,
3884 bool want_value)
26d44ae2 3885{
3886 enum gimplify_status ret = GS_UNHANDLED;
3887 bool changed;
6de9cd9a 3888
3889 do
3890 {
3891 changed = false;
3892 switch (TREE_CODE (*from_p))
3893 {
3894 case VAR_DECL:
3895 /* If we're assigning from a read-only variable initialized with
3896 a constructor, do the direct assignment from the constructor,
3897 but only if neither source nor target are volatile since this
3898 latter assignment might end up being done on a per-field basis. */
3899 if (DECL_INITIAL (*from_p)
3900 && TREE_READONLY (*from_p)
3901 && !TREE_THIS_VOLATILE (*from_p)
3902 && !TREE_THIS_VOLATILE (*to_p)
3903 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
3904 {
3905 tree old_from = *from_p;
3906 enum gimplify_status subret;
3907
3908 /* Move the constructor into the RHS. */
3909 *from_p = unshare_expr (DECL_INITIAL (*from_p));
3910
3911 /* Let's see if gimplify_init_constructor will need to put
3912 it in memory. */
3913 subret = gimplify_init_constructor (expr_p, NULL, NULL,
3914 false, true);
3915 if (subret == GS_ERROR)
3916 {
3917 /* If so, revert the change. */
3918 *from_p = old_from;
3919 }
3920 else
3921 {
3922 ret = GS_OK;
3923 changed = true;
3924 }
3925 }
3926 break;
3927 case INDIRECT_REF:
4caa08da 3928 {
6d729f28 3929 /* If we have code like
ffed8a01 3930
6d729f28 3931 *(const A*)(A*)&x
ffed8a01 3932
3933 where the type of "x" is a (possibly cv-qualified variant
3934 of "A"), treat the entire expression as identical to "x".
3935 This kind of code arises in C++ when an object is bound
3936 to a const reference, and if "x" is a TARGET_EXPR we want
3937 to take advantage of the optimization below. */
06baaba3 3938 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
3939 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3940 if (t)
ffed8a01 3941 {
3942 if (TREE_THIS_VOLATILE (t) != volatile_p)
3943 {
3944 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3945 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
3946 build_fold_addr_expr (t));
3947 if (REFERENCE_CLASS_P (t))
3948 TREE_THIS_VOLATILE (t) = volatile_p;
3949 }
3950 *from_p = t;
3951 ret = GS_OK;
3952 changed = true;
ffed8a01 3953 }
3954 break;
3955 }
3956
3957 case TARGET_EXPR:
3958 {
3959 /* If we are initializing something from a TARGET_EXPR, strip the
3960 TARGET_EXPR and initialize it directly, if possible. This can't
3961 be done if the initializer is void, since that implies that the
3962 temporary is set in some non-trivial way.
3963
3964 ??? What about code that pulls out the temp and uses it
3965 elsewhere? I think that such code never uses the TARGET_EXPR as
3966 an initializer. If I'm wrong, we'll die because the temp won't
3967 have any RTL. In that case, I guess we'll need to replace
3968 references somehow. */
3969 tree init = TARGET_EXPR_INITIAL (*from_p);
3970
3971 if (init
3972 && !VOID_TYPE_P (TREE_TYPE (init)))
ffed8a01 3973 {
6d729f28 3974 *from_p = init;
ffed8a01 3975 ret = GS_OK;
6d729f28 3976 changed = true;
ffed8a01 3977 }
4caa08da 3978 }
6d729f28 3979 break;
f98625f6 3980
3981 case COMPOUND_EXPR:
3982 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3983 caught. */
3984 gimplify_compound_expr (from_p, pre_p, true);
3985 ret = GS_OK;
3986 changed = true;
3987 break;
f98625f6 3988
6d729f28 3989 case CONSTRUCTOR:
3990 /* If we already made some changes, let the front end have a
3991 crack at this before we break it down. */
3992 if (ret != GS_UNHANDLED)
3993 break;
3994 /* If we're initializing from a CONSTRUCTOR, break this into
3995 individual MODIFY_EXPRs. */
3996 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
3997 false);
3998
3999 case COND_EXPR:
4000 /* If we're assigning to a non-register type, push the assignment
4001 down into the branches. This is mandatory for ADDRESSABLE types,
4002 since we cannot generate temporaries for such, but it saves a
4003 copy in other cases as well. */
4004 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
f98625f6 4005 {
4006 /* This code should mirror the code in gimplify_cond_expr. */
4007 enum tree_code code = TREE_CODE (*expr_p);
4008 tree cond = *from_p;
4009 tree result = *to_p;
4010
4011 ret = gimplify_expr (&result, pre_p, post_p,
4012 is_gimple_lvalue, fb_lvalue);
4013 if (ret != GS_ERROR)
4014 ret = GS_OK;
4015
4016 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4017 TREE_OPERAND (cond, 1)
4018 = build2 (code, void_type_node, result,
4019 TREE_OPERAND (cond, 1));
4020 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4021 TREE_OPERAND (cond, 2)
4022 = build2 (code, void_type_node, unshare_expr (result),
4023 TREE_OPERAND (cond, 2));
4024
4025 TREE_TYPE (cond) = void_type_node;
4026 recalculate_side_effects (cond);
4027
4028 if (want_value)
4029 {
4030 gimplify_and_add (cond, pre_p);
4031 *expr_p = unshare_expr (result);
4032 }
4033 else
4034 *expr_p = cond;
4035 return ret;
f98625f6 4036 }
f98625f6 4037 break;
f98625f6 4038
4039 case CALL_EXPR:
4040 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4041 return slot so that we don't generate a temporary. */
4042 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4043 && aggregate_value_p (*from_p, *from_p))
26d44ae2 4044 {
4045 bool use_target;
4046
4047 if (!(rhs_predicate_for (*to_p))(*from_p))
4048 /* If we need a temporary, *to_p isn't accurate. */
4049 use_target = false;
ad19c4be 4050 /* It's OK to use the return slot directly unless it's an NRV. */
4051 else if (TREE_CODE (*to_p) == RESULT_DECL
4052 && DECL_NAME (*to_p) == NULL_TREE
4053 && needs_to_live_in_memory (*to_p))
4054 use_target = true;
4055 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4056 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4057 /* Don't force regs into memory. */
4058 use_target = false;
4059 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4060 /* It's OK to use the target directly if it's being
4061 initialized. */
4062 use_target = true;
4063 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4064 /* Always use the target and thus RSO for variable-sized types.
4065 GIMPLE cannot deal with a variable-sized assignment
4066 embedded in a call statement. */
4067 use_target = true;
4068 else if (TREE_CODE (*to_p) != SSA_NAME
4069 && (!is_gimple_variable (*to_p)
4070 || needs_to_live_in_memory (*to_p)))
4071 /* Don't use the original target if it's already addressable;
4072 if its address escapes, and the called function uses the
4073 NRV optimization, a conforming program could see *to_p
4074 change before the called function returns; see c++/19317.
4075 When optimizing, the return_slot pass marks more functions
4076 as safe after we have escape info. */
4077 use_target = false;
4078 else
4079 use_target = true;
4080
4081 if (use_target)
4082 {
4083 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4084 mark_addressable (*to_p);
4085 }
26d44ae2 4086 }
6d729f28 4087 break;
6de9cd9a 4088
4089 case WITH_SIZE_EXPR:
4090 /* Likewise for calls that return an aggregate of non-constant size,
4091 since we would not be able to generate a temporary at all. */
4092 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4093 {
4094 *from_p = TREE_OPERAND (*from_p, 0);
4095 /* We don't change ret in this case because the
4096 WITH_SIZE_EXPR might have been added in
4097 gimplify_modify_expr, so returning GS_OK would lead to an
4098 infinite loop. */
4099 changed = true;
4100 }
4101 break;
6de9cd9a 4102
4103 /* If we're initializing from a container, push the initialization
4104 inside it. */
4105 case CLEANUP_POINT_EXPR:
4106 case BIND_EXPR:
4107 case STATEMENT_LIST:
26d44ae2 4108 {
4109 tree wrap = *from_p;
4110 tree t;
dae7ec87 4111
4112 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4113 fb_lvalue);
dae7ec87
JM
4114 if (ret != GS_ERROR)
4115 ret = GS_OK;
4116
6d729f28
JM
4117 t = voidify_wrapper_expr (wrap, *expr_p);
4118 gcc_assert (t == *expr_p);
dae7ec87
JM
4119
4120 if (want_value)
4121 {
6d729f28
JM
4122 gimplify_and_add (wrap, pre_p);
4123 *expr_p = unshare_expr (*to_p);
dae7ec87
JM
4124 }
4125 else
6d729f28
JM
4126 *expr_p = wrap;
4127 return GS_OK;
26d44ae2 4128 }
6de9cd9a 4129
6d729f28 4130 case COMPOUND_LITERAL_EXPR:
fa47911c 4131 {
6d729f28
JM
4132 tree complit = TREE_OPERAND (*expr_p, 1);
4133 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4134 tree decl = DECL_EXPR_DECL (decl_s);
4135 tree init = DECL_INITIAL (decl);
4136
4137 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4138 into struct T x = { 0, 1, 2 } if the address of the
4139 compound literal has never been taken. */
4140 if (!TREE_ADDRESSABLE (complit)
4141 && !TREE_ADDRESSABLE (decl)
4142 && init)
fa47911c 4143 {
4144 *expr_p = copy_node (*expr_p);
4145 TREE_OPERAND (*expr_p, 1) = init;
4146 return GS_OK;
4147 }
4148 }
4149
4150 default:
4151 break;
2ec5deb5 4152 }
4153 }
4154 while (changed);
6de9cd9a 4155
4156 return ret;
4157}
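
/* Illustrative sketch (hypothetical sources, approximate results): two of
   the RHS simplifications above:

       struct S s = f ();        for a call that returns in memory, "s"
                                 itself is used as the return slot, so no
                                 temporary is created;
       static const struct S c = { 1, 2 };
       s = c;                    the read-only initializer is substituted,
                                 so the copy is handled like s = { 1, 2 }
                                 and then split up by
                                 gimplify_init_constructor.  */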
4158
4159
4160/* Return true if T looks like a valid GIMPLE statement. */
4161
4162static bool
4163is_gimple_stmt (tree t)
4164{
4165 const enum tree_code code = TREE_CODE (t);
4166
4167 switch (code)
4168 {
4169 case NOP_EXPR:
4170 /* The only valid NOP_EXPR is the empty statement. */
4171 return IS_EMPTY_STMT (t);
4172
4173 case BIND_EXPR:
4174 case COND_EXPR:
4175 /* These are only valid if they're void. */
4176 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4177
4178 case SWITCH_EXPR:
4179 case GOTO_EXPR:
4180 case RETURN_EXPR:
4181 case LABEL_EXPR:
4182 case CASE_LABEL_EXPR:
4183 case TRY_CATCH_EXPR:
4184 case TRY_FINALLY_EXPR:
4185 case EH_FILTER_EXPR:
4186 case CATCH_EXPR:
4187 case ASM_EXPR:
4188 case STATEMENT_LIST:
4189 case OMP_PARALLEL:
4190 case OMP_FOR:
74bf76ed 4191 case OMP_SIMD:
c02065fc 4192 case CILK_SIMD:
acf0174b 4193 case OMP_DISTRIBUTE:
4194 case OMP_SECTIONS:
4195 case OMP_SECTION:
4196 case OMP_SINGLE:
4197 case OMP_MASTER:
acf0174b 4198 case OMP_TASKGROUP:
4199 case OMP_ORDERED:
4200 case OMP_CRITICAL:
4201 case OMP_TASK:
4202 /* These are always void. */
4203 return true;
4204
4205 case CALL_EXPR:
4206 case MODIFY_EXPR:
4207 case PREDICT_EXPR:
4208 /* These are valid regardless of their type. */
4209 return true;
4210
4211 default:
4212 return false;
4213 }
4214}
4215
4216
4217/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4218 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4219 DECL_GIMPLE_REG_P set.
4220
4221 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4222 other, unmodified part of the complex object just before the total store.
4223 As a consequence, if the object is still uninitialized, an undefined value
4224 will be loaded into a register, which may result in a spurious exception
4225 if the register is floating-point and the value happens to be a signaling
4226 NaN for example. Then the fully-fledged complex operations lowering pass
4227 followed by a DCE pass are necessary in order to fix things up. */
4228
4229static enum gimplify_status
4230gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4231 bool want_value)
4232{
4233 enum tree_code code, ocode;
4234 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4235
4236 lhs = TREE_OPERAND (*expr_p, 0);
4237 rhs = TREE_OPERAND (*expr_p, 1);
4238 code = TREE_CODE (lhs);
4239 lhs = TREE_OPERAND (lhs, 0);
4240
4241 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4242 other = build1 (ocode, TREE_TYPE (rhs), lhs);
8d2b0410 4243 TREE_NO_WARNING (other) = 1;
4244 other = get_formal_tmp_var (other, pre_p);
4245
4246 realpart = code == REALPART_EXPR ? rhs : other;
4247 imagpart = code == REALPART_EXPR ? other : rhs;
4248
4249 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4250 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4251 else
4252 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4253
4254 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4255 *expr_p = (want_value) ? rhs : NULL_TREE;
4256
4257 return GS_ALL_DONE;
4258}
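
/* Illustrative sketch (approximate GIMPLE): a partial store such as

       __real__ c = x;

   to a non-addressable complex variable "c" is promoted to a total store,
   roughly

       D.1 = __imag__ c;
       c = COMPLEX_EXPR <x, D.1>;

   which is what lets "c" be treated as a gimple register.  Note the load
   of the untouched half, as described in the comment above.  */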
4259
206048bd 4260/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4261
4262 modify_expr
4263 : varname '=' rhs
4264 | '*' ID '=' rhs
4265
4266 PRE_P points to the list where side effects that must happen before
4267 *EXPR_P should be stored.
4268
4269 POST_P points to the list where side effects that must happen after
4270 *EXPR_P should be stored.
4271
4272 WANT_VALUE is nonzero iff we want to use the value of this expression
4273 in another expression. */
4274
4275static enum gimplify_status
4276gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4277 bool want_value)
6de9cd9a 4278{
4279 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4280 tree *to_p = &TREE_OPERAND (*expr_p, 0);
44de5aeb 4281 enum gimplify_status ret = GS_UNHANDLED;
726a989a 4282 gimple assign;
db3927fb 4283 location_t loc = EXPR_LOCATION (*expr_p);
6da8be89 4284 gimple_stmt_iterator gsi;
6de9cd9a 4285
4286 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4287 || TREE_CODE (*expr_p) == INIT_EXPR);
4288
4289 if (fn_contains_cilk_spawn_p (cfun)
4290 && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p)
4291 && !seen_error ())
4292 return (enum gimplify_status)
4293 lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p, post_p);
6de9cd9a 4294
4295 /* Trying to simplify a clobber using normal logic doesn't work,
4296 so handle it here. */
4297 if (TREE_CLOBBER_P (*from_p))
4298 {
4299 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4300 if (ret == GS_ERROR)
4301 return ret;
4302 gcc_assert (!want_value
4303 && (TREE_CODE (*to_p) == VAR_DECL
4304 || TREE_CODE (*to_p) == MEM_REF));
4305 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4306 *expr_p = NULL;
4307 return GS_ALL_DONE;
4308 }
4309
4310 /* Insert pointer conversions required by the middle-end that are not
4311 required by the frontend. This fixes middle-end type checking for,
4312 for example, gcc.dg/redecl-6.c. */
daad0278 4313 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4314 {
4315 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4316 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
db3927fb 4317 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
1b24a790
RG
4318 }
4319
4320 /* See if any simplifications can be done based on what the RHS is. */
4321 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4322 want_value);
4323 if (ret != GS_UNHANDLED)
4324 return ret;
4325
4326 /* For zero sized types only gimplify the left hand side and right hand
4327 side as statements and throw away the assignment. Do this after
4328 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4329 types properly. */
753b34d7 4330 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
412f8986 4331 {
4332 gimplify_stmt (from_p, pre_p);
4333 gimplify_stmt (to_p, pre_p);
412f8986
AP
4334 *expr_p = NULL_TREE;
4335 return GS_ALL_DONE;
4336 }
6de9cd9a 4337
4338 /* If the value being copied is of variable width, compute the length
4339 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4340 before gimplifying any of the operands so that we can resolve any
4341 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4342 the size of the expression to be copied, not of the destination, so
726a989a 4343 that is what we must do here. */
d25cee4d 4344 maybe_with_size_expr (from_p);
6de9cd9a 4345
4346 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4347 if (ret == GS_ERROR)
4348 return ret;
6de9cd9a 4349
4350 /* As a special case, we have to temporarily allow for assignments
4351 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4352 a toplevel statement, when gimplifying the GENERIC expression
4353 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4354 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4355
4356 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4357 prevent gimplify_expr from trying to create a new temporary for
4358 foo's LHS, we tell it that it should only gimplify until it
4359 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4360 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4361 and all we need to do here is set 'a' to be its LHS. */
4362 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4363 fb_rvalue);
4364 if (ret == GS_ERROR)
4365 return ret;
4366
4367 /* Now see if the above changed *from_p to something we handle specially. */
4368 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4369 want_value);
6de9cd9a
DN
4370 if (ret != GS_UNHANDLED)
4371 return ret;
4372
4373 /* If we've got a variable sized assignment between two lvalues (i.e. does
4374 not involve a call), then we can make things a bit more straightforward
4375 by converting the assignment to memcpy or memset. */
4376 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4377 {
4378 tree from = TREE_OPERAND (*from_p, 0);
4379 tree size = TREE_OPERAND (*from_p, 1);
4380
4381 if (TREE_CODE (from) == CONSTRUCTOR)
4382 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4383
e847cc68 4384 if (is_gimple_addressable (from))
4385 {
4386 *from_p = from;
4387 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4388 pre_p);
4389 }
4390 }
4391
4392 /* Transform partial stores to non-addressable complex variables into
4393 total stores. This allows us to use real instead of virtual operands
4394 for these variables, which improves optimization. */
4395 if ((TREE_CODE (*to_p) == REALPART_EXPR
4396 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4397 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4398 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4399
4400 /* Try to alleviate the effects of the gimplification creating artificial
4401 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4402 if (!gimplify_ctxp->into_ssa
f2896bc9 4403 && TREE_CODE (*from_p) == VAR_DECL
4404 && DECL_IGNORED_P (*from_p)
4405 && DECL_P (*to_p)
4406 && !DECL_IGNORED_P (*to_p))
4407 {
4408 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4409 DECL_NAME (*from_p)
4410 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
839b422f 4411 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
f173837a 4412 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4413 }
4414
4415 if (want_value && TREE_THIS_VOLATILE (*to_p))
4416 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4417
4418 if (TREE_CODE (*from_p) == CALL_EXPR)
4419 {
4420 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4421 instead of a GIMPLE_ASSIGN. */
4422 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4423 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4424 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
726a989a 4425 assign = gimple_build_call_from_tree (*from_p);
f20ca725 4426 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
f6b64c35 4427 notice_special_calls (assign);
4428 if (!gimple_call_noreturn_p (assign))
4429 gimple_call_set_lhs (assign, *to_p);
f173837a 4430 }
726a989a 4431 else
4432 {
4433 assign = gimple_build_assign (*to_p, *from_p);
4434 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4435 }
f173837a 4436
726a989a 4437 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6de9cd9a 4438 {
4439 /* We should have got an SSA name from the start. */
4440 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
726a989a 4441 }
07beea0d 4442
4443 gimplify_seq_add_stmt (pre_p, assign);
4444 gsi = gsi_last (*pre_p);
4445 /* Don't fold stmts inside of target construct. We'll do it
4446 during omplower pass instead. */
4447 struct gimplify_omp_ctx *ctx;
4448 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
4449 if (ctx->region_type == ORT_TARGET)
4450 break;
4451 if (ctx == NULL)
4452 fold_stmt (&gsi);
6da8be89 4453
4454 if (want_value)
4455 {
8f0fe813 4456 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
17ad5b5e 4457 return GS_OK;
6de9cd9a 4458 }
4459 else
4460 *expr_p = NULL;
6de9cd9a 4461
17ad5b5e 4462 return GS_ALL_DONE;
4463}
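
/* Illustrative sketch (approximate result): because GIMPLE represents a
   function call as a statement of its own, an assignment like

       a = foo (b);

   is not emitted as a GIMPLE_ASSIGN whose rhs is a call; the code above
   builds the tuple GIMPLE_CALL <foo, b> directly and sets "a" as its lhs
   (calls to noreturn functions keep no lhs).  */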
4464
4465/* Gimplify a comparison between two variable-sized objects. Do this
4466 with a call to BUILT_IN_MEMCMP. */
4467
4468static enum gimplify_status
4469gimplify_variable_sized_compare (tree *expr_p)
4470{
692ad9aa 4471 location_t loc = EXPR_LOCATION (*expr_p);
44de5aeb
RK
4472 tree op0 = TREE_OPERAND (*expr_p, 0);
4473 tree op1 = TREE_OPERAND (*expr_p, 1);
692ad9aa 4474 tree t, arg, dest, src, expr;
4475
4476 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4477 arg = unshare_expr (arg);
4478 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4479 src = build_fold_addr_expr_loc (loc, op1);
4480 dest = build_fold_addr_expr_loc (loc, op0);
e79983f4 4481 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
db3927fb 4482 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4483
4484 expr
b4257cfc 4485 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4486 SET_EXPR_LOCATION (expr, loc);
4487 *expr_p = expr;
4488
4489 return GS_OK;
4490}
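
/* Illustrative sketch (this path is mainly exercised by front ends, such
   as Ada, that allow equality tests on variable-sized aggregates; result
   approximate): a comparison of two such objects A and B becomes roughly

       D.1 = __builtin_memcmp (&a, &b, <size of A>);
       D.2 = D.1 == 0;

   with the size taken from TYPE_SIZE_UNIT after placeholder
   substitution.  */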
4491
4492/* Gimplify a comparison between two aggregate objects of integral scalar
4493 mode as a comparison between the bitwise equivalent scalar values. */
61c25908
OH
4494
4495static enum gimplify_status
4496gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4497{
db3927fb 4498 location_t loc = EXPR_LOCATION (*expr_p);
4499 tree op0 = TREE_OPERAND (*expr_p, 0);
4500 tree op1 = TREE_OPERAND (*expr_p, 1);
4501
4502 tree type = TREE_TYPE (op0);
4503 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4504
4505 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4506 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4507
4508 *expr_p
db3927fb 4509 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4510
4511 return GS_OK;
4512}
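
/* Illustrative sketch (hypothetical types, result approximate): for an
   aggregate that fits in an integral scalar mode, say a two-short struct
   occupying SImode, an equality comparison of X and Y is rewritten as

       VIEW_CONVERT_EXPR<int>(x) == VIEW_CONVERT_EXPR<int>(y)

   i.e. a single scalar comparison of the bitwise images instead of a
   memcmp or a field-by-field test.  */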
4513
4514/* Gimplify an expression sequence. This function gimplifies each
4515 expression and rewrites the original expression with the last
4516 expression of the sequence in GIMPLE form.
4517
4518 PRE_P points to the list where the side effects for all the
4519 expressions in the sequence will be emitted.
d3147f64 4520
6de9cd9a 4521 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6de9cd9a
DN
4522
4523static enum gimplify_status
726a989a 4524gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a
DN
4525{
4526 tree t = *expr_p;
4527
4528 do
4529 {
4530 tree *sub_p = &TREE_OPERAND (t, 0);
4531
4532 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4533 gimplify_compound_expr (sub_p, pre_p, false);
4534 else
726a989a 4535 gimplify_stmt (sub_p, pre_p);
4536
4537 t = TREE_OPERAND (t, 1);
4538 }
4539 while (TREE_CODE (t) == COMPOUND_EXPR);
4540
4541 *expr_p = t;
4542 if (want_value)
4543 return GS_OK;
4544 else
4545 {
726a989a 4546 gimplify_stmt (expr_p, pre_p);
4547 return GS_ALL_DONE;
4548 }
4549}
4550
4551/* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4552 gimplify. After gimplification, EXPR_P will point to a new temporary
4553 that holds the original value of the SAVE_EXPR node.
6de9cd9a 4554
726a989a 4555 PRE_P points to the list where side effects that must happen before
ad19c4be 4556 *EXPR_P should be stored. */
6de9cd9a
DN
4557
4558static enum gimplify_status
726a989a 4559gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
4560{
4561 enum gimplify_status ret = GS_ALL_DONE;
4562 tree val;
4563
282899df 4564 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4565 val = TREE_OPERAND (*expr_p, 0);
4566
4567 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4568 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
17ad5b5e 4569 {
7f5e6307
RH
4570 /* The operand may be a void-valued expression such as SAVE_EXPRs
4571 generated by the Java frontend for class initialization. It is
4572 being executed only for its side-effects. */
4573 if (TREE_TYPE (val) == void_type_node)
4574 {
4575 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4576 is_gimple_stmt, fb_none);
7f5e6307
RH
4577 val = NULL;
4578 }
4579 else
4580 val = get_initialized_tmp_var (val, pre_p, post_p);
4581
4582 TREE_OPERAND (*expr_p, 0) = val;
4583 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
17ad5b5e 4584 }
6de9cd9a 4585
4586 *expr_p = val;
4587
4588 return ret;
4589}
4590
ad19c4be 4591/* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4592
4593 unary_expr
4594 : ...
4595 | '&' varname
4596 ...
4597
4598 PRE_P points to the list where side effects that must happen before
4599 *EXPR_P should be stored.
4600
4601 POST_P points to the list where side effects that must happen after
4602 *EXPR_P should be stored. */
4603
4604static enum gimplify_status
726a989a 4605gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4606{
4607 tree expr = *expr_p;
4608 tree op0 = TREE_OPERAND (expr, 0);
4609 enum gimplify_status ret;
db3927fb 4610 location_t loc = EXPR_LOCATION (*expr_p);
4611
4612 switch (TREE_CODE (op0))
4613 {
4614 case INDIRECT_REF:
67f23620 4615 do_indirect_ref:
6de9cd9a
DN
4616 /* Check if we are dealing with an expression of the form '&*ptr'.
4617 While the front end folds away '&*ptr' into 'ptr', these
4618 expressions may be generated internally by the compiler (e.g.,
4619 builtins like __builtin_va_end). */
4620 /* Caution: the silent array decomposition semantics we allow for
4621 ADDR_EXPR means we can't always discard the pair. */
c87ac7e8
AO
4622 /* Gimplification of the ADDR_EXPR operand may drop
4623 cv-qualification conversions, so make sure we add them if
4624 needed. */
4625 {
4626 tree op00 = TREE_OPERAND (op0, 0);
4627 tree t_expr = TREE_TYPE (expr);
4628 tree t_op00 = TREE_TYPE (op00);
4629
f4088621 4630 if (!useless_type_conversion_p (t_expr, t_op00))
db3927fb 4631 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4632 *expr_p = op00;
4633 ret = GS_OK;
4634 }
4635 break;
4636
4637 case VIEW_CONVERT_EXPR:
4638 /* Take the address of our operand and then convert it to the type of
4639 this ADDR_EXPR.
4640
4641 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4642 all clear. The impact of this transformation is even less clear. */
4643
4644 /* If the operand is a useless conversion, look through it. Doing so
4645 guarantees that the ADDR_EXPR and its operand will remain of the
4646 same type. */
4647 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
317c0092 4648 op0 = TREE_OPERAND (op0, 0);
91804752 4649
db3927fb
AH
4650 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4651 build_fold_addr_expr_loc (loc,
4652 TREE_OPERAND (op0, 0)));
44de5aeb 4653 ret = GS_OK;
6de9cd9a
DN
4654 break;
4655
4656 default:
4657 /* We use fb_either here because the C frontend sometimes takes
5201931e
JM
4658 the address of a call that returns a struct; see
4659 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4660 the implied temporary explicit. */
936d04b6 4661
f76d6e6f 4662 /* Make the operand addressable. */
6de9cd9a 4663 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
e847cc68 4664 is_gimple_addressable, fb_either);
8b17cc05
RG
4665 if (ret == GS_ERROR)
4666 break;
67f23620 4667
f76d6e6f
EB
4668 /* Then mark it. Beware that it may not be possible to do so directly
4669 if a temporary has been created by the gimplification. */
4670 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
9e51aaf5 4671
8b17cc05 4672 op0 = TREE_OPERAND (expr, 0);
6de9cd9a 4673
8b17cc05
RG
4674 /* For various reasons, the gimplification of the expression
4675 may have made a new INDIRECT_REF. */
4676 if (TREE_CODE (op0) == INDIRECT_REF)
4677 goto do_indirect_ref;
4678
6b8b9e42
RG
4679 mark_addressable (TREE_OPERAND (expr, 0));
4680
4681 /* The FEs may end up building ADDR_EXPRs early on a decl with
4682 an incomplete type. Re-build ADDR_EXPRs in canonical form
4683 here. */
4684 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4685 *expr_p = build_fold_addr_expr (op0);
4686
8b17cc05 4687 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6b8b9e42
RG
4688 recompute_tree_invariant_for_addr_expr (*expr_p);
4689
4690 /* If we re-built the ADDR_EXPR add a conversion to the original type
4691 if required. */
4692 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4693 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
8b17cc05 4694
6de9cd9a
DN
4695 break;
4696 }
4697
6de9cd9a
DN
4698 return ret;
4699}
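/* Rough illustration of the INDIRECT_REF case above (names invented): an
   internally generated '&*p', e.g. from expanding __builtin_va_end, is
   collapsed back to plain 'p'; if the type of the ADDR_EXPR and the type
   of 'p' are not trivially compatible, a fold_convert of 'p' to the
   ADDR_EXPR's type is used instead.  */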
4700
4701/* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
 4702 values; output operands should be gimple lvalues. */
4703
4704static enum gimplify_status
726a989a 4705gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a 4706{
726a989a
RB
4707 tree expr;
4708 int noutputs;
4709 const char **oconstraints;
6de9cd9a
DN
4710 int i;
4711 tree link;
4712 const char *constraint;
4713 bool allows_mem, allows_reg, is_inout;
4714 enum gimplify_status ret, tret;
726a989a 4715 gimple stmt;
9771b263
DN
4716 vec<tree, va_gc> *inputs;
4717 vec<tree, va_gc> *outputs;
4718 vec<tree, va_gc> *clobbers;
4719 vec<tree, va_gc> *labels;
726a989a 4720 tree link_next;
b8698a0f 4721
726a989a
RB
4722 expr = *expr_p;
4723 noutputs = list_length (ASM_OUTPUTS (expr));
4724 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4725
9771b263
DN
4726 inputs = NULL;
4727 outputs = NULL;
4728 clobbers = NULL;
4729 labels = NULL;
6de9cd9a 4730
6de9cd9a 4731 ret = GS_ALL_DONE;
726a989a
RB
4732 link_next = NULL_TREE;
4733 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6de9cd9a 4734 {
2c68ba8e 4735 bool ok;
726a989a
RB
4736 size_t constraint_len;
4737
4738 link_next = TREE_CHAIN (link);
4739
4740 oconstraints[i]
4741 = constraint
6de9cd9a 4742 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6db081f1
AP
4743 constraint_len = strlen (constraint);
4744 if (constraint_len == 0)
4745 continue;
6de9cd9a 4746
2c68ba8e
LB
4747 ok = parse_output_constraint (&constraint, i, 0, 0,
4748 &allows_mem, &allows_reg, &is_inout);
4749 if (!ok)
4750 {
4751 ret = GS_ERROR;
4752 is_inout = false;
4753 }
6de9cd9a
DN
4754
4755 if (!allows_reg && allows_mem)
936d04b6 4756 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
4757
4758 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4759 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4760 fb_lvalue | fb_mayfail);
4761 if (tret == GS_ERROR)
4762 {
4763 error ("invalid lvalue in asm output %d", i);
4764 ret = tret;
4765 }
4766
9771b263 4767 vec_safe_push (outputs, link);
726a989a
RB
4768 TREE_CHAIN (link) = NULL_TREE;
4769
6de9cd9a
DN
4770 if (is_inout)
4771 {
4772 /* An input/output operand. To give the optimizers more
4773 flexibility, split it into separate input and output
4774 operands. */
4775 tree input;
4776 char buf[10];
6de9cd9a
DN
4777
4778 /* Turn the in/out constraint into an output constraint. */
4779 char *p = xstrdup (constraint);
4780 p[0] = '=';
4781 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6de9cd9a
DN
4782
4783 /* And add a matching input constraint. */
4784 if (allows_reg)
4785 {
4786 sprintf (buf, "%d", i);
372d72d9
JJ
4787
4788 /* If there are multiple alternatives in the constraint,
4789 handle each of them individually. Those that allow register
4790 will be replaced with operand number, the others will stay
4791 unchanged. */
4792 if (strchr (p, ',') != NULL)
4793 {
4794 size_t len = 0, buflen = strlen (buf);
4795 char *beg, *end, *str, *dst;
4796
4797 for (beg = p + 1;;)
4798 {
4799 end = strchr (beg, ',');
4800 if (end == NULL)
4801 end = strchr (beg, '\0');
4802 if ((size_t) (end - beg) < buflen)
4803 len += buflen + 1;
4804 else
4805 len += end - beg + 1;
4806 if (*end)
4807 beg = end + 1;
4808 else
4809 break;
4810 }
4811
858904db 4812 str = (char *) alloca (len);
372d72d9
JJ
4813 for (beg = p + 1, dst = str;;)
4814 {
4815 const char *tem;
4816 bool mem_p, reg_p, inout_p;
4817
4818 end = strchr (beg, ',');
4819 if (end)
4820 *end = '\0';
4821 beg[-1] = '=';
4822 tem = beg - 1;
4823 parse_output_constraint (&tem, i, 0, 0,
4824 &mem_p, &reg_p, &inout_p);
4825 if (dst != str)
4826 *dst++ = ',';
4827 if (reg_p)
4828 {
4829 memcpy (dst, buf, buflen);
4830 dst += buflen;
4831 }
4832 else
4833 {
4834 if (end)
4835 len = end - beg;
4836 else
4837 len = strlen (beg);
4838 memcpy (dst, beg, len);
4839 dst += len;
4840 }
4841 if (end)
4842 beg = end + 1;
4843 else
4844 break;
4845 }
4846 *dst = '\0';
4847 input = build_string (dst - str, str);
4848 }
4849 else
4850 input = build_string (strlen (buf), buf);
6de9cd9a
DN
4851 }
4852 else
4853 input = build_string (constraint_len - 1, constraint + 1);
372d72d9
JJ
4854
4855 free (p);
4856
6de9cd9a
DN
4857 input = build_tree_list (build_tree_list (NULL_TREE, input),
4858 unshare_expr (TREE_VALUE (link)));
4859 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4860 }
4861 }
4862
726a989a
RB
4863 link_next = NULL_TREE;
4864 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6de9cd9a 4865 {
726a989a
RB
4866 link_next = TREE_CHAIN (link);
4867 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6de9cd9a
DN
4868 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4869 oconstraints, &allows_mem, &allows_reg);
4870
f497c16c
JJ
4871 /* If we can't make copies, we can only accept memory. */
4872 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4873 {
4874 if (allows_mem)
4875 allows_reg = 0;
4876 else
4877 {
4878 error ("impossible constraint in %<asm%>");
4879 error ("non-memory input %d must stay in memory", i);
4880 return GS_ERROR;
4881 }
4882 }
4883
6de9cd9a
DN
4884 /* If the operand is a memory input, it should be an lvalue. */
4885 if (!allows_reg && allows_mem)
4886 {
502c5084
JJ
4887 tree inputv = TREE_VALUE (link);
4888 STRIP_NOPS (inputv);
4889 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
4890 || TREE_CODE (inputv) == PREINCREMENT_EXPR
4891 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
4892 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
4893 TREE_VALUE (link) = error_mark_node;
6de9cd9a
DN
4894 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4895 is_gimple_lvalue, fb_lvalue | fb_mayfail);
936d04b6 4896 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
4897 if (tret == GS_ERROR)
4898 {
6a3799eb
AH
4899 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
4900 input_location = EXPR_LOCATION (TREE_VALUE (link));
6de9cd9a
DN
4901 error ("memory input %d is not directly addressable", i);
4902 ret = tret;
4903 }
4904 }
4905 else
4906 {
4907 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
e670d9e4 4908 is_gimple_asm_val, fb_rvalue);
6de9cd9a
DN
4909 if (tret == GS_ERROR)
4910 ret = tret;
4911 }
726a989a
RB
4912
4913 TREE_CHAIN (link) = NULL_TREE;
9771b263 4914 vec_safe_push (inputs, link);
6de9cd9a 4915 }
b8698a0f 4916
ca081cc8
EB
4917 link_next = NULL_TREE;
4918 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
4919 {
4920 link_next = TREE_CHAIN (link);
4921 TREE_CHAIN (link) = NULL_TREE;
4922 vec_safe_push (clobbers, link);
4923 }
1c384bf1 4924
ca081cc8
EB
4925 link_next = NULL_TREE;
4926 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
4927 {
4928 link_next = TREE_CHAIN (link);
4929 TREE_CHAIN (link) = NULL_TREE;
4930 vec_safe_push (labels, link);
4931 }
726a989a 4932
a406865a
RG
4933 /* Do not add ASMs with errors to the gimple IL stream. */
4934 if (ret != GS_ERROR)
4935 {
4936 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
1c384bf1 4937 inputs, outputs, clobbers, labels);
726a989a 4938
a406865a
RG
4939 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
4940 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
4941
4942 gimplify_seq_add_stmt (pre_p, stmt);
4943 }
6de9cd9a
DN
4944
4945 return ret;
4946}
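/* Sketch of the in/out splitting performed above (the operand number in
   the matching constraint depends on the asm, so take it as illustrative):

        __asm__ ("incl %0" : "+r" (x));

   is rewritten so that the "+r" operand becomes a plain output plus a
   matching numbered input, roughly

        __asm__ ("incl %0" : "=r" (x) : "0" (x));

   and that is what the GIMPLE_ASM built at the end records.  */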
4947
4948/* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
726a989a 4949 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6de9cd9a
DN
4950 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4951 return to this function.
4952
4953 FIXME should we complexify the prequeue handling instead? Or use flags
4954 for all the cleanups and let the optimizer tighten them up? The current
4955 code seems pretty fragile; it will break on a cleanup within any
4956 non-conditional nesting. But any such nesting would be broken, anyway;
4957 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4958 and continues out of it. We can do that at the RTL level, though, so
4959 having an optimizer to tighten up try/finally regions would be a Good
4960 Thing. */
4961
4962static enum gimplify_status
726a989a 4963gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 4964{
726a989a
RB
4965 gimple_stmt_iterator iter;
4966 gimple_seq body_sequence = NULL;
6de9cd9a 4967
325c3691 4968 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6de9cd9a
DN
4969
4970 /* We only care about the number of conditions between the innermost
df77f454
JM
4971 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4972 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6de9cd9a 4973 int old_conds = gimplify_ctxp->conditions;
726a989a 4974 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
32be32af 4975 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6de9cd9a 4976 gimplify_ctxp->conditions = 0;
726a989a 4977 gimplify_ctxp->conditional_cleanups = NULL;
32be32af 4978 gimplify_ctxp->in_cleanup_point_expr = true;
6de9cd9a 4979
726a989a 4980 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6de9cd9a
DN
4981
4982 gimplify_ctxp->conditions = old_conds;
df77f454 4983 gimplify_ctxp->conditional_cleanups = old_cleanups;
32be32af 4984 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6de9cd9a 4985
726a989a 4986 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6de9cd9a 4987 {
726a989a 4988 gimple wce = gsi_stmt (iter);
6de9cd9a 4989
726a989a 4990 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6de9cd9a 4991 {
726a989a 4992 if (gsi_one_before_end_p (iter))
6de9cd9a 4993 {
726a989a
RB
4994 /* Note that gsi_insert_seq_before and gsi_remove do not
4995 scan operands, unlike some other sequence mutators. */
ae0595b0
RG
4996 if (!gimple_wce_cleanup_eh_only (wce))
4997 gsi_insert_seq_before_without_update (&iter,
4998 gimple_wce_cleanup (wce),
4999 GSI_SAME_STMT);
726a989a 5000 gsi_remove (&iter, true);
6de9cd9a
DN
5001 break;
5002 }
5003 else
5004 {
82d6e6fc 5005 gimple gtry;
726a989a
RB
5006 gimple_seq seq;
5007 enum gimple_try_flags kind;
40aac948 5008
726a989a
RB
5009 if (gimple_wce_cleanup_eh_only (wce))
5010 kind = GIMPLE_TRY_CATCH;
40aac948 5011 else
726a989a
RB
5012 kind = GIMPLE_TRY_FINALLY;
5013 seq = gsi_split_seq_after (iter);
5014
82d6e6fc 5015 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
726a989a
RB
5016 /* Do not use gsi_replace here, as it may scan operands.
5017 We want to do a simple structural modification only. */
355a7673
MM
5018 gsi_set_stmt (&iter, gtry);
5019 iter = gsi_start (gtry->gimple_try.eval);
6de9cd9a
DN
5020 }
5021 }
5022 else
726a989a 5023 gsi_next (&iter);
6de9cd9a
DN
5024 }
5025
726a989a 5026 gimplify_seq_add_seq (pre_p, body_sequence);
6de9cd9a
DN
5027 if (temp)
5028 {
5029 *expr_p = temp;
6de9cd9a
DN
5030 return GS_OK;
5031 }
5032 else
5033 {
726a989a 5034 *expr_p = NULL;
6de9cd9a
DN
5035 return GS_ALL_DONE;
5036 }
5037}
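/* Informal picture of the loop above (C++-ish pseudo-GIMPLE, names made
   up): a gimplified body such as

        A::A (&tmp);
        WITH_CLEANUP_EXPR <A::~A (&tmp)>
        f (&tmp);

   has the statements following the cleanup marker split off and wrapped,
   yielding roughly

        A::A (&tmp);
        try
          {
            f (&tmp);
          }
        finally
          {
            A::~A (&tmp);
          }  */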
5038
5039/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
726a989a
RB
5040 is the cleanup action required. EH_ONLY is true if the cleanup should
5041 only be executed if an exception is thrown, not on normal exit. */
6de9cd9a
DN
5042
5043static void
726a989a 5044gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
6de9cd9a 5045{
726a989a
RB
5046 gimple wce;
5047 gimple_seq cleanup_stmts = NULL;
6de9cd9a
DN
5048
 5049 /* Errors can result in improperly nested cleanups, which results in
726a989a 5050 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
1da2ed5f 5051 if (seen_error ())
6de9cd9a
DN
5052 return;
5053
5054 if (gimple_conditional_context ())
5055 {
5056 /* If we're in a conditional context, this is more complex. We only
5057 want to run the cleanup if we actually ran the initialization that
5058 necessitates it, but we want to run it after the end of the
5059 conditional context. So we wrap the try/finally around the
5060 condition and use a flag to determine whether or not to actually
5061 run the destructor. Thus
5062
5063 test ? f(A()) : 0
5064
5065 becomes (approximately)
5066
5067 flag = 0;
5068 try {
5069 if (test) { A::A(temp); flag = 1; val = f(temp); }
5070 else { val = 0; }
5071 } finally {
5072 if (flag) A::~A(temp);
5073 }
5074 val
5075 */
6de9cd9a 5076 tree flag = create_tmp_var (boolean_type_node, "cleanup");
726a989a
RB
5077 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5078 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5079
b4257cfc 5080 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
726a989a
RB
5081 gimplify_stmt (&cleanup, &cleanup_stmts);
5082 wce = gimple_build_wce (cleanup_stmts);
5083
5084 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5085 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5086 gimplify_seq_add_stmt (pre_p, ftrue);
6de9cd9a
DN
5087
5088 /* Because of this manipulation, and the EH edges that jump
5089 threading cannot redirect, the temporary (VAR) will appear
5090 to be used uninitialized. Don't warn. */
5091 TREE_NO_WARNING (var) = 1;
5092 }
5093 else
5094 {
726a989a
RB
5095 gimplify_stmt (&cleanup, &cleanup_stmts);
5096 wce = gimple_build_wce (cleanup_stmts);
5097 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5098 gimplify_seq_add_stmt (pre_p, wce);
6de9cd9a 5099 }
6de9cd9a
DN
5100}
5101
5102/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5103
5104static enum gimplify_status
726a989a 5105gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5106{
5107 tree targ = *expr_p;
5108 tree temp = TARGET_EXPR_SLOT (targ);
5109 tree init = TARGET_EXPR_INITIAL (targ);
5110 enum gimplify_status ret;
5111
5112 if (init)
5113 {
d0ad58f9
JM
5114 tree cleanup = NULL_TREE;
5115
3a5b9284 5116 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
786025ea
JJ
5117 to the temps list. Handle also variable length TARGET_EXPRs. */
5118 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5119 {
5120 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5121 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5122 gimplify_vla_decl (temp, pre_p);
5123 }
5124 else
5125 gimple_add_tmp_var (temp);
6de9cd9a 5126
3a5b9284
RH
5127 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5128 expression is supposed to initialize the slot. */
5129 if (VOID_TYPE_P (TREE_TYPE (init)))
5130 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5131 else
325c3691 5132 {
726a989a
RB
5133 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5134 init = init_expr;
5135 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5136 init = NULL;
5137 ggc_free (init_expr);
325c3691 5138 }
3a5b9284 5139 if (ret == GS_ERROR)
abc67de1
SM
5140 {
5141 /* PR c++/28266 Make sure this is expanded only once. */
5142 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5143 return GS_ERROR;
5144 }
726a989a
RB
5145 if (init)
5146 gimplify_and_add (init, pre_p);
6de9cd9a
DN
5147
5148 /* If needed, push the cleanup for the temp. */
5149 if (TARGET_EXPR_CLEANUP (targ))
d0ad58f9
JM
5150 {
5151 if (CLEANUP_EH_ONLY (targ))
5152 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5153 CLEANUP_EH_ONLY (targ), pre_p);
5154 else
5155 cleanup = TARGET_EXPR_CLEANUP (targ);
5156 }
5157
5158 /* Add a clobber for the temporary going out of scope, like
5159 gimplify_bind_expr. */
32be32af 5160 if (gimplify_ctxp->in_cleanup_point_expr
87e2a8fd
XDL
5161 && needs_to_live_in_memory (temp)
5162 && flag_stack_reuse == SR_ALL)
d0ad58f9 5163 {
9771b263
DN
5164 tree clobber = build_constructor (TREE_TYPE (temp),
5165 NULL);
d0ad58f9
JM
5166 TREE_THIS_VOLATILE (clobber) = true;
5167 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5168 if (cleanup)
5169 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5170 clobber);
5171 else
5172 cleanup = clobber;
5173 }
5174
5175 if (cleanup)
5176 gimple_push_cleanup (temp, cleanup, false, pre_p);
6de9cd9a
DN
5177
5178 /* Only expand this once. */
5179 TREE_OPERAND (targ, 3) = init;
5180 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5181 }
282899df 5182 else
6de9cd9a 5183 /* We should have expanded this before. */
282899df 5184 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6de9cd9a
DN
5185
5186 *expr_p = temp;
5187 return GS_OK;
5188}
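/* Rough outline for a C++ temporary (slot name invented):

        TARGET_EXPR <D.2345, A::A (&D.2345)>

   adds D.2345 as a local temporary (or as a VLA via gimplify_vla_decl),
   gimplifies the initializer into PRE_P, pushes the destructor, if any, as
   a cleanup through gimple_push_cleanup, and, when stack slot reuse is
   enabled, appends a clobber of D.2345 to that cleanup so the slot can be
   shared once the temporary goes out of scope.  */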
5189
5190/* Gimplification of expression trees. */
5191
726a989a
RB
5192/* Gimplify an expression which appears at statement context. The
5193 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5194 NULL, a new sequence is allocated.
6de9cd9a 5195
726a989a
RB
5196 Return true if we actually added a statement to the queue. */
5197
5198bool
5199gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6de9cd9a 5200{
726a989a 5201 gimple_seq_node last;
6de9cd9a 5202
726a989a
RB
5203 last = gimple_seq_last (*seq_p);
5204 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5205 return last != gimple_seq_last (*seq_p);
6de9cd9a
DN
5206}
5207
953ff289
DN
5208/* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
 5209 parallels. If entries already exist, force them to be some flavor of private.
5210 If there is no enclosing parallel, do nothing. */
5211
5212void
5213omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5214{
5215 splay_tree_node n;
5216
5217 if (decl == NULL || !DECL_P (decl))
5218 return;
5219
5220 do
5221 {
5222 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5223 if (n != NULL)
5224 {
5225 if (n->value & GOVD_SHARED)
5226 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
acf0174b
JJ
5227 else if (n->value & GOVD_MAP)
5228 n->value |= GOVD_MAP_TO_ONLY;
953ff289
DN
5229 else
5230 return;
5231 }
acf0174b
JJ
5232 else if (ctx->region_type == ORT_TARGET)
5233 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
74bf76ed 5234 else if (ctx->region_type != ORT_WORKSHARE
acf0174b
JJ
5235 && ctx->region_type != ORT_SIMD
5236 && ctx->region_type != ORT_TARGET_DATA)
953ff289
DN
5237 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5238
5239 ctx = ctx->outer_context;
5240 }
5241 while (ctx);
5242}
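/* Typical effect, schematically: for a variable-length array

        void foo (int n)
        {
          int vla[n];
        #pragma omp parallel shared (vla)
          ...
        }

   the hidden temporaries holding the size of 'vla' are walked here and
   become FIRSTPRIVATE on the enclosing parallel, so each thread has the
   bounds available without sharing the size temporaries themselves.  */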
5243
5244/* Similarly for each of the type sizes of TYPE. */
5245
5246static void
5247omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5248{
5249 if (type == NULL || type == error_mark_node)
5250 return;
5251 type = TYPE_MAIN_VARIANT (type);
5252
5253 if (pointer_set_insert (ctx->privatized_types, type))
5254 return;
5255
5256 switch (TREE_CODE (type))
5257 {
5258 case INTEGER_TYPE:
5259 case ENUMERAL_TYPE:
5260 case BOOLEAN_TYPE:
953ff289 5261 case REAL_TYPE:
325217ed 5262 case FIXED_POINT_TYPE:
953ff289
DN
5263 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5264 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5265 break;
5266
5267 case ARRAY_TYPE:
5268 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5269 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5270 break;
5271
5272 case RECORD_TYPE:
5273 case UNION_TYPE:
5274 case QUAL_UNION_TYPE:
5275 {
5276 tree field;
910ad8de 5277 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
953ff289
DN
5278 if (TREE_CODE (field) == FIELD_DECL)
5279 {
5280 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5281 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5282 }
5283 }
5284 break;
5285
5286 case POINTER_TYPE:
5287 case REFERENCE_TYPE:
5288 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5289 break;
5290
5291 default:
5292 break;
5293 }
5294
5295 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5296 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5297 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5298}
5299
5300/* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5301
5302static void
5303omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5304{
5305 splay_tree_node n;
5306 unsigned int nflags;
5307 tree t;
5308
b504a918 5309 if (error_operand_p (decl))
953ff289
DN
5310 return;
5311
5312 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5313 there are constructors involved somewhere. */
5314 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5315 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5316 flags |= GOVD_SEEN;
5317
5318 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
acf0174b 5319 if (n != NULL && n->value != GOVD_ALIGNED)
953ff289
DN
5320 {
5321 /* We shouldn't be re-adding the decl with the same data
5322 sharing class. */
5323 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5324 /* The only combination of data sharing classes we should see is
5325 FIRSTPRIVATE and LASTPRIVATE. */
5326 nflags = n->value | flags;
5327 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
74bf76ed
JJ
5328 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
5329 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
953ff289
DN
5330 n->value = nflags;
5331 return;
5332 }
5333
5334 /* When adding a variable-sized variable, we have to handle all sorts
b8698a0f 5335 of additional bits of data: the pointer replacement variable, and
953ff289 5336 the parameters of the type. */
4c923c28 5337 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
953ff289
DN
5338 {
5339 /* Add the pointer replacement variable as PRIVATE if the variable
5340 replacement is private, else FIRSTPRIVATE since we'll need the
5341 address of the original variable either for SHARED, or for the
5342 copy into or out of the context. */
5343 if (!(flags & GOVD_LOCAL))
5344 {
acf0174b
JJ
5345 nflags = flags & GOVD_MAP
5346 ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
5347 : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
953ff289
DN
5348 nflags |= flags & GOVD_SEEN;
5349 t = DECL_VALUE_EXPR (decl);
5350 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5351 t = TREE_OPERAND (t, 0);
5352 gcc_assert (DECL_P (t));
5353 omp_add_variable (ctx, t, nflags);
5354 }
5355
5356 /* Add all of the variable and type parameters (which should have
5357 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5358 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5359 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5360 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5361
5362 /* The variable-sized variable itself is never SHARED, only some form
5363 of PRIVATE. The sharing would take place via the pointer variable
5364 which we remapped above. */
5365 if (flags & GOVD_SHARED)
5366 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5367 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5368
b8698a0f 5369 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
953ff289
DN
5370 alloca statement we generate for the variable, so make sure it
5371 is available. This isn't automatically needed for the SHARED
4288fea2
JJ
5372 case, since we won't be allocating local storage then.
5373 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5374 in this case omp_notice_variable will be called later
5375 on when it is gimplified. */
acf0174b 5376 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
423ed416 5377 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
953ff289
DN
5378 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5379 }
acf0174b
JJ
5380 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5381 && lang_hooks.decls.omp_privatize_by_reference (decl))
953ff289 5382 {
953ff289
DN
5383 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5384
5385 /* Similar to the direct variable sized case above, we'll need the
5386 size of references being privatized. */
5387 if ((flags & GOVD_SHARED) == 0)
5388 {
5389 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4c923c28 5390 if (TREE_CODE (t) != INTEGER_CST)
953ff289
DN
5391 omp_notice_variable (ctx, t, true);
5392 }
5393 }
5394
74bf76ed
JJ
5395 if (n != NULL)
5396 n->value |= flags;
5397 else
5398 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
953ff289
DN
5399}
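/* For instance, privatizing a VLA 'int vla[n]' in an OpenMP region adds
   not only the decl itself but also its pointer replacement variable
   (taken from DECL_VALUE_EXPR) as PRIVATE or FIRSTPRIVATE, plus DECL_SIZE,
   DECL_SIZE_UNIT and the type sizes as FIRSTPRIVATE, since the per-thread
   alloca needs them; the VLA decl itself is never left SHARED.  */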
5400
f22f4340
JJ
5401/* Notice a threadprivate variable DECL used in OpenMP context CTX.
5402 This just prints out diagnostics about threadprivate variable uses
5403 in untied tasks. If DECL2 is non-NULL, prevent this warning
5404 on that variable. */
5405
5406static bool
5407omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5408 tree decl2)
5409{
5410 splay_tree_node n;
acf0174b
JJ
5411 struct gimplify_omp_ctx *octx;
5412
5413 for (octx = ctx; octx; octx = octx->outer_context)
5414 if (octx->region_type == ORT_TARGET)
5415 {
5416 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5417 if (n == NULL)
5418 {
5419 error ("threadprivate variable %qE used in target region",
5420 DECL_NAME (decl));
5421 error_at (octx->location, "enclosing target region");
5422 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5423 }
5424 if (decl2)
5425 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5426 }
f22f4340
JJ
5427
5428 if (ctx->region_type != ORT_UNTIED_TASK)
5429 return false;
5430 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5431 if (n == NULL)
5432 {
ad19c4be
EB
5433 error ("threadprivate variable %qE used in untied task",
5434 DECL_NAME (decl));
f22f4340
JJ
5435 error_at (ctx->location, "enclosing task");
5436 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5437 }
5438 if (decl2)
5439 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5440 return false;
5441}
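/* Example of the diagnostics above:

        int tp;
        #pragma omp threadprivate (tp)
        ...
        #pragma omp task untied
          tp++;

   elicits "threadprivate variable 'tp' used in untied task" (a use inside
   a target region is diagnosed similarly).  */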
5442
953ff289
DN
5443/* Record the fact that DECL was used within the OpenMP context CTX.
5444 IN_CODE is true when real code uses DECL, and false when we should
5445 merely emit default(none) errors. Return true if DECL is going to
5446 be remapped and thus DECL shouldn't be gimplified into its
5447 DECL_VALUE_EXPR (if any). */
5448
5449static bool
5450omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5451{
5452 splay_tree_node n;
5453 unsigned flags = in_code ? GOVD_SEEN : 0;
5454 bool ret = false, shared;
5455
b504a918 5456 if (error_operand_p (decl))
953ff289
DN
5457 return false;
5458
5459 /* Threadprivate variables are predetermined. */
5460 if (is_global_var (decl))
5461 {
5462 if (DECL_THREAD_LOCAL_P (decl))
f22f4340 5463 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
953ff289
DN
5464
5465 if (DECL_HAS_VALUE_EXPR_P (decl))
5466 {
5467 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5468
5469 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
f22f4340 5470 return omp_notice_threadprivate_variable (ctx, decl, value);
953ff289
DN
5471 }
5472 }
5473
5474 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
acf0174b
JJ
5475 if (ctx->region_type == ORT_TARGET)
5476 {
5477 if (n == NULL)
5478 {
5479 if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
5480 {
5481 error ("%qD referenced in target region does not have "
5482 "a mappable type", decl);
5483 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
5484 }
5485 else
5486 omp_add_variable (ctx, decl, GOVD_MAP | flags);
5487 }
5488 else
5489 n->value |= flags;
5490 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
5491 goto do_outer;
5492 }
5493
953ff289
DN
5494 if (n == NULL)
5495 {
5496 enum omp_clause_default_kind default_kind, kind;
a68ab351 5497 struct gimplify_omp_ctx *octx;
953ff289 5498
74bf76ed 5499 if (ctx->region_type == ORT_WORKSHARE
acf0174b
JJ
5500 || ctx->region_type == ORT_SIMD
5501 || ctx->region_type == ORT_TARGET_DATA)
953ff289
DN
5502 goto do_outer;
5503
5504 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5505 remapped firstprivate instead of shared. To some extent this is
5506 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5507 default_kind = ctx->default_kind;
5508 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5509 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5510 default_kind = kind;
5511
5512 switch (default_kind)
5513 {
5514 case OMP_CLAUSE_DEFAULT_NONE:
f22f4340 5515 if ((ctx->region_type & ORT_TASK) != 0)
acf0174b
JJ
5516 {
5517 error ("%qE not specified in enclosing task",
5518 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5519 error_at (ctx->location, "enclosing task");
5520 }
5521 else if (ctx->region_type == ORT_TEAMS)
5522 {
5523 error ("%qE not specified in enclosing teams construct",
5524 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5525 error_at (ctx->location, "enclosing teams construct");
5526 }
f22f4340 5527 else
acf0174b
JJ
5528 {
5529 error ("%qE not specified in enclosing parallel",
5530 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5531 error_at (ctx->location, "enclosing parallel");
5532 }
953ff289
DN
5533 /* FALLTHRU */
5534 case OMP_CLAUSE_DEFAULT_SHARED:
5535 flags |= GOVD_SHARED;
5536 break;
5537 case OMP_CLAUSE_DEFAULT_PRIVATE:
5538 flags |= GOVD_PRIVATE;
5539 break;
a68ab351
JJ
5540 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5541 flags |= GOVD_FIRSTPRIVATE;
5542 break;
5543 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5544 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
f22f4340 5545 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
a68ab351
JJ
5546 if (ctx->outer_context)
5547 omp_notice_variable (ctx->outer_context, decl, in_code);
5548 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5549 {
5550 splay_tree_node n2;
5551
acf0174b
JJ
5552 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
5553 continue;
a68ab351
JJ
5554 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5555 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5556 {
5557 flags |= GOVD_FIRSTPRIVATE;
5558 break;
5559 }
acf0174b 5560 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
a68ab351
JJ
5561 break;
5562 }
5563 if (flags & GOVD_FIRSTPRIVATE)
5564 break;
5565 if (octx == NULL
5566 && (TREE_CODE (decl) == PARM_DECL
5567 || (!is_global_var (decl)
5568 && DECL_CONTEXT (decl) == current_function_decl)))
5569 {
5570 flags |= GOVD_FIRSTPRIVATE;
5571 break;
5572 }
5573 flags |= GOVD_SHARED;
5574 break;
953ff289
DN
5575 default:
5576 gcc_unreachable ();
5577 }
5578
a68ab351
JJ
5579 if ((flags & GOVD_PRIVATE)
5580 && lang_hooks.decls.omp_private_outer_ref (decl))
5581 flags |= GOVD_PRIVATE_OUTER_REF;
5582
953ff289
DN
5583 omp_add_variable (ctx, decl, flags);
5584
5585 shared = (flags & GOVD_SHARED) != 0;
5586 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5587 goto do_outer;
5588 }
5589
3ad6b266
JJ
5590 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5591 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5592 && DECL_SIZE (decl)
5593 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5594 {
5595 splay_tree_node n2;
5596 tree t = DECL_VALUE_EXPR (decl);
5597 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5598 t = TREE_OPERAND (t, 0);
5599 gcc_assert (DECL_P (t));
5600 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5601 n2->value |= GOVD_SEEN;
5602 }
5603
953ff289
DN
5604 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5605 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5606
5607 /* If nothing changed, there's nothing left to do. */
5608 if ((n->value & flags) == flags)
5609 return ret;
5610 flags |= n->value;
5611 n->value = flags;
5612
5613 do_outer:
5614 /* If the variable is private in the current context, then we don't
5615 need to propagate anything to an outer context. */
a68ab351 5616 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
953ff289
DN
5617 return ret;
5618 if (ctx->outer_context
5619 && omp_notice_variable (ctx->outer_context, decl, in_code))
5620 return true;
5621 return ret;
5622}
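/* Concrete example of the default-handling switch above:

        int x = 0;
        #pragma omp parallel default(none)
          x++;

   finds no entry for 'x', so OMP_CLAUSE_DEFAULT_NONE reports
   "'x' not specified in enclosing parallel" and then falls through to
   treat 'x' as shared so that gimplification can continue.  */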
5623
5624/* Verify that DECL is private within CTX. If there's specific information
5625 to the contrary in the innermost scope, generate an error. */
5626
5627static bool
74bf76ed 5628omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, bool simd)
953ff289
DN
5629{
5630 splay_tree_node n;
5631
5632 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5633 if (n != NULL)
5634 {
5635 if (n->value & GOVD_SHARED)
5636 {
5637 if (ctx == gimplify_omp_ctxp)
f6a5ffbf 5638 {
74bf76ed
JJ
5639 if (simd)
5640 error ("iteration variable %qE is predetermined linear",
5641 DECL_NAME (decl));
5642 else
5643 error ("iteration variable %qE should be private",
5644 DECL_NAME (decl));
f6a5ffbf
JJ
5645 n->value = GOVD_PRIVATE;
5646 return true;
5647 }
5648 else
5649 return false;
953ff289 5650 }
761041be
JJ
5651 else if ((n->value & GOVD_EXPLICIT) != 0
5652 && (ctx == gimplify_omp_ctxp
a68ab351 5653 || (ctx->region_type == ORT_COMBINED_PARALLEL
761041be
JJ
5654 && gimplify_omp_ctxp->outer_context == ctx)))
5655 {
5656 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4f1e4960
JM
5657 error ("iteration variable %qE should not be firstprivate",
5658 DECL_NAME (decl));
761041be 5659 else if ((n->value & GOVD_REDUCTION) != 0)
4f1e4960
JM
5660 error ("iteration variable %qE should not be reduction",
5661 DECL_NAME (decl));
74bf76ed
JJ
5662 else if (simd && (n->value & GOVD_LASTPRIVATE) != 0)
5663 error ("iteration variable %qE should not be lastprivate",
5664 DECL_NAME (decl));
5665 else if (simd && (n->value & GOVD_PRIVATE) != 0)
5666 error ("iteration variable %qE should not be private",
5667 DECL_NAME (decl));
5668 else if (simd && (n->value & GOVD_LINEAR) != 0)
5669 error ("iteration variable %qE is predetermined linear",
5670 DECL_NAME (decl));
761041be 5671 }
ca2b1311
JJ
5672 return (ctx == gimplify_omp_ctxp
5673 || (ctx->region_type == ORT_COMBINED_PARALLEL
5674 && gimplify_omp_ctxp->outer_context == ctx));
953ff289
DN
5675 }
5676
74bf76ed
JJ
5677 if (ctx->region_type != ORT_WORKSHARE
5678 && ctx->region_type != ORT_SIMD)
953ff289 5679 return false;
f6a5ffbf 5680 else if (ctx->outer_context)
74bf76ed 5681 return omp_is_private (ctx->outer_context, decl, simd);
ca2b1311 5682 return false;
953ff289
DN
5683}
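/* Example of what this catches (called from gimplify_omp_for on the
   iteration variable):

        #pragma omp for firstprivate (i)
        for (i = 0; i < n; i++)
          ...

   is rejected with "iteration variable 'i' should not be firstprivate".  */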
5684
07b7aade
JJ
5685/* Return true if DECL is private within a parallel region
 5686 that binds to the current construct's context or in the parallel
5687 region's REDUCTION clause. */
5688
5689static bool
5690omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5691{
5692 splay_tree_node n;
5693
5694 do
5695 {
5696 ctx = ctx->outer_context;
5697 if (ctx == NULL)
5698 return !(is_global_var (decl)
5699 /* References might be private, but might be shared too. */
5700 || lang_hooks.decls.omp_privatize_by_reference (decl));
5701
acf0174b
JJ
5702 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
5703 continue;
5704
07b7aade
JJ
5705 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5706 if (n != NULL)
5707 return (n->value & GOVD_SHARED) == 0;
5708 }
74bf76ed
JJ
5709 while (ctx->region_type == ORT_WORKSHARE
5710 || ctx->region_type == ORT_SIMD);
07b7aade
JJ
5711 return false;
5712}
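/* omp_check_private returning true is what lets gimplify_scan_omp_clauses
   reject, for example,

        #pragma omp parallel private (x)
        {
        #pragma omp for firstprivate (x)
          ...
        }

   with "firstprivate variable 'x' is private in outer context", since the
   worksharing clause would need the outer value of a variable the
   enclosing parallel has already privatized.  */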
5713
953ff289
DN
5714/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
 5715 omp context and, where needed, into the previous omp contexts. */
5716
5717static void
726a989a 5718gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
a68ab351 5719 enum omp_region_type region_type)
953ff289
DN
5720{
5721 struct gimplify_omp_ctx *ctx, *outer_ctx;
d406b663 5722 struct gimplify_ctx gctx;
953ff289
DN
5723 tree c;
5724
a68ab351 5725 ctx = new_omp_context (region_type);
953ff289
DN
5726 outer_ctx = ctx->outer_context;
5727
5728 while ((c = *list_p) != NULL)
5729 {
953ff289
DN
5730 bool remove = false;
5731 bool notice_outer = true;
07b7aade 5732 const char *check_non_private = NULL;
953ff289
DN
5733 unsigned int flags;
5734 tree decl;
5735
aaf46ef9 5736 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
5737 {
5738 case OMP_CLAUSE_PRIVATE:
5739 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
a68ab351
JJ
5740 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5741 {
5742 flags |= GOVD_PRIVATE_OUTER_REF;
5743 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5744 }
5745 else
5746 notice_outer = false;
953ff289
DN
5747 goto do_add;
5748 case OMP_CLAUSE_SHARED:
5749 flags = GOVD_SHARED | GOVD_EXPLICIT;
5750 goto do_add;
5751 case OMP_CLAUSE_FIRSTPRIVATE:
5752 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
07b7aade 5753 check_non_private = "firstprivate";
953ff289
DN
5754 goto do_add;
5755 case OMP_CLAUSE_LASTPRIVATE:
5756 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 5757 check_non_private = "lastprivate";
953ff289
DN
5758 goto do_add;
5759 case OMP_CLAUSE_REDUCTION:
5760 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 5761 check_non_private = "reduction";
953ff289 5762 goto do_add;
acf0174b
JJ
5763 case OMP_CLAUSE_LINEAR:
5764 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
5765 is_gimple_val, fb_rvalue) == GS_ERROR)
5766 {
5767 remove = true;
5768 break;
5769 }
5770 flags = GOVD_LINEAR | GOVD_EXPLICIT;
5771 goto do_add;
5772
5773 case OMP_CLAUSE_MAP:
5774 if (OMP_CLAUSE_SIZE (c)
5775 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
5776 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
5777 {
5778 remove = true;
5779 break;
5780 }
5781 decl = OMP_CLAUSE_DECL (c);
5782 if (!DECL_P (decl))
5783 {
5784 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
5785 NULL, is_gimple_lvalue, fb_lvalue)
5786 == GS_ERROR)
5787 {
5788 remove = true;
5789 break;
5790 }
5791 break;
5792 }
5793 flags = GOVD_MAP | GOVD_EXPLICIT;
5794 goto do_add;
5795
5796 case OMP_CLAUSE_DEPEND:
5797 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
5798 {
5799 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
5800 NULL, is_gimple_val, fb_rvalue);
5801 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5802 }
5803 if (error_operand_p (OMP_CLAUSE_DECL (c)))
5804 {
5805 remove = true;
5806 break;
5807 }
5808 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
5809 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
5810 is_gimple_val, fb_rvalue) == GS_ERROR)
5811 {
5812 remove = true;
5813 break;
5814 }
5815 break;
5816
5817 case OMP_CLAUSE_TO:
5818 case OMP_CLAUSE_FROM:
5819 if (OMP_CLAUSE_SIZE (c)
5820 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
5821 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
5822 {
5823 remove = true;
5824 break;
5825 }
5826 decl = OMP_CLAUSE_DECL (c);
5827 if (error_operand_p (decl))
5828 {
5829 remove = true;
5830 break;
5831 }
5832 if (!DECL_P (decl))
5833 {
5834 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
5835 NULL, is_gimple_lvalue, fb_lvalue)
5836 == GS_ERROR)
5837 {
5838 remove = true;
5839 break;
5840 }
5841 break;
5842 }
5843 goto do_notice;
953ff289
DN
5844
5845 do_add:
5846 decl = OMP_CLAUSE_DECL (c);
b504a918 5847 if (error_operand_p (decl))
953ff289
DN
5848 {
5849 remove = true;
5850 break;
5851 }
5852 omp_add_variable (ctx, decl, flags);
693d710f 5853 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
953ff289
DN
5854 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5855 {
5856 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
693d710f 5857 GOVD_LOCAL | GOVD_SEEN);
953ff289 5858 gimplify_omp_ctxp = ctx;
d406b663 5859 push_gimplify_context (&gctx);
726a989a 5860
355a7673
MM
5861 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5862 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
726a989a
RB
5863
5864 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5865 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5866 pop_gimplify_context
5867 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
d406b663 5868 push_gimplify_context (&gctx);
726a989a
RB
5869 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5870 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
b8698a0f 5871 pop_gimplify_context
726a989a
RB
5872 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5873 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5874 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5875
953ff289
DN
5876 gimplify_omp_ctxp = outer_ctx;
5877 }
a68ab351
JJ
5878 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5879 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5880 {
5881 gimplify_omp_ctxp = ctx;
d406b663 5882 push_gimplify_context (&gctx);
a68ab351
JJ
5883 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5884 {
5885 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5886 NULL, NULL);
5887 TREE_SIDE_EFFECTS (bind) = 1;
5888 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5889 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5890 }
726a989a
RB
5891 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5892 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5893 pop_gimplify_context
5894 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5895 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5896
a68ab351
JJ
5897 gimplify_omp_ctxp = outer_ctx;
5898 }
953ff289
DN
5899 if (notice_outer)
5900 goto do_notice;
5901 break;
5902
5903 case OMP_CLAUSE_COPYIN:
5904 case OMP_CLAUSE_COPYPRIVATE:
5905 decl = OMP_CLAUSE_DECL (c);
b504a918 5906 if (error_operand_p (decl))
953ff289
DN
5907 {
5908 remove = true;
5909 break;
5910 }
5911 do_notice:
5912 if (outer_ctx)
5913 omp_notice_variable (outer_ctx, decl, true);
07b7aade 5914 if (check_non_private
a68ab351 5915 && region_type == ORT_WORKSHARE
07b7aade
JJ
5916 && omp_check_private (ctx, decl))
5917 {
4f1e4960
JM
5918 error ("%s variable %qE is private in outer context",
5919 check_non_private, DECL_NAME (decl));
07b7aade
JJ
5920 remove = true;
5921 }
953ff289
DN
5922 break;
5923
20906c66 5924 case OMP_CLAUSE_FINAL:
953ff289 5925 case OMP_CLAUSE_IF:
d568d1a8
RS
5926 OMP_CLAUSE_OPERAND (c, 0)
5927 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5928 /* Fall through. */
5929
5930 case OMP_CLAUSE_SCHEDULE:
953ff289 5931 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
5932 case OMP_CLAUSE_NUM_TEAMS:
5933 case OMP_CLAUSE_THREAD_LIMIT:
5934 case OMP_CLAUSE_DIST_SCHEDULE:
5935 case OMP_CLAUSE_DEVICE:
726a989a
RB
5936 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5937 is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b 5938 remove = true;
953ff289
DN
5939 break;
5940
5941 case OMP_CLAUSE_NOWAIT:
5942 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
5943 case OMP_CLAUSE_UNTIED:
5944 case OMP_CLAUSE_COLLAPSE:
20906c66 5945 case OMP_CLAUSE_MERGEABLE:
acf0174b 5946 case OMP_CLAUSE_PROC_BIND:
74bf76ed 5947 case OMP_CLAUSE_SAFELEN:
953ff289
DN
5948 break;
5949
acf0174b
JJ
5950 case OMP_CLAUSE_ALIGNED:
5951 decl = OMP_CLAUSE_DECL (c);
5952 if (error_operand_p (decl))
5953 {
5954 remove = true;
5955 break;
5956 }
5957 if (!is_global_var (decl)
5958 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
5959 omp_add_variable (ctx, decl, GOVD_ALIGNED);
5960 break;
5961
953ff289
DN
5962 case OMP_CLAUSE_DEFAULT:
5963 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5964 break;
5965
5966 default:
5967 gcc_unreachable ();
5968 }
5969
5970 if (remove)
5971 *list_p = OMP_CLAUSE_CHAIN (c);
5972 else
5973 list_p = &OMP_CLAUSE_CHAIN (c);
5974 }
5975
5976 gimplify_omp_ctxp = ctx;
5977}
5978
5979/* For all variables that were not actually used within the context,
5980 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
5981
5982static int
5983gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5984{
5985 tree *list_p = (tree *) data;
5986 tree decl = (tree) n->key;
5987 unsigned flags = n->value;
aaf46ef9 5988 enum omp_clause_code code;
953ff289
DN
5989 tree clause;
5990 bool private_debug;
5991
5992 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5993 return 0;
5994 if ((flags & GOVD_SEEN) == 0)
5995 return 0;
5996 if (flags & GOVD_DEBUG_PRIVATE)
5997 {
5998 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5999 private_debug = true;
6000 }
acf0174b
JJ
6001 else if (flags & GOVD_MAP)
6002 private_debug = false;
953ff289
DN
6003 else
6004 private_debug
6005 = lang_hooks.decls.omp_private_debug_clause (decl,
6006 !!(flags & GOVD_SHARED));
6007 if (private_debug)
6008 code = OMP_CLAUSE_PRIVATE;
acf0174b
JJ
6009 else if (flags & GOVD_MAP)
6010 code = OMP_CLAUSE_MAP;
953ff289
DN
6011 else if (flags & GOVD_SHARED)
6012 {
6013 if (is_global_var (decl))
64964499
JJ
6014 {
6015 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6016 while (ctx != NULL)
6017 {
6018 splay_tree_node on
6019 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6020 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
74bf76ed
JJ
6021 | GOVD_PRIVATE | GOVD_REDUCTION
6022 | GOVD_LINEAR)) != 0)
64964499
JJ
6023 break;
6024 ctx = ctx->outer_context;
6025 }
6026 if (ctx == NULL)
6027 return 0;
6028 }
953ff289
DN
6029 code = OMP_CLAUSE_SHARED;
6030 }
6031 else if (flags & GOVD_PRIVATE)
6032 code = OMP_CLAUSE_PRIVATE;
6033 else if (flags & GOVD_FIRSTPRIVATE)
6034 code = OMP_CLAUSE_FIRSTPRIVATE;
74bf76ed
JJ
6035 else if (flags & GOVD_LASTPRIVATE)
6036 code = OMP_CLAUSE_LASTPRIVATE;
acf0174b
JJ
6037 else if (flags & GOVD_ALIGNED)
6038 return 0;
953ff289
DN
6039 else
6040 gcc_unreachable ();
6041
c2255bc4 6042 clause = build_omp_clause (input_location, code);
aaf46ef9 6043 OMP_CLAUSE_DECL (clause) = decl;
953ff289
DN
6044 OMP_CLAUSE_CHAIN (clause) = *list_p;
6045 if (private_debug)
6046 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
a68ab351
JJ
6047 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6048 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
acf0174b
JJ
6049 else if (code == OMP_CLAUSE_MAP)
6050 {
6051 OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
6052 ? OMP_CLAUSE_MAP_TO
6053 : OMP_CLAUSE_MAP_TOFROM;
6054 if (DECL_SIZE (decl)
6055 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6056 {
6057 tree decl2 = DECL_VALUE_EXPR (decl);
6058 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6059 decl2 = TREE_OPERAND (decl2, 0);
6060 gcc_assert (DECL_P (decl2));
6061 tree mem = build_simple_mem_ref (decl2);
6062 OMP_CLAUSE_DECL (clause) = mem;
6063 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6064 if (gimplify_omp_ctxp->outer_context)
6065 {
6066 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6067 omp_notice_variable (ctx, decl2, true);
6068 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
6069 }
6070 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
6071 OMP_CLAUSE_MAP);
6072 OMP_CLAUSE_DECL (nc) = decl;
6073 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6074 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6075 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
6076 OMP_CLAUSE_CHAIN (clause) = nc;
6077 }
6078 }
953ff289 6079 *list_p = clause;
a68ab351 6080 lang_hooks.decls.omp_finish_clause (clause);
953ff289
DN
6081
6082 return 0;
6083}
6084
6085static void
6086gimplify_adjust_omp_clauses (tree *list_p)
6087{
6088 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6089 tree c, decl;
6090
6091 while ((c = *list_p) != NULL)
6092 {
6093 splay_tree_node n;
6094 bool remove = false;
6095
aaf46ef9 6096 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
6097 {
6098 case OMP_CLAUSE_PRIVATE:
6099 case OMP_CLAUSE_SHARED:
6100 case OMP_CLAUSE_FIRSTPRIVATE:
74bf76ed 6101 case OMP_CLAUSE_LINEAR:
953ff289
DN
6102 decl = OMP_CLAUSE_DECL (c);
6103 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6104 remove = !(n->value & GOVD_SEEN);
6105 if (! remove)
6106 {
aaf46ef9 6107 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
953ff289
DN
6108 if ((n->value & GOVD_DEBUG_PRIVATE)
6109 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6110 {
6111 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6112 || ((n->value & GOVD_DATA_SHARE_CLASS)
6113 == GOVD_PRIVATE));
aaf46ef9 6114 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
953ff289
DN
6115 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6116 }
74bf76ed
JJ
6117 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6118 && ctx->outer_context
6119 && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6120 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6121 && !is_global_var (decl))
6122 {
6123 if (ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
6124 {
6125 n = splay_tree_lookup (ctx->outer_context->variables,
6126 (splay_tree_key) decl);
6127 if (n == NULL
6128 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
6129 {
6130 int flags = OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6131 ? GOVD_LASTPRIVATE : GOVD_SHARED;
6132 if (n == NULL)
6133 omp_add_variable (ctx->outer_context, decl,
6134 flags | GOVD_SEEN);
6135 else
6136 n->value |= flags | GOVD_SEEN;
6137 }
6138 }
6139 else
6140 omp_notice_variable (ctx->outer_context, decl, true);
6141 }
953ff289
DN
6142 }
6143 break;
6144
6145 case OMP_CLAUSE_LASTPRIVATE:
6146 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6147 accurately reflect the presence of a FIRSTPRIVATE clause. */
6148 decl = OMP_CLAUSE_DECL (c);
6149 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6150 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6151 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6152 break;
b8698a0f 6153
acf0174b
JJ
6154 case OMP_CLAUSE_ALIGNED:
6155 decl = OMP_CLAUSE_DECL (c);
6156 if (!is_global_var (decl))
6157 {
6158 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6159 remove = n == NULL || !(n->value & GOVD_SEEN);
6160 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6161 {
6162 struct gimplify_omp_ctx *octx;
6163 if (n != NULL
6164 && (n->value & (GOVD_DATA_SHARE_CLASS
6165 & ~GOVD_FIRSTPRIVATE)))
6166 remove = true;
6167 else
6168 for (octx = ctx->outer_context; octx;
6169 octx = octx->outer_context)
6170 {
6171 n = splay_tree_lookup (octx->variables,
6172 (splay_tree_key) decl);
6173 if (n == NULL)
6174 continue;
6175 if (n->value & GOVD_LOCAL)
6176 break;
6177 /* We have to avoid assigning a shared variable
6178 to itself when trying to add
6179 __builtin_assume_aligned. */
6180 if (n->value & GOVD_SHARED)
6181 {
6182 remove = true;
6183 break;
6184 }
6185 }
6186 }
6187 }
6188 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
6189 {
6190 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6191 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6192 remove = true;
6193 }
6194 break;
6195
6196 case OMP_CLAUSE_MAP:
6197 decl = OMP_CLAUSE_DECL (c);
6198 if (!DECL_P (decl))
6199 break;
6200 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6201 if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
6202 remove = true;
6203 else if (DECL_SIZE (decl)
6204 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
6205 && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
6206 {
6207 tree decl2 = DECL_VALUE_EXPR (decl);
6208 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6209 decl2 = TREE_OPERAND (decl2, 0);
6210 gcc_assert (DECL_P (decl2));
6211 tree mem = build_simple_mem_ref (decl2);
6212 OMP_CLAUSE_DECL (c) = mem;
6213 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6214 if (ctx->outer_context)
6215 {
6216 omp_notice_variable (ctx->outer_context, decl2, true);
6217 omp_notice_variable (ctx->outer_context,
6218 OMP_CLAUSE_SIZE (c), true);
6219 }
6220 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6221 OMP_CLAUSE_MAP);
6222 OMP_CLAUSE_DECL (nc) = decl;
6223 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6224 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6225 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
6226 OMP_CLAUSE_CHAIN (c) = nc;
6227 c = nc;
6228 }
6229 break;
6230
6231 case OMP_CLAUSE_TO:
6232 case OMP_CLAUSE_FROM:
6233 decl = OMP_CLAUSE_DECL (c);
6234 if (!DECL_P (decl))
6235 break;
6236 if (DECL_SIZE (decl)
6237 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6238 {
6239 tree decl2 = DECL_VALUE_EXPR (decl);
6240 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6241 decl2 = TREE_OPERAND (decl2, 0);
6242 gcc_assert (DECL_P (decl2));
6243 tree mem = build_simple_mem_ref (decl2);
6244 OMP_CLAUSE_DECL (c) = mem;
6245 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6246 if (ctx->outer_context)
6247 {
6248 omp_notice_variable (ctx->outer_context, decl2, true);
6249 omp_notice_variable (ctx->outer_context,
6250 OMP_CLAUSE_SIZE (c), true);
6251 }
6252 }
6253 break;
6254
953ff289
DN
6255 case OMP_CLAUSE_REDUCTION:
6256 case OMP_CLAUSE_COPYIN:
6257 case OMP_CLAUSE_COPYPRIVATE:
6258 case OMP_CLAUSE_IF:
6259 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
6260 case OMP_CLAUSE_NUM_TEAMS:
6261 case OMP_CLAUSE_THREAD_LIMIT:
6262 case OMP_CLAUSE_DIST_SCHEDULE:
6263 case OMP_CLAUSE_DEVICE:
953ff289
DN
6264 case OMP_CLAUSE_SCHEDULE:
6265 case OMP_CLAUSE_NOWAIT:
6266 case OMP_CLAUSE_ORDERED:
6267 case OMP_CLAUSE_DEFAULT:
a68ab351
JJ
6268 case OMP_CLAUSE_UNTIED:
6269 case OMP_CLAUSE_COLLAPSE:
20906c66
JJ
6270 case OMP_CLAUSE_FINAL:
6271 case OMP_CLAUSE_MERGEABLE:
acf0174b 6272 case OMP_CLAUSE_PROC_BIND:
74bf76ed 6273 case OMP_CLAUSE_SAFELEN:
acf0174b 6274 case OMP_CLAUSE_DEPEND:
953ff289
DN
6275 break;
6276
6277 default:
6278 gcc_unreachable ();
6279 }
6280
6281 if (remove)
6282 *list_p = OMP_CLAUSE_CHAIN (c);
6283 else
6284 list_p = &OMP_CLAUSE_CHAIN (c);
6285 }
6286
6287 /* Add in any implicit data sharing. */
6288 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
b8698a0f 6289
953ff289
DN
6290 gimplify_omp_ctxp = ctx->outer_context;
6291 delete_omp_context (ctx);
6292}
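/* Net effect, informally: after the body of a construct has been scanned,

        int a = 1, b = 2;
        #pragma omp parallel
          b = a;

   has its clause list extended with explicit clauses for the implicitly
   determined variables, roughly as if 'shared(a) shared(b)' had been
   written (or firstprivate/private/map, depending on the context), while
   clauses naming variables the body never used are dropped above.  */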
6293
6294/* Gimplify the contents of an OMP_PARALLEL statement. This involves
6295 gimplification of the body, as well as scanning the body for used
6296 variables. We need to do this scan now, because variable-sized
6297 decls will be decomposed during gimplification. */
6298
726a989a
RB
6299static void
6300gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
6301{
6302 tree expr = *expr_p;
726a989a
RB
6303 gimple g;
6304 gimple_seq body = NULL;
d406b663 6305 struct gimplify_ctx gctx;
953ff289 6306
a68ab351
JJ
6307 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6308 OMP_PARALLEL_COMBINED (expr)
6309 ? ORT_COMBINED_PARALLEL
6310 : ORT_PARALLEL);
953ff289 6311
d406b663 6312 push_gimplify_context (&gctx);
953ff289 6313
726a989a
RB
6314 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6315 if (gimple_code (g) == GIMPLE_BIND)
6316 pop_gimplify_context (g);
50674e96 6317 else
726a989a 6318 pop_gimplify_context (NULL);
953ff289
DN
6319
6320 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6321
726a989a
RB
6322 g = gimple_build_omp_parallel (body,
6323 OMP_PARALLEL_CLAUSES (expr),
6324 NULL_TREE, NULL_TREE);
6325 if (OMP_PARALLEL_COMBINED (expr))
6326 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6327 gimplify_seq_add_stmt (pre_p, g);
6328 *expr_p = NULL_TREE;
953ff289
DN
6329}
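/* In short, a GENERIC

        #pragma omp parallel firstprivate (x)
          { ... }

   becomes a GIMPLE_OMP_PARALLEL whose body is the gimplified block and
   whose clause chain has been filtered and extended by
   gimplify_adjust_omp_clauses; the combined flag is carried over for
   'parallel for' and friends.  */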
6330
a68ab351
JJ
6331/* Gimplify the contents of an OMP_TASK statement. This involves
6332 gimplification of the body, as well as scanning the body for used
6333 variables. We need to do this scan now, because variable-sized
6334 decls will be decomposed during gimplification. */
953ff289 6335
726a989a
RB
6336static void
6337gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
953ff289 6338{
a68ab351 6339 tree expr = *expr_p;
726a989a
RB
6340 gimple g;
6341 gimple_seq body = NULL;
d406b663 6342 struct gimplify_ctx gctx;
953ff289 6343
f22f4340
JJ
6344 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6345 find_omp_clause (OMP_TASK_CLAUSES (expr),
6346 OMP_CLAUSE_UNTIED)
6347 ? ORT_UNTIED_TASK : ORT_TASK);
953ff289 6348
d406b663 6349 push_gimplify_context (&gctx);
953ff289 6350
726a989a
RB
6351 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6352 if (gimple_code (g) == GIMPLE_BIND)
6353 pop_gimplify_context (g);
953ff289 6354 else
726a989a 6355 pop_gimplify_context (NULL);
953ff289 6356
a68ab351 6357 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
917948d3 6358
726a989a
RB
6359 g = gimple_build_omp_task (body,
6360 OMP_TASK_CLAUSES (expr),
6361 NULL_TREE, NULL_TREE,
6362 NULL_TREE, NULL_TREE, NULL_TREE);
6363 gimplify_seq_add_stmt (pre_p, g);
6364 *expr_p = NULL_TREE;
a68ab351
JJ
6365}
6366
acf0174b
JJ
6367/* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6368 with non-NULL OMP_FOR_INIT. */
6369
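/* For illustration (a sketch, assuming the usual representation of combined
   loop constructs): given

       #pragma omp for simd
       for (i = 0; i < n; i++) ...

   the outer loop tree can have a NULL OMP_FOR_INIT, and this walker is used
   to locate the innermost OMP_FOR/OMP_SIMD that actually carries the
   init/cond/incr vectors.  */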
6370static tree
6371find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6372{
6373 *walk_subtrees = 0;
6374 switch (TREE_CODE (*tp))
6375 {
6376 case OMP_FOR:
6377 *walk_subtrees = 1;
6378 /* FALLTHRU */
6379 case OMP_SIMD:
6380 if (OMP_FOR_INIT (*tp) != NULL_TREE)
6381 return *tp;
6382 break;
6383 case BIND_EXPR:
6384 case STATEMENT_LIST:
6385 case OMP_PARALLEL:
6386 *walk_subtrees = 1;
6387 break;
6388 default:
6389 break;
6390 }
6391 return NULL_TREE;
6392}
6393
a68ab351
JJ
6394/* Gimplify the gross structure of an OMP_FOR statement. */
6395
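/* For illustration (a sketch, not part of the original comment): for a loop
   such as

       #pragma omp simd
       for (i = 0; i < n; i++) a[i] = b[i] + c[i];

   the code below makes the iteration variable private (for a single
   non-collapsed simd loop an implicit linear clause is added), gimplifies the
   init, cond and increment expressions into FOR_PRE_BODY, and finally builds
   a GIMPLE_OMP_FOR statement of the appropriate kind.  */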
6396static enum gimplify_status
726a989a 6397gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
a68ab351 6398{
acf0174b 6399 tree for_stmt, orig_for_stmt, decl, var, t;
32e8bb8e
ILT
6400 enum gimplify_status ret = GS_ALL_DONE;
6401 enum gimplify_status tret;
726a989a
RB
6402 gimple gfor;
6403 gimple_seq for_body, for_pre_body;
a68ab351 6404 int i;
74bf76ed
JJ
6405 bool simd;
6406 bitmap has_decl_expr = NULL;
a68ab351 6407
acf0174b 6408 orig_for_stmt = for_stmt = *expr_p;
a68ab351 6409
c02065fc
AH
6410 simd = TREE_CODE (for_stmt) == OMP_SIMD
6411 || TREE_CODE (for_stmt) == CILK_SIMD;
a68ab351 6412 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
74bf76ed 6413 simd ? ORT_SIMD : ORT_WORKSHARE);
917948d3 6414
726a989a
RB
6415 /* Handle OMP_FOR_INIT. */
6416 for_pre_body = NULL;
74bf76ed
JJ
6417 if (simd && OMP_FOR_PRE_BODY (for_stmt))
6418 {
6419 has_decl_expr = BITMAP_ALLOC (NULL);
6420 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
6421 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
acf0174b 6422 == VAR_DECL)
74bf76ed
JJ
6423 {
6424 t = OMP_FOR_PRE_BODY (for_stmt);
6425 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6426 }
6427 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
6428 {
6429 tree_stmt_iterator si;
6430 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
6431 tsi_next (&si))
6432 {
6433 t = tsi_stmt (si);
6434 if (TREE_CODE (t) == DECL_EXPR
6435 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
6436 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6437 }
6438 }
6439 }
726a989a
RB
6440 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6441 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
a68ab351 6442
acf0174b
JJ
6443 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
6444 {
6445 for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
6446 NULL, NULL);
6447 gcc_assert (for_stmt != NULL_TREE);
6448 gimplify_omp_ctxp->combined_loop = true;
6449 }
6450
355a7673 6451 for_body = NULL;
a68ab351
JJ
6452 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6453 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6454 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6455 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6456 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6457 {
6458 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
726a989a
RB
6459 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6460 decl = TREE_OPERAND (t, 0);
a68ab351
JJ
6461 gcc_assert (DECL_P (decl));
6462 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6463 || POINTER_TYPE_P (TREE_TYPE (decl)));
6464
6465 /* Make sure the iteration variable is private. */
74bf76ed 6466 tree c = NULL_TREE;
acf0174b
JJ
6467 if (orig_for_stmt != for_stmt)
6468 /* Do this only on innermost construct for combined ones. */;
6469 else if (simd)
74bf76ed
JJ
6470 {
6471 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
6472 (splay_tree_key)decl);
6473 omp_is_private (gimplify_omp_ctxp, decl, simd);
6474 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6475 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6476 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6477 {
6478 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6479 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
6480 if (has_decl_expr
6481 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
6482 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6483 OMP_CLAUSE_DECL (c) = decl;
6484 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6485 OMP_FOR_CLAUSES (for_stmt) = c;
6486 omp_add_variable (gimplify_omp_ctxp, decl,
6487 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6488 }
6489 else
6490 {
6491 bool lastprivate
6492 = (!has_decl_expr
6493 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
6494 c = build_omp_clause (input_location,
6495 lastprivate ? OMP_CLAUSE_LASTPRIVATE
6496 : OMP_CLAUSE_PRIVATE);
6497 OMP_CLAUSE_DECL (c) = decl;
6498 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6499 omp_add_variable (gimplify_omp_ctxp, decl,
6500 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
6501 | GOVD_SEEN);
6502 c = NULL_TREE;
6503 }
6504 }
6505 else if (omp_is_private (gimplify_omp_ctxp, decl, simd))
a68ab351
JJ
6506 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6507 else
6508 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6509
6510 /* If DECL is not a gimple register, create a temporary variable to act
6511 as an iteration counter. This is valid, since DECL cannot be
6512 modified in the body of the loop. */
acf0174b
JJ
6513 if (orig_for_stmt != for_stmt)
6514 var = decl;
6515 else if (!is_gimple_reg (decl))
a68ab351
JJ
6516 {
6517 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
726a989a 6518 TREE_OPERAND (t, 0) = var;
b8698a0f 6519
726a989a 6520 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
953ff289 6521
a68ab351
JJ
6522 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6523 }
6524 else
6525 var = decl;
07beea0d 6526
32e8bb8e 6527 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6528 is_gimple_val, fb_rvalue);
32e8bb8e 6529 ret = MIN (ret, tret);
726a989a
RB
6530 if (ret == GS_ERROR)
6531 return ret;
953ff289 6532
726a989a 6533 /* Handle OMP_FOR_COND. */
a68ab351
JJ
6534 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6535 gcc_assert (COMPARISON_CLASS_P (t));
726a989a 6536 gcc_assert (TREE_OPERAND (t, 0) == decl);
b56b9fe3 6537
32e8bb8e 6538 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6539 is_gimple_val, fb_rvalue);
32e8bb8e 6540 ret = MIN (ret, tret);
917948d3 6541
726a989a 6542 /* Handle OMP_FOR_INCR. */
a68ab351 6543 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
953ff289
DN
6544 switch (TREE_CODE (t))
6545 {
a68ab351
JJ
6546 case PREINCREMENT_EXPR:
6547 case POSTINCREMENT_EXPR:
c02065fc
AH
6548 {
6549 tree decl = TREE_OPERAND (t, 0);
6550	    /* c_omp_for_incr_canonicalize_ptr () should have been
6551	       called to massage things appropriately.  */
6552 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
6553
6554 if (orig_for_stmt != for_stmt)
6555 break;
6556 t = build_int_cst (TREE_TYPE (decl), 1);
6557 if (c)
6558 OMP_CLAUSE_LINEAR_STEP (c) = t;
6559 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6560 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6561 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
acf0174b 6562 break;
c02065fc 6563 }
a68ab351
JJ
6564
6565 case PREDECREMENT_EXPR:
6566 case POSTDECREMENT_EXPR:
acf0174b
JJ
6567 if (orig_for_stmt != for_stmt)
6568 break;
a68ab351 6569 t = build_int_cst (TREE_TYPE (decl), -1);
74bf76ed
JJ
6570 if (c)
6571 OMP_CLAUSE_LINEAR_STEP (c) = t;
a68ab351 6572 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
726a989a 6573 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
a68ab351
JJ
6574 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6575 break;
6576
726a989a
RB
6577 case MODIFY_EXPR:
6578 gcc_assert (TREE_OPERAND (t, 0) == decl);
6579 TREE_OPERAND (t, 0) = var;
a68ab351 6580
726a989a 6581 t = TREE_OPERAND (t, 1);
a68ab351 6582 switch (TREE_CODE (t))
953ff289 6583 {
a68ab351
JJ
6584 case PLUS_EXPR:
6585 if (TREE_OPERAND (t, 1) == decl)
6586 {
6587 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6588 TREE_OPERAND (t, 0) = var;
6589 break;
6590 }
6591
6592 /* Fallthru. */
6593 case MINUS_EXPR:
6594 case POINTER_PLUS_EXPR:
6595 gcc_assert (TREE_OPERAND (t, 0) == decl);
917948d3 6596 TREE_OPERAND (t, 0) = var;
953ff289 6597 break;
a68ab351
JJ
6598 default:
6599 gcc_unreachable ();
953ff289 6600 }
917948d3 6601
32e8bb8e 6602 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6603 is_gimple_val, fb_rvalue);
32e8bb8e 6604 ret = MIN (ret, tret);
74bf76ed
JJ
6605 if (c)
6606 {
6607 OMP_CLAUSE_LINEAR_STEP (c) = TREE_OPERAND (t, 1);
6608 if (TREE_CODE (t) == MINUS_EXPR)
6609 {
6610 t = TREE_OPERAND (t, 1);
6611 OMP_CLAUSE_LINEAR_STEP (c)
6612 = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
6613 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
6614 &for_pre_body, NULL,
6615 is_gimple_val, fb_rvalue);
6616 ret = MIN (ret, tret);
6617 }
6618 }
953ff289 6619 break;
a68ab351 6620
953ff289
DN
6621 default:
6622 gcc_unreachable ();
6623 }
6624
acf0174b
JJ
6625 if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6626 && orig_for_stmt == for_stmt)
a68ab351 6627 {
a68ab351 6628 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
726a989a
RB
6629 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6630 && OMP_CLAUSE_DECL (c) == decl
6631 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6632 {
6633 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6634 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6635 gcc_assert (TREE_OPERAND (t, 0) == var);
6636 t = TREE_OPERAND (t, 1);
6637 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6638 || TREE_CODE (t) == MINUS_EXPR
6639 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6640 gcc_assert (TREE_OPERAND (t, 0) == var);
6641 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6642 TREE_OPERAND (t, 1));
6643 gimplify_assign (decl, t,
6644 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
a68ab351
JJ
6645 }
6646 }
953ff289
DN
6647 }
6648
74bf76ed
JJ
6649 BITMAP_FREE (has_decl_expr);
6650
acf0174b 6651 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
726a989a 6652
acf0174b
JJ
6653 if (orig_for_stmt != for_stmt)
6654 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6655 {
6656 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6657 decl = TREE_OPERAND (t, 0);
6658 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6659 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6660 TREE_OPERAND (t, 0) = var;
6661 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6662 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
6663 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
6664 }
6665
6666 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt));
953ff289 6667
74bf76ed 6668 int kind;
acf0174b 6669 switch (TREE_CODE (orig_for_stmt))
74bf76ed
JJ
6670 {
6671 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
6672 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
c02065fc 6673 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
acf0174b 6674 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
74bf76ed
JJ
6675 default:
6676 gcc_unreachable ();
6677 }
acf0174b 6678 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
726a989a
RB
6679 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6680 for_pre_body);
acf0174b
JJ
6681 if (orig_for_stmt != for_stmt)
6682 gimple_omp_for_set_combined_p (gfor, true);
6683 if (gimplify_omp_ctxp
6684 && (gimplify_omp_ctxp->combined_loop
6685 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
6686 && gimplify_omp_ctxp->outer_context
6687 && gimplify_omp_ctxp->outer_context->combined_loop)))
6688 {
6689 gimple_omp_for_set_combined_into_p (gfor, true);
6690 if (gimplify_omp_ctxp->combined_loop)
6691 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
6692 else
6693 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
6694 }
726a989a
RB
6695
6696 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6697 {
6698 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6699 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6700 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6701 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6702 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6703 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6704 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6705 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6706 }
6707
6708 gimplify_seq_add_stmt (pre_p, gfor);
74bf76ed
JJ
6709 if (ret != GS_ALL_DONE)
6710 return GS_ERROR;
6711 *expr_p = NULL_TREE;
6712 return GS_ALL_DONE;
953ff289
DN
6713}
6714
acf0174b
JJ
6715/* Gimplify the gross structure of other OpenMP constructs.
6716 In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
6717 and OMP_TEAMS. */
953ff289 6718
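/* For illustration (a sketch, not part of the original comment): a construct
   such as

       #pragma omp target data map(tofrom: a[0:n])
       { use (a); }

   is handled here by gimplifying the body and, for target data, wrapping it
   in a GIMPLE_TRY_FINALLY whose cleanup calls BUILT_IN_GOMP_TARGET_END_DATA,
   before building the corresponding GIMPLE_OMP_TARGET statement.  */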
726a989a
RB
6719static void
6720gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
953ff289 6721{
726a989a
RB
6722 tree expr = *expr_p;
6723 gimple stmt;
6724 gimple_seq body = NULL;
acf0174b 6725 enum omp_region_type ort = ORT_WORKSHARE;
953ff289 6726
acf0174b
JJ
6727 switch (TREE_CODE (expr))
6728 {
6729 case OMP_SECTIONS:
6730 case OMP_SINGLE:
6731 break;
6732 case OMP_TARGET:
6733 ort = ORT_TARGET;
6734 break;
6735 case OMP_TARGET_DATA:
6736 ort = ORT_TARGET_DATA;
6737 break;
6738 case OMP_TEAMS:
6739 ort = ORT_TEAMS;
6740 break;
6741 default:
6742 gcc_unreachable ();
6743 }
6744 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
6745 if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
6746 {
6747 struct gimplify_ctx gctx;
6748 push_gimplify_context (&gctx);
6749 gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
6750 if (gimple_code (g) == GIMPLE_BIND)
6751 pop_gimplify_context (g);
6752 else
6753 pop_gimplify_context (NULL);
6754 if (ort == ORT_TARGET_DATA)
6755 {
6756 gimple_seq cleanup = NULL;
6757 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA);
6758 g = gimple_build_call (fn, 0);
6759 gimple_seq_add_stmt (&cleanup, g);
6760 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
6761 body = NULL;
6762 gimple_seq_add_stmt (&body, g);
6763 }
6764 }
6765 else
6766 gimplify_and_add (OMP_BODY (expr), &body);
726a989a 6767 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
953ff289 6768
acf0174b
JJ
6769 switch (TREE_CODE (expr))
6770 {
6771 case OMP_SECTIONS:
6772 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6773 break;
6774 case OMP_SINGLE:
6775 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6776 break;
6777 case OMP_TARGET:
6778 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
6779 OMP_CLAUSES (expr));
6780 break;
6781 case OMP_TARGET_DATA:
6782 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
6783 OMP_CLAUSES (expr));
6784 break;
6785 case OMP_TEAMS:
6786 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
6787 break;
6788 default:
6789 gcc_unreachable ();
6790 }
6791
6792 gimplify_seq_add_stmt (pre_p, stmt);
6793 *expr_p = NULL_TREE;
6794}
6795
6796/* Gimplify the gross structure of OpenMP target update construct. */
6797
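/* For illustration (a sketch, not part of the original comment): a directive
   like

       #pragma omp target update from(a[0:n])

   has no body; only its clauses are scanned and adjusted before a bodyless
   GIMPLE_OMP_TARGET with kind GF_OMP_TARGET_KIND_UPDATE is emitted.  */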
6798static void
6799gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
6800{
6801 tree expr = *expr_p;
6802 gimple stmt;
6803
6804 gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
6805 ORT_WORKSHARE);
6806 gimplify_adjust_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr));
6807 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
6808 OMP_TARGET_UPDATE_CLAUSES (expr));
726a989a
RB
6809
6810 gimplify_seq_add_stmt (pre_p, stmt);
acf0174b 6811 *expr_p = NULL_TREE;
953ff289
DN
6812}
6813
6814/* A subroutine of gimplify_omp_atomic. The front end is supposed to have
b8698a0f 6815 stabilized the lhs of the atomic operation as *ADDR. Return true if
953ff289
DN
6816 EXPR is this stabilized form. */
6817
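/* For illustration (a sketch of the expected front-end output, not part of
   the original comment): for

       #pragma omp atomic
       x += 1;

   the lhs is stabilized roughly as *&x with ADDR == &x, so this predicate
   must recognize *&x (possibly wrapped in useless type conversions) as being
   the stabilized lhs.  */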
6818static bool
a509ebb5 6819goa_lhs_expr_p (tree expr, tree addr)
953ff289
DN
6820{
6821 /* Also include casts to other type variants. The C front end is fond
b8698a0f 6822 of adding these for e.g. volatile variables. This is like
953ff289 6823 STRIP_TYPE_NOPS but includes the main variant lookup. */
9600efe1 6824 STRIP_USELESS_TYPE_CONVERSION (expr);
953ff289 6825
78e47463
JJ
6826 if (TREE_CODE (expr) == INDIRECT_REF)
6827 {
6828 expr = TREE_OPERAND (expr, 0);
6829 while (expr != addr
1043771b 6830 && (CONVERT_EXPR_P (expr)
78e47463
JJ
6831 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6832 && TREE_CODE (expr) == TREE_CODE (addr)
9600efe1 6833 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
78e47463
JJ
6834 {
6835 expr = TREE_OPERAND (expr, 0);
6836 addr = TREE_OPERAND (addr, 0);
6837 }
251923f5
JJ
6838 if (expr == addr)
6839 return true;
71458b8a
JJ
6840 return (TREE_CODE (addr) == ADDR_EXPR
6841 && TREE_CODE (expr) == ADDR_EXPR
251923f5 6842 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
78e47463 6843 }
953ff289
DN
6844 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6845 return true;
6846 return false;
6847}
6848
ad19c4be
EB
6849/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
6850 expression does not involve the lhs, evaluate it into a temporary.
6851 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
6852 or -1 if an error was encountered. */
953ff289
DN
6853
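/* For illustration (a sketch, not part of the original comment): given

       #pragma omp atomic
       x = x + foo ();

   the call to foo () does not involve the lhs, so it is gimplified into a
   temporary in PRE_P, while the occurrence of x itself is replaced by
   LHS_VAR, the value produced by the GIMPLE_OMP_ATOMIC_LOAD.  */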
6854static int
726a989a
RB
6855goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6856 tree lhs_var)
953ff289
DN
6857{
6858 tree expr = *expr_p;
6859 int saw_lhs;
6860
6861 if (goa_lhs_expr_p (expr, lhs_addr))
6862 {
6863 *expr_p = lhs_var;
6864 return 1;
6865 }
6866 if (is_gimple_val (expr))
6867 return 0;
b8698a0f 6868
953ff289
DN
6869 saw_lhs = 0;
6870 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6871 {
6872 case tcc_binary:
067dd3c9 6873 case tcc_comparison:
726a989a
RB
6874 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6875 lhs_var);
953ff289 6876 case tcc_unary:
726a989a
RB
6877 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6878 lhs_var);
953ff289 6879 break;
067dd3c9
JJ
6880 case tcc_expression:
6881 switch (TREE_CODE (expr))
6882 {
6883 case TRUTH_ANDIF_EXPR:
6884 case TRUTH_ORIF_EXPR:
f2b11865
JJ
6885 case TRUTH_AND_EXPR:
6886 case TRUTH_OR_EXPR:
6887 case TRUTH_XOR_EXPR:
067dd3c9
JJ
6888 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6889 lhs_addr, lhs_var);
f2b11865 6890 case TRUTH_NOT_EXPR:
067dd3c9
JJ
6891 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6892 lhs_addr, lhs_var);
6893 break;
4063e61b
JM
6894 case COMPOUND_EXPR:
6895 /* Break out any preevaluations from cp_build_modify_expr. */
6896 for (; TREE_CODE (expr) == COMPOUND_EXPR;
6897 expr = TREE_OPERAND (expr, 1))
6898 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
6899 *expr_p = expr;
6900 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
067dd3c9
JJ
6901 default:
6902 break;
6903 }
6904 break;
953ff289
DN
6905 default:
6906 break;
6907 }
6908
6909 if (saw_lhs == 0)
6910 {
6911 enum gimplify_status gs;
6912 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6913 if (gs != GS_ALL_DONE)
6914 saw_lhs = -1;
6915 }
6916
6917 return saw_lhs;
6918}
6919
953ff289
DN
6920/* Gimplify an OMP_ATOMIC statement. */
6921
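/* For illustration (a sketch, not part of the original comment): under
   "#pragma omp atomic capture", a statement like "v = x++;" maps to
   OMP_ATOMIC_CAPTURE_OLD (*EXPR_P becomes the value loaded before the
   update), while "v = ++x;" maps to OMP_ATOMIC_CAPTURE_NEW (*EXPR_P becomes
   the stored, updated value).  Both are lowered below into a
   GIMPLE_OMP_ATOMIC_LOAD / GIMPLE_OMP_ATOMIC_STORE pair, with the need-value
   flag set on the load or the store respectively.  */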
6922static enum gimplify_status
726a989a 6923gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
6924{
6925 tree addr = TREE_OPERAND (*expr_p, 0);
20906c66
JJ
6926 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
6927 ? NULL : TREE_OPERAND (*expr_p, 1);
953ff289 6928 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
726a989a 6929 tree tmp_load;
20906c66 6930 gimple loadstmt, storestmt;
953ff289 6931
20906c66
JJ
6932 tmp_load = create_tmp_reg (type, NULL);
6933 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6934 return GS_ERROR;
6935
6936 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6937 != GS_ALL_DONE)
6938 return GS_ERROR;
953ff289 6939
20906c66
JJ
6940 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
6941 gimplify_seq_add_stmt (pre_p, loadstmt);
6942 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6943 != GS_ALL_DONE)
6944 return GS_ERROR;
953ff289 6945
20906c66
JJ
6946 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
6947 rhs = tmp_load;
6948 storestmt = gimple_build_omp_atomic_store (rhs);
6949 gimplify_seq_add_stmt (pre_p, storestmt);
acf0174b
JJ
6950 if (OMP_ATOMIC_SEQ_CST (*expr_p))
6951 {
6952 gimple_omp_atomic_set_seq_cst (loadstmt);
6953 gimple_omp_atomic_set_seq_cst (storestmt);
6954 }
20906c66
JJ
6955 switch (TREE_CODE (*expr_p))
6956 {
6957 case OMP_ATOMIC_READ:
6958 case OMP_ATOMIC_CAPTURE_OLD:
6959 *expr_p = tmp_load;
6960 gimple_omp_atomic_set_need_value (loadstmt);
6961 break;
6962 case OMP_ATOMIC_CAPTURE_NEW:
6963 *expr_p = rhs;
6964 gimple_omp_atomic_set_need_value (storestmt);
6965 break;
6966 default:
6967 *expr_p = NULL;
6968 break;
6969 }
a509ebb5 6970
acf0174b 6971 return GS_ALL_DONE;
953ff289 6972}
6de9cd9a 6973
0a35513e
AH
6974/* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
6975 body, and adding some EH bits. */
6976
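/* For illustration (a sketch, not part of the original comment): a statement
   such as

       __transaction_relaxed { x++; }

   has its body wrapped in a BIND_EXPR if it is not one already, gimplified in
   a fresh context, and emitted as a GIMPLE_TRANSACTION whose subcode records
   GTMA_IS_OUTER or GTMA_IS_RELAXED as appropriate.  */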
6977static enum gimplify_status
6978gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
6979{
6980 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
6981 gimple g;
6982 gimple_seq body = NULL;
6983 struct gimplify_ctx gctx;
6984 int subcode = 0;
6985
6986 /* Wrap the transaction body in a BIND_EXPR so we have a context
6987 where to put decls for OpenMP. */
6988 if (TREE_CODE (tbody) != BIND_EXPR)
6989 {
6990 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
6991 TREE_SIDE_EFFECTS (bind) = 1;
6992 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
6993 TRANSACTION_EXPR_BODY (expr) = bind;
6994 }
6995
6996 push_gimplify_context (&gctx);
6997 temp = voidify_wrapper_expr (*expr_p, NULL);
6998
6999 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7000 pop_gimplify_context (g);
7001
7002 g = gimple_build_transaction (body, NULL);
7003 if (TRANSACTION_EXPR_OUTER (expr))
7004 subcode = GTMA_IS_OUTER;
7005 else if (TRANSACTION_EXPR_RELAXED (expr))
7006 subcode = GTMA_IS_RELAXED;
7007 gimple_transaction_set_subcode (g, subcode);
7008
7009 gimplify_seq_add_stmt (pre_p, g);
7010
7011 if (temp)
7012 {
7013 *expr_p = temp;
7014 return GS_OK;
7015 }
7016
7017 *expr_p = NULL_TREE;
7018 return GS_ALL_DONE;
7019}
7020
ad19c4be 7021/* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
726a989a
RB
7022 expression produces a value to be used as an operand inside a GIMPLE
7023 statement, the value will be stored back in *EXPR_P. This value will
7024 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7025 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7026 emitted in PRE_P and POST_P.
7027
7028 Additionally, this process may overwrite parts of the input
7029 expression during gimplification. Ideally, it should be
7030 possible to do non-destructive gimplification.
7031
7032 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7033 the expression needs to evaluate to a value to be used as
7034 an operand in a GIMPLE statement, this value will be stored in
7035 *EXPR_P on exit. This happens when the caller specifies one
7036 of fb_lvalue or fb_rvalue fallback flags.
7037
7038 PRE_P will contain the sequence of GIMPLE statements corresponding
7039 to the evaluation of EXPR and all the side-effects that must
7040 be executed before the main expression. On exit, the last
7041 statement of PRE_P is the core statement being gimplified. For
7042 instance, when gimplifying 'if (++a)' the last statement in
7043 PRE_P will be 'if (t.1)' where t.1 is the result of
7044 pre-incrementing 'a'.
7045
7046 POST_P will contain the sequence of GIMPLE statements corresponding
7047 to the evaluation of all the side-effects that must be executed
7048 after the main expression. If this is NULL, the post
7049 side-effects are stored at the end of PRE_P.
7050
7051 The reason why the output is split in two is to handle post
7052 side-effects explicitly. In some cases, an expression may have
7053 inner and outer post side-effects which need to be emitted in
7054 an order different from the one given by the recursive
7055 traversal. For instance, for the expression (*p--)++ the post
7056 side-effects of '--' must actually occur *after* the post
7057 side-effects of '++'. However, gimplification will first visit
7058 the inner expression, so if a separate POST sequence was not
7059 used, the resulting sequence would be:
7060
7061 1 t.1 = *p
7062 2 p = p - 1
7063 3 t.2 = t.1 + 1
7064 4 *p = t.2
7065
7066 However, the post-decrement operation in line #2 must not be
7067 evaluated until after the store to *p at line #4, so the
7068 correct sequence should be:
7069
7070 1 t.1 = *p
7071 2 t.2 = t.1 + 1
7072 3 *p = t.2
7073 4 p = p - 1
7074
7075 So, by specifying a separate post queue, it is possible
7076 to emit the post side-effects in the correct order.
7077 If POST_P is NULL, an internal queue will be used. Before
7078 returning to the caller, the sequence POST_P is appended to
7079 the main output sequence PRE_P.
7080
7081 GIMPLE_TEST_F points to a function that takes a tree T and
7082 returns nonzero if T is in the GIMPLE form requested by the
12947319 7083 caller. The GIMPLE predicates are in gimple.c.
726a989a
RB
7084
7085 FALLBACK tells the function what sort of a temporary we want if
7086 gimplification cannot produce an expression that complies with
7087 GIMPLE_TEST_F.
7088
7089 fb_none means that no temporary should be generated
7090 fb_rvalue means that an rvalue is OK to generate
7091 fb_lvalue means that an lvalue is OK to generate
7092 fb_either means that either is OK, but an lvalue is preferable.
7093 fb_mayfail means that gimplification may fail (in which case
7094 GS_ERROR will be returned)
7095
7096 The return value is either GS_ERROR or GS_ALL_DONE, since this
7097 function iterates until EXPR is completely gimplified or an error
7098 occurs. */
6de9cd9a
DN
7099
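/* For illustration (a sketch of a typical internal call site, not part of
   the original comment): an operand is commonly gimplified to an rvalue with

       ret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL,
                            is_gimple_val, fb_rvalue);

   passing NULL for POST_P so that any post side-effects are appended to
   PRE_P, and fb_rvalue so that a temporary may be created if needed.  */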
7100enum gimplify_status
726a989a
RB
7101gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7102 bool (*gimple_test_f) (tree), fallback_t fallback)
6de9cd9a
DN
7103{
7104 tree tmp;
726a989a
RB
7105 gimple_seq internal_pre = NULL;
7106 gimple_seq internal_post = NULL;
6de9cd9a 7107 tree save_expr;
726a989a 7108 bool is_statement;
6de9cd9a
DN
7109 location_t saved_location;
7110 enum gimplify_status ret;
726a989a 7111 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6de9cd9a
DN
7112
7113 save_expr = *expr_p;
7114 if (save_expr == NULL_TREE)
7115 return GS_ALL_DONE;
7116
726a989a
RB
7117 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7118 is_statement = gimple_test_f == is_gimple_stmt;
7119 if (is_statement)
7120 gcc_assert (pre_p);
7121
7122 /* Consistency checks. */
7123 if (gimple_test_f == is_gimple_reg)
7124 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7125 else if (gimple_test_f == is_gimple_val
726a989a
RB
7126 || gimple_test_f == is_gimple_call_addr
7127 || gimple_test_f == is_gimple_condexpr
7128 || gimple_test_f == is_gimple_mem_rhs
ba4d8f9d 7129 || gimple_test_f == is_gimple_mem_rhs_or_call
726a989a 7130 || gimple_test_f == is_gimple_reg_rhs
ba4d8f9d 7131 || gimple_test_f == is_gimple_reg_rhs_or_call
70f34814
RG
7132 || gimple_test_f == is_gimple_asm_val
7133 || gimple_test_f == is_gimple_mem_ref_addr)
726a989a
RB
7134 gcc_assert (fallback & fb_rvalue);
7135 else if (gimple_test_f == is_gimple_min_lval
7136 || gimple_test_f == is_gimple_lvalue)
7137 gcc_assert (fallback & fb_lvalue);
7138 else if (gimple_test_f == is_gimple_addressable)
7139 gcc_assert (fallback & fb_either);
7140 else if (gimple_test_f == is_gimple_stmt)
7141 gcc_assert (fallback == fb_none);
7142 else
7143 {
7144 /* We should have recognized the GIMPLE_TEST_F predicate to
7145 know what kind of fallback to use in case a temporary is
7146 needed to hold the value or address of *EXPR_P. */
7147 gcc_unreachable ();
7148 }
7149
6de9cd9a
DN
7150 /* We used to check the predicate here and return immediately if it
7151 succeeds. This is wrong; the design is for gimplification to be
7152 idempotent, and for the predicates to only test for valid forms, not
7153 whether they are fully simplified. */
6de9cd9a
DN
7154 if (pre_p == NULL)
7155 pre_p = &internal_pre;
726a989a 7156
6de9cd9a
DN
7157 if (post_p == NULL)
7158 post_p = &internal_post;
7159
726a989a
RB
7160 /* Remember the last statements added to PRE_P and POST_P. Every
7161 new statement added by the gimplification helpers needs to be
7162 annotated with location information. To centralize the
7163 responsibility, we remember the last statement that had been
7164 added to both queues before gimplifying *EXPR_P. If
7165 gimplification produces new statements in PRE_P and POST_P, those
7166 statements will be annotated with the same location information
7167 as *EXPR_P. */
7168 pre_last_gsi = gsi_last (*pre_p);
7169 post_last_gsi = gsi_last (*post_p);
7170
6de9cd9a 7171 saved_location = input_location;
a281759f
PB
7172 if (save_expr != error_mark_node
7173 && EXPR_HAS_LOCATION (*expr_p))
7174 input_location = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
7175
7176 /* Loop over the specific gimplifiers until the toplevel node
7177 remains the same. */
7178 do
7179 {
73d6ddef
RK
7180 /* Strip away as many useless type conversions as possible
7181 at the toplevel. */
7182 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6de9cd9a
DN
7183
7184 /* Remember the expr. */
7185 save_expr = *expr_p;
7186
7187 /* Die, die, die, my darling. */
7188 if (save_expr == error_mark_node
726a989a 7189 || (TREE_TYPE (save_expr)
65355d53 7190 && TREE_TYPE (save_expr) == error_mark_node))
6de9cd9a
DN
7191 {
7192 ret = GS_ERROR;
7193 break;
7194 }
7195
7196 /* Do any language-specific gimplification. */
32e8bb8e
ILT
7197 ret = ((enum gimplify_status)
7198 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6de9cd9a
DN
7199 if (ret == GS_OK)
7200 {
7201 if (*expr_p == NULL_TREE)
7202 break;
7203 if (*expr_p != save_expr)
7204 continue;
7205 }
7206 else if (ret != GS_UNHANDLED)
7207 break;
7208
941f78d1
JM
7209 /* Make sure that all the cases set 'ret' appropriately. */
7210 ret = GS_UNHANDLED;
6de9cd9a
DN
7211 switch (TREE_CODE (*expr_p))
7212 {
7213 /* First deal with the special cases. */
7214
7215 case POSTINCREMENT_EXPR:
7216 case POSTDECREMENT_EXPR:
7217 case PREINCREMENT_EXPR:
7218 case PREDECREMENT_EXPR:
7219 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
cc3c4f62
RB
7220 fallback != fb_none,
7221 TREE_TYPE (*expr_p));
6de9cd9a
DN
7222 break;
7223
7224 case ARRAY_REF:
44de5aeb
RK
7225 case ARRAY_RANGE_REF:
7226 case REALPART_EXPR:
7227 case IMAGPART_EXPR:
6de9cd9a 7228 case COMPONENT_REF:
9e51aaf5 7229 case VIEW_CONVERT_EXPR:
6de9cd9a 7230 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
90051e16 7231 fallback ? fallback : fb_rvalue);
6de9cd9a
DN
7232 break;
7233
7234 case COND_EXPR:
dae7ec87 7235 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
726a989a 7236
0223e4f5
JM
7237 /* C99 code may assign to an array in a structure value of a
7238 conditional expression, and this has undefined behavior
7239 only on execution, so create a temporary if an lvalue is
7240 required. */
7241 if (fallback == fb_lvalue)
7242 {
7243 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7244 mark_addressable (*expr_p);
941f78d1 7245 ret = GS_OK;
0223e4f5 7246 }
6de9cd9a
DN
7247 break;
7248
939b37da
BI
7249 case CILK_SPAWN_STMT:
7250 gcc_assert
7251 (fn_contains_cilk_spawn_p (cfun)
7252 && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p));
7253 if (!seen_error ())
7254 {
7255 ret = (enum gimplify_status)
7256 lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p,
7257 post_p);
7258 break;
7259 }
7260 /* If errors are seen, then just process it as a CALL_EXPR. */
7261
6de9cd9a 7262 case CALL_EXPR:
90051e16 7263 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
726a989a 7264
0223e4f5
JM
7265 /* C99 code may assign to an array in a structure returned
7266 from a function, and this has undefined behavior only on
7267 execution, so create a temporary if an lvalue is
7268 required. */
7269 if (fallback == fb_lvalue)
7270 {
7271 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7272 mark_addressable (*expr_p);
941f78d1 7273 ret = GS_OK;
0223e4f5 7274 }
6de9cd9a
DN
7275 break;
7276
7277 case TREE_LIST:
282899df 7278 gcc_unreachable ();
6de9cd9a
DN
7279
7280 case COMPOUND_EXPR:
7281 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7282 break;
7283
2ec5deb5 7284 case COMPOUND_LITERAL_EXPR:
4c53d183
MM
7285 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7286 gimple_test_f, fallback);
2ec5deb5
PB
7287 break;
7288
6de9cd9a
DN
7289 case MODIFY_EXPR:
7290 case INIT_EXPR:
ebad5233
JM
7291 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7292 fallback != fb_none);
6de9cd9a
DN
7293 break;
7294
7295 case TRUTH_ANDIF_EXPR:
7296 case TRUTH_ORIF_EXPR:
1d15f620
KT
7297 {
7298 /* Preserve the original type of the expression and the
7299 source location of the outer expression. */
7300 tree org_type = TREE_TYPE (*expr_p);
7301 *expr_p = gimple_boolify (*expr_p);
4b4455e5 7302 *expr_p = build3_loc (input_location, COND_EXPR,
1d15f620
KT
7303 org_type, *expr_p,
7304 fold_convert_loc
4b4455e5 7305 (input_location,
1d15f620
KT
7306 org_type, boolean_true_node),
7307 fold_convert_loc
4b4455e5 7308 (input_location,
1d15f620
KT
7309 org_type, boolean_false_node));
7310 ret = GS_OK;
7311 break;
7312 }
6de9cd9a
DN
7313
7314 case TRUTH_NOT_EXPR:
3c6cbf7a 7315 {
53020648
RG
7316 tree type = TREE_TYPE (*expr_p);
7317 /* The parsers are careful to generate TRUTH_NOT_EXPR
7318 only with operands that are always zero or one.
7319 We do not fold here but handle the only interesting case
7320 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
3c6cbf7a 7321 *expr_p = gimple_boolify (*expr_p);
53020648
RG
7322 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7323 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7324 TREE_TYPE (*expr_p),
7325 TREE_OPERAND (*expr_p, 0));
7326 else
7327 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7328 TREE_TYPE (*expr_p),
7329 TREE_OPERAND (*expr_p, 0),
7330 build_int_cst (TREE_TYPE (*expr_p), 1));
7331 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7332 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7333 ret = GS_OK;
bd5d002e 7334 break;
3c6cbf7a 7335 }
67339062 7336
6de9cd9a
DN
7337 case ADDR_EXPR:
7338 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7339 break;
7340
8170608b
TB
7341 case ANNOTATE_EXPR:
7342 {
7343 tree cond = TREE_OPERAND (*expr_p, 0);
7344 tree id = TREE_OPERAND (*expr_p, 1);
7345 tree tmp = create_tmp_var_raw (TREE_TYPE(cond), NULL);
7346 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
7347 gimple call = gimple_build_call_internal (IFN_ANNOTATE, 2,
7348 cond, id);
7349 gimple_call_set_lhs (call, tmp);
7350 gimplify_seq_add_stmt (pre_p, call);
7351 *expr_p = tmp;
7352 ret = GS_ALL_DONE;
7353 break;
7354 }
7355
6de9cd9a 7356 case VA_ARG_EXPR:
cd3ce9b4 7357 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6de9cd9a
DN
7358 break;
7359
1043771b 7360 CASE_CONVERT:
6de9cd9a
DN
7361 if (IS_EMPTY_STMT (*expr_p))
7362 {
7363 ret = GS_ALL_DONE;
7364 break;
7365 }
7366
7367 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7368 || fallback == fb_none)
7369 {
7370 /* Just strip a conversion to void (or in void context) and
7371 try again. */
7372 *expr_p = TREE_OPERAND (*expr_p, 0);
941f78d1 7373 ret = GS_OK;
6de9cd9a
DN
7374 break;
7375 }
7376
7377 ret = gimplify_conversion (expr_p);
7378 if (ret == GS_ERROR)
7379 break;
7380 if (*expr_p != save_expr)
7381 break;
7382 /* FALLTHRU */
7383
7384 case FIX_TRUNC_EXPR:
6de9cd9a
DN
7385 /* unary_expr: ... | '(' cast ')' val | ... */
7386 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7387 is_gimple_val, fb_rvalue);
7388 recalculate_side_effects (*expr_p);
7389 break;
7390
6a720599 7391 case INDIRECT_REF:
70f34814
RG
7392 {
7393 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
3748f5c9 7394 bool notrap = TREE_THIS_NOTRAP (*expr_p);
70f34814
RG
7395 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7396
7397 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7398 if (*expr_p != save_expr)
7399 {
7400 ret = GS_OK;
7401 break;
7402 }
7403
7404 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7405 is_gimple_reg, fb_rvalue);
dca26746
RG
7406 if (ret == GS_ERROR)
7407 break;
70f34814 7408
dca26746 7409 recalculate_side_effects (*expr_p);
70f34814
RG
7410 *expr_p = fold_build2_loc (input_location, MEM_REF,
7411 TREE_TYPE (*expr_p),
7412 TREE_OPERAND (*expr_p, 0),
7413 build_int_cst (saved_ptr_type, 0));
7414 TREE_THIS_VOLATILE (*expr_p) = volatilep;
3748f5c9 7415 TREE_THIS_NOTRAP (*expr_p) = notrap;
70f34814
RG
7416 ret = GS_OK;
7417 break;
7418 }
7419
7420	 /* We arrive here through the various re-gimplification paths.  */
7421 case MEM_REF:
7422 /* First try re-folding the whole thing. */
7423 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7424 TREE_OPERAND (*expr_p, 0),
7425 TREE_OPERAND (*expr_p, 1));
7426 if (tmp)
941f78d1 7427 {
70f34814
RG
7428 *expr_p = tmp;
7429 recalculate_side_effects (*expr_p);
941f78d1
JM
7430 ret = GS_OK;
7431 break;
7432 }
01718e96
RG
7433 /* Avoid re-gimplifying the address operand if it is already
7434 in suitable form. Re-gimplifying would mark the address
7435 operand addressable. Always gimplify when not in SSA form
7436 as we still may have to gimplify decls with value-exprs. */
7437 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7438 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7439 {
7440 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7441 is_gimple_mem_ref_addr, fb_rvalue);
7442 if (ret == GS_ERROR)
7443 break;
7444 }
6de9cd9a 7445 recalculate_side_effects (*expr_p);
70f34814 7446 ret = GS_ALL_DONE;
6de9cd9a
DN
7447 break;
7448
01718e96 7449 /* Constants need not be gimplified. */
6de9cd9a
DN
7450 case INTEGER_CST:
7451 case REAL_CST:
325217ed 7452 case FIXED_CST:
6de9cd9a
DN
7453 case STRING_CST:
7454 case COMPLEX_CST:
7455 case VECTOR_CST:
3f5c390d
RB
7456 /* Drop the overflow flag on constants, we do not want
7457 that in the GIMPLE IL. */
7458 if (TREE_OVERFLOW_P (*expr_p))
7459 *expr_p = drop_tree_overflow (*expr_p);
6de9cd9a
DN
7460 ret = GS_ALL_DONE;
7461 break;
7462
7463 case CONST_DECL:
0534fa56 7464 /* If we require an lvalue, such as for ADDR_EXPR, retain the
2a7e31df 7465 CONST_DECL node. Otherwise the decl is replaceable by its
0534fa56
RH
7466 value. */
7467 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7468 if (fallback & fb_lvalue)
7469 ret = GS_ALL_DONE;
7470 else
941f78d1
JM
7471 {
7472 *expr_p = DECL_INITIAL (*expr_p);
7473 ret = GS_OK;
7474 }
6de9cd9a
DN
7475 break;
7476
350fae66 7477 case DECL_EXPR:
726a989a 7478 ret = gimplify_decl_expr (expr_p, pre_p);
350fae66
RK
7479 break;
7480
6de9cd9a 7481 case BIND_EXPR:
c6c7698d 7482 ret = gimplify_bind_expr (expr_p, pre_p);
6de9cd9a
DN
7483 break;
7484
7485 case LOOP_EXPR:
7486 ret = gimplify_loop_expr (expr_p, pre_p);
7487 break;
7488
7489 case SWITCH_EXPR:
7490 ret = gimplify_switch_expr (expr_p, pre_p);
7491 break;
7492
6de9cd9a
DN
7493 case EXIT_EXPR:
7494 ret = gimplify_exit_expr (expr_p);
7495 break;
7496
7497 case GOTO_EXPR:
7498 /* If the target is not a LABEL_DECL, then it is a computed jump
7499 and the target needs to be gimplified. */
7500 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
8c50b495
JJ
7501 {
7502 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7503 NULL, is_gimple_val, fb_rvalue);
7504 if (ret == GS_ERROR)
7505 break;
7506 }
726a989a
RB
7507 gimplify_seq_add_stmt (pre_p,
7508 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
941f78d1 7509 ret = GS_ALL_DONE;
6de9cd9a
DN
7510 break;
7511
2e28e797 7512 case PREDICT_EXPR:
726a989a
RB
7513 gimplify_seq_add_stmt (pre_p,
7514 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7515 PREDICT_EXPR_OUTCOME (*expr_p)));
7516 ret = GS_ALL_DONE;
7517 break;
2e28e797 7518
6de9cd9a
DN
7519 case LABEL_EXPR:
7520 ret = GS_ALL_DONE;
282899df
NS
7521 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7522 == current_function_decl);
726a989a
RB
7523 gimplify_seq_add_stmt (pre_p,
7524 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6de9cd9a
DN
7525 break;
7526
7527 case CASE_LABEL_EXPR:
726a989a 7528 ret = gimplify_case_label_expr (expr_p, pre_p);
6de9cd9a
DN
7529 break;
7530
7531 case RETURN_EXPR:
7532 ret = gimplify_return_expr (*expr_p, pre_p);
7533 break;
7534
7535 case CONSTRUCTOR:
48eb4e53
RK
7536 /* Don't reduce this in place; let gimplify_init_constructor work its
7537 magic. But if we're just elaborating this for side effects, just
7538 gimplify any element that has side-effects. */
7539 if (fallback == fb_none)
7540 {
4038c495 7541 unsigned HOST_WIDE_INT ix;
ac47786e 7542 tree val;
08330ec2 7543 tree temp = NULL_TREE;
ac47786e
NF
7544 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7545 if (TREE_SIDE_EFFECTS (val))
7546 append_to_statement_list (val, &temp);
48eb4e53 7547
08330ec2 7548 *expr_p = temp;
941f78d1 7549 ret = temp ? GS_OK : GS_ALL_DONE;
48eb4e53 7550 }
ca0b7d18
AP
7551 /* C99 code may assign to an array in a constructed
7552 structure or union, and this has undefined behavior only
7553 on execution, so create a temporary if an lvalue is
7554 required. */
7555 else if (fallback == fb_lvalue)
7556 {
7557 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7558 mark_addressable (*expr_p);
941f78d1 7559 ret = GS_OK;
ca0b7d18 7560 }
08330ec2
AP
7561 else
7562 ret = GS_ALL_DONE;
6de9cd9a
DN
7563 break;
7564
7565 /* The following are special cases that are not handled by the
7566 original GIMPLE grammar. */
7567
7568 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7569 eliminated. */
7570 case SAVE_EXPR:
7571 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7572 break;
7573
7574 case BIT_FIELD_REF:
ea814c66
EB
7575 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7576 post_p, is_gimple_lvalue, fb_either);
7577 recalculate_side_effects (*expr_p);
6de9cd9a
DN
7578 break;
7579
150e3929
RG
7580 case TARGET_MEM_REF:
7581 {
7582 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7583
23a534a1 7584 if (TMR_BASE (*expr_p))
150e3929 7585 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
4d948885 7586 post_p, is_gimple_mem_ref_addr, fb_either);
150e3929
RG
7587 if (TMR_INDEX (*expr_p))
7588 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7589 post_p, is_gimple_val, fb_rvalue);
4d948885
RG
7590 if (TMR_INDEX2 (*expr_p))
7591 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7592 post_p, is_gimple_val, fb_rvalue);
150e3929
RG
7593 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7594 ret = MIN (r0, r1);
7595 }
7596 break;
7597
6de9cd9a
DN
7598 case NON_LVALUE_EXPR:
7599 /* This should have been stripped above. */
282899df 7600 gcc_unreachable ();
6de9cd9a
DN
7601
7602 case ASM_EXPR:
7603 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7604 break;
7605
7606 case TRY_FINALLY_EXPR:
7607 case TRY_CATCH_EXPR:
726a989a
RB
7608 {
7609 gimple_seq eval, cleanup;
7610 gimple try_;
7611
820055a0
DC
7612	    /* Calls to destructors are generated automatically in the FINALLY/CATCH
7613	       block.  They should have UNKNOWN_LOCATION as their location.  However,
7614	       gimplify_call_expr resets a call stmt's location to input_location
7615	       if it finds that the stmt's location is unknown.  To prevent this
7616	       resetting for destructors, we set input_location to UNKNOWN_LOCATION.
7617	       Note that this only affects the destructor calls in the FINALLY/CATCH
7618	       block, and that input_location is automatically restored to its
7619	       original value by the end of gimplify_expr.  */
7620 input_location = UNKNOWN_LOCATION;
726a989a
RB
7621 eval = cleanup = NULL;
7622 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7623 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
cc8b343d
JJ
7624 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7625 if (gimple_seq_empty_p (cleanup))
7626 {
7627 gimple_seq_add_seq (pre_p, eval);
7628 ret = GS_ALL_DONE;
7629 break;
7630 }
726a989a
RB
7631 try_ = gimple_build_try (eval, cleanup,
7632 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7633 ? GIMPLE_TRY_FINALLY
7634 : GIMPLE_TRY_CATCH);
e368f44f
DC
7635 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
7636 gimple_set_location (try_, saved_location);
7637 else
7638 gimple_set_location (try_, EXPR_LOCATION (save_expr));
726a989a
RB
7639 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7640 gimple_try_set_catch_is_cleanup (try_,
7641 TRY_CATCH_IS_CLEANUP (*expr_p));
7642 gimplify_seq_add_stmt (pre_p, try_);
7643 ret = GS_ALL_DONE;
7644 break;
7645 }
6de9cd9a
DN
7646
7647 case CLEANUP_POINT_EXPR:
7648 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7649 break;
7650
7651 case TARGET_EXPR:
7652 ret = gimplify_target_expr (expr_p, pre_p, post_p);
7653 break;
7654
7655 case CATCH_EXPR:
726a989a
RB
7656 {
7657 gimple c;
7658 gimple_seq handler = NULL;
7659 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7660 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7661 gimplify_seq_add_stmt (pre_p, c);
7662 ret = GS_ALL_DONE;
7663 break;
7664 }
6de9cd9a
DN
7665
7666 case EH_FILTER_EXPR:
726a989a
RB
7667 {
7668 gimple ehf;
7669 gimple_seq failure = NULL;
7670
7671 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
7672 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
d665b6e5 7673 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
726a989a
RB
7674 gimplify_seq_add_stmt (pre_p, ehf);
7675 ret = GS_ALL_DONE;
7676 break;
7677 }
6de9cd9a 7678
0f59171d
RH
7679 case OBJ_TYPE_REF:
7680 {
7681 enum gimplify_status r0, r1;
726a989a
RB
7682 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
7683 post_p, is_gimple_val, fb_rvalue);
7684 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
7685 post_p, is_gimple_val, fb_rvalue);
0f3a057a 7686 TREE_SIDE_EFFECTS (*expr_p) = 0;
0f59171d
RH
7687 ret = MIN (r0, r1);
7688 }
6de9cd9a
DN
7689 break;
7690
6de9cd9a
DN
7691 case LABEL_DECL:
7692 /* We get here when taking the address of a label. We mark
7693 the label as "forced"; meaning it can never be removed and
7694 it is a potential target for any computed goto. */
7695 FORCED_LABEL (*expr_p) = 1;
7696 ret = GS_ALL_DONE;
7697 break;
7698
7699 case STATEMENT_LIST:
c6c7698d 7700 ret = gimplify_statement_list (expr_p, pre_p);
6de9cd9a
DN
7701 break;
7702
d25cee4d
RH
7703 case WITH_SIZE_EXPR:
7704 {
70e2829d
KH
7705 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7706 post_p == &internal_post ? NULL : post_p,
7707 gimple_test_f, fallback);
7708 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7709 is_gimple_val, fb_rvalue);
941f78d1 7710 ret = GS_ALL_DONE;
d25cee4d
RH
7711 }
7712 break;
7713
6de9cd9a 7714 case VAR_DECL:
4744afba 7715 case PARM_DECL:
a9f7c570 7716 ret = gimplify_var_or_parm_decl (expr_p);
6de9cd9a
DN
7717 break;
7718
077b0dfb
JJ
7719 case RESULT_DECL:
7720 /* When within an OpenMP context, notice uses of variables. */
7721 if (gimplify_omp_ctxp)
7722 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7723 ret = GS_ALL_DONE;
7724 break;
7725
71956db3
RH
7726 case SSA_NAME:
7727 /* Allow callbacks into the gimplifier during optimization. */
7728 ret = GS_ALL_DONE;
7729 break;
7730
953ff289 7731 case OMP_PARALLEL:
726a989a
RB
7732 gimplify_omp_parallel (expr_p, pre_p);
7733 ret = GS_ALL_DONE;
953ff289
DN
7734 break;
7735
a68ab351 7736 case OMP_TASK:
726a989a
RB
7737 gimplify_omp_task (expr_p, pre_p);
7738 ret = GS_ALL_DONE;
a68ab351
JJ
7739 break;
7740
953ff289 7741 case OMP_FOR:
74bf76ed 7742 case OMP_SIMD:
c02065fc 7743 case CILK_SIMD:
acf0174b 7744 case OMP_DISTRIBUTE:
953ff289
DN
7745 ret = gimplify_omp_for (expr_p, pre_p);
7746 break;
7747
7748 case OMP_SECTIONS:
7749 case OMP_SINGLE:
acf0174b
JJ
7750 case OMP_TARGET:
7751 case OMP_TARGET_DATA:
7752 case OMP_TEAMS:
726a989a
RB
7753 gimplify_omp_workshare (expr_p, pre_p);
7754 ret = GS_ALL_DONE;
953ff289
DN
7755 break;
7756
acf0174b
JJ
7757 case OMP_TARGET_UPDATE:
7758 gimplify_omp_target_update (expr_p, pre_p);
7759 ret = GS_ALL_DONE;
7760 break;
7761
953ff289
DN
7762 case OMP_SECTION:
7763 case OMP_MASTER:
acf0174b 7764 case OMP_TASKGROUP:
953ff289
DN
7765 case OMP_ORDERED:
7766 case OMP_CRITICAL:
726a989a
RB
7767 {
7768 gimple_seq body = NULL;
7769 gimple g;
7770
7771 gimplify_and_add (OMP_BODY (*expr_p), &body);
7772 switch (TREE_CODE (*expr_p))
7773 {
7774 case OMP_SECTION:
7775 g = gimple_build_omp_section (body);
7776 break;
7777 case OMP_MASTER:
7778 g = gimple_build_omp_master (body);
7779 break;
acf0174b
JJ
7780 case OMP_TASKGROUP:
7781 {
7782 gimple_seq cleanup = NULL;
7783 tree fn
7784 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
7785 g = gimple_build_call (fn, 0);
7786 gimple_seq_add_stmt (&cleanup, g);
7787 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7788 body = NULL;
7789 gimple_seq_add_stmt (&body, g);
7790 g = gimple_build_omp_taskgroup (body);
7791 }
7792 break;
726a989a
RB
7793 case OMP_ORDERED:
7794 g = gimple_build_omp_ordered (body);
7795 break;
7796 case OMP_CRITICAL:
7797 g = gimple_build_omp_critical (body,
7798 OMP_CRITICAL_NAME (*expr_p));
7799 break;
7800 default:
7801 gcc_unreachable ();
7802 }
7803 gimplify_seq_add_stmt (pre_p, g);
7804 ret = GS_ALL_DONE;
7805 break;
7806 }
953ff289
DN
7807
7808 case OMP_ATOMIC:
20906c66
JJ
7809 case OMP_ATOMIC_READ:
7810 case OMP_ATOMIC_CAPTURE_OLD:
7811 case OMP_ATOMIC_CAPTURE_NEW:
953ff289
DN
7812 ret = gimplify_omp_atomic (expr_p, pre_p);
7813 break;
7814
0a35513e
AH
7815 case TRANSACTION_EXPR:
7816 ret = gimplify_transaction (expr_p, pre_p);
7817 break;
7818
16949072
RG
7819 case TRUTH_AND_EXPR:
7820 case TRUTH_OR_EXPR:
7821 case TRUTH_XOR_EXPR:
1d15f620 7822 {
bd5d002e 7823 tree orig_type = TREE_TYPE (*expr_p);
fc1f4caf 7824 tree new_type, xop0, xop1;
1d15f620 7825 *expr_p = gimple_boolify (*expr_p);
fc1f4caf
KT
7826 new_type = TREE_TYPE (*expr_p);
7827 if (!useless_type_conversion_p (orig_type, new_type))
1d15f620 7828 {
4b4455e5 7829 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
1d15f620
KT
7830 ret = GS_OK;
7831 break;
7832 }
da5fb469 7833
bd5d002e
RG
7834 /* Boolified binary truth expressions are semantically equivalent
7835 to bitwise binary expressions. Canonicalize them to the
7836 bitwise variant. */
7837 switch (TREE_CODE (*expr_p))
7838 {
7839 case TRUTH_AND_EXPR:
7840 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
7841 break;
7842 case TRUTH_OR_EXPR:
7843 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
7844 break;
7845 case TRUTH_XOR_EXPR:
7846 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
7847 break;
7848 default:
7849 break;
7850 }
fc1f4caf
KT
7851 /* Now make sure that operands have compatible type to
7852 expression's new_type. */
7853 xop0 = TREE_OPERAND (*expr_p, 0);
7854 xop1 = TREE_OPERAND (*expr_p, 1);
7855 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
7856 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
7857 new_type,
7858 xop0);
7859 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
7860 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
7861 new_type,
7862 xop1);
bd5d002e
RG
7863 /* Continue classified as tcc_binary. */
7864 goto expr_2;
da5fb469 7865 }
16949072
RG
7866
7867 case FMA_EXPR:
e6ed43b0 7868 case VEC_COND_EXPR:
2205ed25 7869 case VEC_PERM_EXPR:
16949072
RG
7870 /* Classified as tcc_expression. */
7871 goto expr_3;
7872
5be014d5 7873 case POINTER_PLUS_EXPR:
315f5f1b
RG
7874 {
7875 enum gimplify_status r0, r1;
7876 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7877 post_p, is_gimple_val, fb_rvalue);
7878 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7879 post_p, is_gimple_val, fb_rvalue);
7880 recalculate_side_effects (*expr_p);
7881 ret = MIN (r0, r1);
7882 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
7883 after gimplifying operands - this is similar to how
7884 it would be folding all gimplified stmts on creation
7885 to have them canonicalized, which is what we eventually
7886 should do anyway. */
7887 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7888 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
7889 {
7890 *expr_p = build_fold_addr_expr_with_type_loc
7891 (input_location,
7892 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
7893 TREE_OPERAND (*expr_p, 0),
7894 fold_convert (ptr_type_node,
7895 TREE_OPERAND (*expr_p, 1))),
7896 TREE_TYPE (*expr_p));
7897 ret = MIN (ret, GS_OK);
7898 }
7899 break;
7900 }
726a989a 7901
939b37da
BI
7902 case CILK_SYNC_STMT:
7903 {
7904 if (!fn_contains_cilk_spawn_p (cfun))
7905 {
7906 error_at (EXPR_LOCATION (*expr_p),
7907 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
7908 ret = GS_ERROR;
7909 }
7910 else
7911 {
7912 gimplify_cilk_sync (expr_p, pre_p);
7913 ret = GS_ALL_DONE;
7914 }
7915 break;
7916 }
7917
6de9cd9a 7918 default:
282899df 7919 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6de9cd9a 7920 {
6615c446 7921 case tcc_comparison:
61c25908
OH
7922 /* Handle comparison of objects of non scalar mode aggregates
7923 with a call to memcmp. It would be nice to only have to do
7924 this for variable-sized objects, but then we'd have to allow
7925 the same nest of reference nodes we allow for MODIFY_EXPR and
7926 that's too complex.
7927
7928 Compare scalar mode aggregates as scalar mode values. Using
7929 memcmp for them would be very inefficient at best, and is
7930 plain wrong if bitfields are involved. */
726a989a
RB
7931 {
7932 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
61c25908 7933
544d960a
AS
7934 /* Vector comparisons need no boolification. */
7935 if (TREE_CODE (type) == VECTOR_TYPE)
7936 goto expr_2;
7937 else if (!AGGREGATE_TYPE_P (type))
7f3ff782
KT
7938 {
7939 tree org_type = TREE_TYPE (*expr_p);
7940 *expr_p = gimple_boolify (*expr_p);
7941 if (!useless_type_conversion_p (org_type,
7942 TREE_TYPE (*expr_p)))
7943 {
7944 *expr_p = fold_convert_loc (input_location,
7945 org_type, *expr_p);
7946 ret = GS_OK;
7947 }
7948 else
7949 goto expr_2;
7950 }
726a989a
RB
7951 else if (TYPE_MODE (type) != BLKmode)
7952 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7953 else
7954 ret = gimplify_variable_sized_compare (expr_p);
61c25908 7955
726a989a 7956 break;
61c25908 7957 }
d3147f64 7958
282899df
NS
7959 /* If *EXPR_P does not need to be special-cased, handle it
7960 according to its class. */
6615c446 7961 case tcc_unary:
282899df
NS
7962 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7963 post_p, is_gimple_val, fb_rvalue);
7964 break;
6de9cd9a 7965
6615c446 7966 case tcc_binary:
282899df
NS
7967 expr_2:
7968 {
7969 enum gimplify_status r0, r1;
d3147f64 7970
282899df 7971 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
726a989a 7972 post_p, is_gimple_val, fb_rvalue);
282899df
NS
7973 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7974 post_p, is_gimple_val, fb_rvalue);
d3147f64 7975
282899df
NS
7976 ret = MIN (r0, r1);
7977 break;
7978 }
d3147f64 7979
16949072
RG
7980 expr_3:
7981 {
7982 enum gimplify_status r0, r1, r2;
7983
7984 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7985 post_p, is_gimple_val, fb_rvalue);
7986 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7987 post_p, is_gimple_val, fb_rvalue);
7988 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7989 post_p, is_gimple_val, fb_rvalue);
7990
7991 ret = MIN (MIN (r0, r1), r2);
7992 break;
7993 }
7994
6615c446
JO
7995 case tcc_declaration:
7996 case tcc_constant:
6de9cd9a 7997 ret = GS_ALL_DONE;
282899df 7998 goto dont_recalculate;
d3147f64 7999
282899df 8000 default:
16949072 8001 gcc_unreachable ();
6de9cd9a 8002 }
6de9cd9a
DN
8003
8004 recalculate_side_effects (*expr_p);
726a989a 8005
282899df 8006 dont_recalculate:
6de9cd9a
DN
8007 break;
8008 }
d3147f64 8009
941f78d1 8010 gcc_assert (*expr_p || ret != GS_OK);
6de9cd9a
DN
8011 }
8012 while (ret == GS_OK);
8013
8014 /* If we encountered an error_mark somewhere nested inside, either
8015 stub out the statement or propagate the error back out. */
8016 if (ret == GS_ERROR)
8017 {
8018 if (is_statement)
65355d53 8019 *expr_p = NULL;
6de9cd9a
DN
8020 goto out;
8021 }
8022
6de9cd9a
DN
8023 /* This was only valid as a return value from the langhook, which
8024 we handled. Make sure it doesn't escape from any other context. */
282899df 8025 gcc_assert (ret != GS_UNHANDLED);
6de9cd9a 8026
65355d53 8027 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
6de9cd9a
DN
8028 {
8029 /* We aren't looking for a value, and we don't have a valid
8030 statement. If it doesn't have side-effects, throw it away. */
8031 if (!TREE_SIDE_EFFECTS (*expr_p))
65355d53 8032 *expr_p = NULL;
6de9cd9a 8033 else if (!TREE_THIS_VOLATILE (*expr_p))
44de5aeb
RK
8034 {
8035 /* This is probably a _REF that contains something nested that
8036 has side effects. Recurse through the operands to find it. */
8037 enum tree_code code = TREE_CODE (*expr_p);
8038
282899df 8039 switch (code)
44de5aeb 8040 {
282899df 8041 case COMPONENT_REF:
02a5eac4
EB
8042 case REALPART_EXPR:
8043 case IMAGPART_EXPR:
8044 case VIEW_CONVERT_EXPR:
282899df
NS
8045 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8046 gimple_test_f, fallback);
8047 break;
8048
a9e64c63
EB
8049 case ARRAY_REF:
8050 case ARRAY_RANGE_REF:
44de5aeb
RK
8051 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8052 gimple_test_f, fallback);
8053 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
282899df
NS
8054 gimple_test_f, fallback);
8055 break;
8056
8057 default:
8058 /* Anything else with side-effects must be converted to
a9e64c63 8059 a valid statement before we get here. */
282899df 8060 gcc_unreachable ();
44de5aeb 8061 }
44de5aeb 8062
65355d53 8063 *expr_p = NULL;
44de5aeb 8064 }
a9e64c63
EB
8065 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8066 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6de9cd9a 8067 {
a9e64c63
EB
8068 /* Historically, the compiler has treated a bare reference
8069 to a non-BLKmode volatile lvalue as forcing a load. */
af62f6f9 8070 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
726a989a 8071
c22b1771 8072 /* Normally, we do not want to create a temporary for a
a38578e1
MM
8073 TREE_ADDRESSABLE type because such a type should not be
8074 copied by bitwise-assignment. However, we make an
8075 exception here, as all we are doing here is ensuring that
8076 we read the bytes that make up the type. We use
8077 create_tmp_var_raw because create_tmp_var will abort when
57b51d4d 8078 given a TREE_ADDRESSABLE type. */
a38578e1
MM
8079 tree tmp = create_tmp_var_raw (type, "vol");
8080 gimple_add_tmp_var (tmp);
726a989a
RB
8081 gimplify_assign (tmp, *expr_p, pre_p);
8082 *expr_p = NULL;
6de9cd9a
DN
8083 }
8084 else
8085 /* We can't do anything useful with a volatile reference to
a9e64c63
EB
8086 an incomplete type, so just throw it away. Likewise for
8087 a BLKmode type, since any implicit inner load should
8088 already have been turned into an explicit one by the
8089 gimplification process. */
65355d53 8090 *expr_p = NULL;
6de9cd9a
DN
8091 }
8092
8093 /* If we are gimplifying at the statement level, we're done. Tack
726a989a 8094 everything together and return. */
325c3691 8095 if (fallback == fb_none || is_statement)
6de9cd9a 8096 {
726a989a
RB
8097 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8098 it out for GC to reclaim it. */
8099 *expr_p = NULL_TREE;
8100
8101 if (!gimple_seq_empty_p (internal_pre)
8102 || !gimple_seq_empty_p (internal_post))
be00f578 8103 {
726a989a
RB
8104 gimplify_seq_add_seq (&internal_pre, internal_post);
8105 gimplify_seq_add_seq (pre_p, internal_pre);
be00f578 8106 }
726a989a
RB
8107
8108 /* The result of gimplifying *EXPR_P is going to be the last few
8109 statements in *PRE_P and *POST_P. Add location information
8110 to all the statements that were added by the gimplification
8111 helpers. */
8112 if (!gimple_seq_empty_p (*pre_p))
8113 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8114
8115 if (!gimple_seq_empty_p (*post_p))
8116 annotate_all_with_location_after (*post_p, post_last_gsi,
8117 input_location);
8118
6de9cd9a
DN
8119 goto out;
8120 }
8121
726a989a
RB
8122#ifdef ENABLE_GIMPLE_CHECKING
8123 if (*expr_p)
8124 {
8125 enum tree_code code = TREE_CODE (*expr_p);
8126 /* These expressions should already be in gimple IR form. */
8127 gcc_assert (code != MODIFY_EXPR
8128 && code != ASM_EXPR
8129 && code != BIND_EXPR
8130 && code != CATCH_EXPR
6fc4fb06 8131 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
726a989a
RB
8132 && code != EH_FILTER_EXPR
8133 && code != GOTO_EXPR
8134 && code != LABEL_EXPR
8135 && code != LOOP_EXPR
726a989a
RB
8136 && code != SWITCH_EXPR
8137 && code != TRY_FINALLY_EXPR
8138 && code != OMP_CRITICAL
8139 && code != OMP_FOR
8140 && code != OMP_MASTER
acf0174b 8141 && code != OMP_TASKGROUP
726a989a
RB
8142 && code != OMP_ORDERED
8143 && code != OMP_PARALLEL
8144 && code != OMP_SECTIONS
8145 && code != OMP_SECTION
8146 && code != OMP_SINGLE);
8147 }
8148#endif
6de9cd9a 8149
726a989a
RB
8150 /* Otherwise we're gimplifying a subexpression, so the resulting
8151 value is interesting. If it's a valid operand that matches
8152 GIMPLE_TEST_F, we're done. Unless we are handling some
8153 post-effects internally; if that's the case, we need to copy into
8154 a temporary before adding the post-effects to POST_P. */
8155 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
6de9cd9a
DN
8156 goto out;
8157
8158 /* Otherwise, we need to create a new temporary for the gimplified
8159 expression. */
8160
8161 /* We can't return an lvalue if we have an internal postqueue. The
8162 object the lvalue refers to would (probably) be modified by the
8163 postqueue; we need to copy the value out first, which means an
8164 rvalue. */
726a989a
RB
8165 if ((fallback & fb_lvalue)
8166 && gimple_seq_empty_p (internal_post)
e847cc68 8167 && is_gimple_addressable (*expr_p))
6de9cd9a
DN
8168 {
8169 /* An lvalue will do. Take the address of the expression, store it
8170 in a temporary, and replace the expression with an INDIRECT_REF of
8171 that temporary. */
db3927fb 8172 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
6de9cd9a 8173 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7f5ad6d7 8174 *expr_p = build_simple_mem_ref (tmp);
6de9cd9a 8175 }
ba4d8f9d 8176 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
6de9cd9a 8177 {
726a989a
RB
8178 /* An rvalue will do. Assign the gimplified expression into a
8179 new temporary TMP and replace the original expression with
8180 TMP. First, make sure that the expression has a type so that
8181 it can be assigned into a temporary. */
282899df 8182 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
412a1d9e 8183 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6de9cd9a 8184 }
282899df 8185 else
6de9cd9a 8186 {
726a989a 8187#ifdef ENABLE_GIMPLE_CHECKING
282899df
NS
8188 if (!(fallback & fb_mayfail))
8189 {
8190 fprintf (stderr, "gimplification failed:\n");
8191 print_generic_expr (stderr, *expr_p, 0);
8192 debug_tree (*expr_p);
8193 internal_error ("gimplification failed");
8194 }
8195#endif
8196 gcc_assert (fallback & fb_mayfail);
726a989a 8197
282899df 8198 /* If this is an asm statement, and the user asked for the
535a42b1 8199 impossible, don't die. Fail and let gimplify_asm_expr
282899df 8200 issue an error. */
6de9cd9a
DN
8201 ret = GS_ERROR;
8202 goto out;
8203 }
6de9cd9a 8204
6de9cd9a 8205 /* Make sure the temporary matches our predicate. */
282899df 8206 gcc_assert ((*gimple_test_f) (*expr_p));
6de9cd9a 8207
726a989a 8208 if (!gimple_seq_empty_p (internal_post))
6de9cd9a 8209 {
726a989a
RB
8210 annotate_all_with_location (internal_post, input_location);
8211 gimplify_seq_add_seq (pre_p, internal_post);
6de9cd9a
DN
8212 }
8213
8214 out:
8215 input_location = saved_location;
8216 return ret;
8217}
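
/* Illustrative usage sketch, not part of gimplify.c (the helper name is
   hypothetical): how callers in this file typically drive gimplify_expr.
   The predicate (here is_gimple_val / is_gimple_lvalue) names the GIMPLE
   form the caller can accept, and the fallback says what the gimplifier
   may do when the expression is not already in that form.  */
#if 0
static enum gimplify_status
example_gimplify_operands (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  /* Require a GIMPLE value; the gimplifier may create a temporary
     (fb_rvalue).  */
  enum gimplify_status r0
    = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
		     is_gimple_val, fb_rvalue);

  /* Ask for an lvalue but tolerate failure (fb_mayfail); on failure the
     result is GS_ERROR instead of an internal error.  */
  enum gimplify_status r1
    = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
		     is_gimple_lvalue, fb_lvalue | fb_mayfail);

  return MIN (r0, r1);
}
#endif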
8218
44de5aeb 8219/* Look through TYPE for variable-sized objects and gimplify each such
65355d53 8220 size that we find. Add to LIST_P any statements generated. */
44de5aeb 8221
65355d53 8222void
726a989a 8223gimplify_type_sizes (tree type, gimple_seq *list_p)
44de5aeb 8224{
ad50bc8d
RH
8225 tree field, t;
8226
19dbbf36 8227 if (type == NULL || type == error_mark_node)
8e0a600b 8228 return;
ad50bc8d 8229
6c6cfbfd 8230 /* We first do the main variant, then copy into any other variants. */
ad50bc8d 8231 type = TYPE_MAIN_VARIANT (type);
44de5aeb 8232
8e0a600b 8233 /* Avoid infinite recursion. */
19dbbf36 8234 if (TYPE_SIZES_GIMPLIFIED (type))
8e0a600b
JJ
8235 return;
8236
8237 TYPE_SIZES_GIMPLIFIED (type) = 1;
8238
44de5aeb
RK
8239 switch (TREE_CODE (type))
8240 {
44de5aeb
RK
8241 case INTEGER_TYPE:
8242 case ENUMERAL_TYPE:
8243 case BOOLEAN_TYPE:
44de5aeb 8244 case REAL_TYPE:
325217ed 8245 case FIXED_POINT_TYPE:
65355d53
RH
8246 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8247 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
ad50bc8d
RH
8248
8249 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8250 {
8251 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8252 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
ad50bc8d 8253 }
44de5aeb
RK
8254 break;
8255
8256 case ARRAY_TYPE:
ad50bc8d 8257 /* These types may not have declarations, so handle them here. */
8e0a600b
JJ
8258 gimplify_type_sizes (TREE_TYPE (type), list_p);
8259 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
2e957792
JJ
 8260	    /* Ensure VLA bounds aren't removed: for -O0 they should be variables
 8261	       with assigned stack slots; for -O1+ -g they should be tracked
 8262	       by VTA.  */
08d78391
EB
8263 if (!(TYPE_NAME (type)
8264 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8265 && DECL_IGNORED_P (TYPE_NAME (type)))
8266 && TYPE_DOMAIN (type)
802e9f8e
JJ
8267 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8268 {
8269 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8270 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8271 DECL_IGNORED_P (t) = 0;
8272 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8273 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8274 DECL_IGNORED_P (t) = 0;
8275 }
44de5aeb
RK
8276 break;
8277
8278 case RECORD_TYPE:
8279 case UNION_TYPE:
8280 case QUAL_UNION_TYPE:
910ad8de 8281 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
44de5aeb 8282 if (TREE_CODE (field) == FIELD_DECL)
8e0a600b
JJ
8283 {
8284 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
9a9ba8d9
JJ
8285 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8286 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8e0a600b
JJ
8287 gimplify_type_sizes (TREE_TYPE (field), list_p);
8288 }
8289 break;
8290
8291 case POINTER_TYPE:
8292 case REFERENCE_TYPE:
706c4bb7
OH
8293 /* We used to recurse on the pointed-to type here, which turned out to
8294 be incorrect because its definition might refer to variables not
8295 yet initialized at this point if a forward declaration is involved.
8296
8297 It was actually useful for anonymous pointed-to types to ensure
8298 that the sizes evaluation dominates every possible later use of the
8299 values. Restricting to such types here would be safe since there
f63645be
KH
8300 is no possible forward declaration around, but would introduce an
8301 undesirable middle-end semantic to anonymity. We then defer to
8302 front-ends the responsibility of ensuring that the sizes are
8303 evaluated both early and late enough, e.g. by attaching artificial
706c4bb7 8304 type declarations to the tree. */
44de5aeb
RK
8305 break;
8306
8307 default:
8308 break;
8309 }
8310
65355d53
RH
8311 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8312 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
44de5aeb 8313
ad50bc8d 8314 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
b4830636 8315 {
ad50bc8d
RH
8316 TYPE_SIZE (t) = TYPE_SIZE (type);
8317 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8318 TYPE_SIZES_GIMPLIFIED (t) = 1;
b4830636 8319 }
b4830636
RH
8320}
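
/* Illustrative sketch, not part of gimplify.c (the helper name is
   hypothetical): gimplifying the sizes of a variable-length array type
   built by hand, roughly what a front end needs for "int a[n]".  The
   tree-building routines are the generic ones used elsewhere in the
   middle end.  */
#if 0
static void
example_gimplify_vla_type (tree nelts, gimple_seq *pre_p)
{
  /* Build the domain [0, nelts - 1] and the array type int[nelts].  */
  tree max = fold_build2 (MINUS_EXPR, sizetype,
			  fold_convert (sizetype, nelts), size_one_node);
  tree domain = build_index_type (max);
  tree vla = build_array_type (integer_type_node, domain);

  /* Emit statements computing TYPE_SIZE, TYPE_SIZE_UNIT and the domain
     bounds into *PRE_P and mark the type TYPE_SIZES_GIMPLIFIED.  */
  gimplify_type_sizes (vla, pre_p);
}
#endif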
8321
8322/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8323 a size or position, has had all of its SAVE_EXPRs evaluated.
726a989a 8324 We add any required statements to *STMT_P. */
44de5aeb
RK
8325
8326void
726a989a 8327gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
44de5aeb 8328{
3ac8781c 8329 tree expr = *expr_p;
a9c5ddf9 8330
44de5aeb 8331 /* We don't do anything if the value isn't there, is constant, or contains
1e748a2b 8332	   a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
aabcd309 8333 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
1e748a2b
RK
8334 will want to replace it with a new variable, but that will cause problems
8335 if this type is from outside the function. It's OK to have that here. */
848be094 8336 if (is_gimple_sizepos (expr))
44de5aeb
RK
8337 return;
8338
a9c5ddf9
RH
8339 *expr_p = unshare_expr (expr);
8340
ad50bc8d 8341 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
44de5aeb 8342}
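
/* Minimal usage sketch, not part of gimplify.c (the helper name is
   hypothetical): gimplify the size and position expressions of a single
   FIELD_DECL, mirroring the RECORD_TYPE loop in gimplify_type_sizes
   above.  */
#if 0
static void
example_gimplify_field_sizepos (tree field, gimple_seq *pre_p)
{
  gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), pre_p);
  gimplify_one_sizepos (&DECL_SIZE (field), pre_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (field), pre_p);
}
#endif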
6de9cd9a 8343
3ad065ef
EB
8344/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
8345 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
8346 is true, also gimplify the parameters. */
726a989a
RB
8347
8348gimple
3ad065ef 8349gimplify_body (tree fndecl, bool do_parms)
6de9cd9a
DN
8350{
8351 location_t saved_location = input_location;
726a989a
RB
8352 gimple_seq parm_stmts, seq;
8353 gimple outer_bind;
d406b663 8354 struct gimplify_ctx gctx;
9f9ebcdf 8355 struct cgraph_node *cgn;
6de9cd9a
DN
8356
8357 timevar_push (TV_TREE_GIMPLIFY);
953ff289 8358
f66d6761
SB
8359 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
8360 gimplification. */
8361 default_rtl_profile ();
8362
953ff289 8363 gcc_assert (gimplify_ctxp == NULL);
d406b663 8364 push_gimplify_context (&gctx);
6de9cd9a 8365
acf0174b
JJ
8366 if (flag_openmp)
8367 {
8368 gcc_assert (gimplify_omp_ctxp == NULL);
8369 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
8370 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
8371 }
8372
44de5aeb
RK
8373 /* Unshare most shared trees in the body and in that of any nested functions.
8374 It would seem we don't have to do this for nested functions because
8375 they are supposed to be output and then the outer function gimplified
8376 first, but the g++ front end doesn't always do it that way. */
3ad065ef
EB
8377 unshare_body (fndecl);
8378 unvisit_body (fndecl);
6de9cd9a 8379
9f9ebcdf
MJ
8380 cgn = cgraph_get_node (fndecl);
8381 if (cgn && cgn->origin)
77f2a970
JJ
8382 nonlocal_vlas = pointer_set_create ();
8383
fa10beec 8384 /* Make sure input_location isn't set to something weird. */
6de9cd9a
DN
8385 input_location = DECL_SOURCE_LOCATION (fndecl);
8386
4744afba
RH
8387 /* Resolve callee-copies. This has to be done before processing
8388 the body so that DECL_VALUE_EXPR gets processed correctly. */
3ad065ef 8389 parm_stmts = do_parms ? gimplify_parameters () : NULL;
4744afba 8390
6de9cd9a 8391 /* Gimplify the function's body. */
726a989a 8392 seq = NULL;
3ad065ef 8393 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
726a989a
RB
8394 outer_bind = gimple_seq_first_stmt (seq);
8395 if (!outer_bind)
6de9cd9a 8396 {
726a989a
RB
8397 outer_bind = gimple_build_nop ();
8398 gimplify_seq_add_stmt (&seq, outer_bind);
6de9cd9a 8399 }
44de5aeb 8400
726a989a
RB
8401 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
8402 not the case, wrap everything in a GIMPLE_BIND to make it so. */
8403 if (gimple_code (outer_bind) == GIMPLE_BIND
8404 && gimple_seq_first (seq) == gimple_seq_last (seq))
8405 ;
8406 else
8407 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8408
3ad065ef 8409 DECL_SAVED_TREE (fndecl) = NULL_TREE;
4744afba
RH
8410
8411 /* If we had callee-copies statements, insert them at the beginning
f0c10f0f 8412 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
726a989a 8413 if (!gimple_seq_empty_p (parm_stmts))
4744afba 8414 {
f0c10f0f
RG
8415 tree parm;
8416
726a989a
RB
8417 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8418 gimple_bind_set_body (outer_bind, parm_stmts);
f0c10f0f
RG
8419
8420 for (parm = DECL_ARGUMENTS (current_function_decl);
910ad8de 8421 parm; parm = DECL_CHAIN (parm))
f0c10f0f
RG
8422 if (DECL_HAS_VALUE_EXPR_P (parm))
8423 {
8424 DECL_HAS_VALUE_EXPR_P (parm) = 0;
8425 DECL_IGNORED_P (parm) = 0;
8426 }
4744afba
RH
8427 }
8428
77f2a970
JJ
8429 if (nonlocal_vlas)
8430 {
8431 pointer_set_destroy (nonlocal_vlas);
8432 nonlocal_vlas = NULL;
8433 }
8434
6d7f7e0a 8435 if ((flag_openmp || flag_openmp_simd) && gimplify_omp_ctxp)
acf0174b
JJ
8436 {
8437 delete_omp_context (gimplify_omp_ctxp);
8438 gimplify_omp_ctxp = NULL;
8439 }
8440
726a989a 8441 pop_gimplify_context (outer_bind);
953ff289 8442 gcc_assert (gimplify_ctxp == NULL);
6de9cd9a 8443
07c5a154 8444#ifdef ENABLE_CHECKING
1da2ed5f 8445 if (!seen_error ())
34019e28 8446 verify_gimple_in_seq (gimple_bind_body (outer_bind));
07c5a154 8447#endif
6de9cd9a
DN
8448
8449 timevar_pop (TV_TREE_GIMPLIFY);
8450 input_location = saved_location;
726a989a
RB
8451
8452 return outer_bind;
6de9cd9a
DN
8453}
8454
6a1f6c9c 8455 typedef char *char_p; /* For vec<char_p>.  */
6a1f6c9c
JM
8456
8457/* Return whether we should exclude FNDECL from instrumentation. */
8458
8459static bool
8460flag_instrument_functions_exclude_p (tree fndecl)
8461{
9771b263 8462 vec<char_p> *v;
6a1f6c9c 8463
9771b263
DN
8464 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8465 if (v && v->length () > 0)
6a1f6c9c
JM
8466 {
8467 const char *name;
8468 int i;
8469 char *s;
8470
8471 name = lang_hooks.decl_printable_name (fndecl, 0);
9771b263 8472 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
8473 if (strstr (name, s) != NULL)
8474 return true;
8475 }
8476
9771b263
DN
8477 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8478 if (v && v->length () > 0)
6a1f6c9c
JM
8479 {
8480 const char *name;
8481 int i;
8482 char *s;
8483
8484 name = DECL_SOURCE_FILE (fndecl);
9771b263 8485 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
8486 if (strstr (name, s) != NULL)
8487 return true;
8488 }
8489
8490 return false;
8491}
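
/* Behavior sketch, not part of gimplify.c (names below are made up for
   illustration): the exclusion lists filled in from
   -finstrument-functions-exclude-{function,file}-list are matched by
   plain substring search, exactly as the strstr calls above do.  */
#if 0
static bool
example_excluded_by_substring (const char *printable_name)
{
  /* e.g. -finstrument-functions-exclude-function-list=alloc,lock
     excludes "xmalloc_failed" because strstr finds "alloc" in it.  */
  static const char *const entries[] = { "alloc", "lock" };
  for (size_t i = 0; i < sizeof entries / sizeof entries[0]; i++)
    if (strstr (printable_name, entries[i]) != NULL)
      return true;
  return false;
}
#endif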
8492
6de9cd9a 8493/* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
726a989a 8494 node for the function we want to gimplify.
b8698a0f 8495
ad19c4be 8496 Return the sequence of GIMPLE statements corresponding to the body
726a989a 8497 of FNDECL. */
6de9cd9a
DN
8498
8499void
8500gimplify_function_tree (tree fndecl)
8501{
af16bc76 8502 tree parm, ret;
726a989a
RB
8503 gimple_seq seq;
8504 gimple bind;
6de9cd9a 8505
a406865a
RG
8506 gcc_assert (!gimple_body (fndecl));
8507
db2960f4
SL
8508 if (DECL_STRUCT_FUNCTION (fndecl))
8509 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8510 else
8511 push_struct_function (fndecl);
6de9cd9a 8512
910ad8de 8513 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
e41d82f5
RH
8514 {
8515 /* Preliminarily mark non-addressed complex variables as eligible
8516 for promotion to gimple registers. We'll transform their uses
8517 as we find them. */
0890b981
AP
8518 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8519 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
e41d82f5
RH
8520 && !TREE_THIS_VOLATILE (parm)
8521 && !needs_to_live_in_memory (parm))
0890b981 8522 DECL_GIMPLE_REG_P (parm) = 1;
e41d82f5
RH
8523 }
8524
8525 ret = DECL_RESULT (fndecl);
0890b981 8526 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7b7e6ecd 8527 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
e41d82f5 8528 && !needs_to_live_in_memory (ret))
0890b981 8529 DECL_GIMPLE_REG_P (ret) = 1;
e41d82f5 8530
3ad065ef 8531 bind = gimplify_body (fndecl, true);
726a989a
RB
8532
8533 /* The tree body of the function is no longer needed, replace it
8534 with the new GIMPLE body. */
355a7673 8535 seq = NULL;
726a989a
RB
8536 gimple_seq_add_stmt (&seq, bind);
8537 gimple_set_body (fndecl, seq);
6de9cd9a
DN
8538
8539 /* If we're instrumenting function entry/exit, then prepend the call to
8540 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8541 catch the exit hook. */
8542 /* ??? Add some way to ignore exceptions for this TFE. */
8543 if (flag_instrument_function_entry_exit
8d5a7d1f
ILT
8544 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8545 && !flag_instrument_functions_exclude_p (fndecl))
6de9cd9a 8546 {
726a989a
RB
8547 tree x;
8548 gimple new_bind;
8549 gimple tf;
8550 gimple_seq cleanup = NULL, body = NULL;
b01890ff
JH
8551 tree tmp_var;
8552 gimple call;
8553
e79983f4 8554 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 8555 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
8556 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8557 gimple_call_set_lhs (call, tmp_var);
8558 gimplify_seq_add_stmt (&cleanup, call);
e79983f4 8559 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
b01890ff
JH
8560 call = gimple_build_call (x, 2,
8561 build_fold_addr_expr (current_function_decl),
8562 tmp_var);
8563 gimplify_seq_add_stmt (&cleanup, call);
726a989a 8564 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
6de9cd9a 8565
e79983f4 8566 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 8567 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
8568 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8569 gimple_call_set_lhs (call, tmp_var);
8570 gimplify_seq_add_stmt (&body, call);
e79983f4 8571 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
b01890ff
JH
8572 call = gimple_build_call (x, 2,
8573 build_fold_addr_expr (current_function_decl),
8574 tmp_var);
8575 gimplify_seq_add_stmt (&body, call);
726a989a 8576 gimplify_seq_add_stmt (&body, tf);
32001f69 8577 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
726a989a
RB
8578 /* Clear the block for BIND, since it is no longer directly inside
8579 the function, but within a try block. */
32001f69 8580 gimple_bind_set_block (bind, NULL);
6de9cd9a 8581
726a989a
RB
8582 /* Replace the current function body with the body
8583 wrapped in the try/finally TF. */
355a7673 8584 seq = NULL;
726a989a
RB
8585 gimple_seq_add_stmt (&seq, new_bind);
8586 gimple_set_body (fndecl, seq);
6de9cd9a
DN
8587 }
8588
726a989a 8589 DECL_SAVED_TREE (fndecl) = NULL_TREE;
a406865a 8590 cfun->curr_properties = PROP_gimple_any;
726a989a 8591
db2960f4 8592 pop_cfun ();
6de9cd9a 8593}
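
/* User-level sketch, not part of gimplify.c: the try/finally wrapping
   built above for -finstrument-functions calls the profiling hooks below
   (BUILT_IN_PROFILE_FUNC_ENTER/EXIT), passing the function address and
   the value of __builtin_return_address (0).  A program can supply these
   hooks itself; they are conventionally marked no_instrument_function so
   they are not instrumented in turn.  */
#if 0
__attribute__((no_instrument_function))
void
__cyg_profile_func_enter (void *this_fn, void *call_site)
{
  /* e.g. append THIS_FN/CALL_SITE to a trace buffer.  */
}

__attribute__((no_instrument_function))
void
__cyg_profile_func_exit (void *this_fn, void *call_site)
{
  /* Matching exit record.  */
}
#endif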
726a989a 8594
4a7cb16f
AM
8595/* Return a dummy expression of type TYPE in order to keep going after an
8596 error. */
b184c8f1 8597
4a7cb16f
AM
8598static tree
8599dummy_object (tree type)
b184c8f1 8600{
4a7cb16f
AM
8601 tree t = build_int_cst (build_pointer_type (type), 0);
8602 return build2 (MEM_REF, type, t, t);
b184c8f1
AM
8603}
8604
4a7cb16f
AM
8605/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
8606 builtin function, but a very special sort of operator. */
b184c8f1 8607
4a7cb16f
AM
8608enum gimplify_status
8609gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
8610{
8611 tree promoted_type, have_va_type;
8612 tree valist = TREE_OPERAND (*expr_p, 0);
8613 tree type = TREE_TYPE (*expr_p);
8614 tree t;
8615 location_t loc = EXPR_LOCATION (*expr_p);
b184c8f1 8616
4a7cb16f
AM
8617 /* Verify that valist is of the proper type. */
8618 have_va_type = TREE_TYPE (valist);
8619 if (have_va_type == error_mark_node)
8620 return GS_ERROR;
8621 have_va_type = targetm.canonical_va_list_type (have_va_type);
b184c8f1 8622
4a7cb16f
AM
8623 if (have_va_type == NULL_TREE)
8624 {
8625 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
8626 return GS_ERROR;
8627 }
b184c8f1 8628
4a7cb16f
AM
8629 /* Generate a diagnostic for requesting data of a type that cannot
8630 be passed through `...' due to type promotion at the call site. */
8631 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
8632 != type)
8633 {
8634 static bool gave_help;
8635 bool warned;
b184c8f1 8636
4a7cb16f
AM
8637 /* Unfortunately, this is merely undefined, rather than a constraint
8638 violation, so we cannot make this an error. If this call is never
8639 executed, the program is still strictly conforming. */
8640 warned = warning_at (loc, 0,
8641 "%qT is promoted to %qT when passed through %<...%>",
8642 type, promoted_type);
8643 if (!gave_help && warned)
8644 {
8645 gave_help = true;
8646 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
8647 promoted_type, type);
8648 }
b184c8f1 8649
4a7cb16f
AM
8650 /* We can, however, treat "undefined" any way we please.
 8651	 Call __builtin_trap to encourage the user to fix the program.  */
8652 if (warned)
8653 inform (loc, "if this code is reached, the program will abort");
8654 /* Before the abort, allow the evaluation of the va_list
8655 expression to exit or longjmp. */
8656 gimplify_and_add (valist, pre_p);
8657 t = build_call_expr_loc (loc,
8658 builtin_decl_implicit (BUILT_IN_TRAP), 0);
b184c8f1
AM
8659 gimplify_and_add (t, pre_p);
8660
4a7cb16f
AM
8661 /* This is dead code, but go ahead and finish so that the
8662 mode of the result comes out right. */
8663 *expr_p = dummy_object (type);
8664 return GS_ALL_DONE;
b184c8f1
AM
8665 }
8666 else
b184c8f1 8667 {
4a7cb16f
AM
8668 /* Make it easier for the backends by protecting the valist argument
8669 from multiple evaluations. */
8670 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
8671 {
8672 /* For this case, the backends will be expecting a pointer to
8673 TREE_TYPE (abi), but it's possible we've
8674 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
8675 So fix it. */
8676 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
8677 {
8678 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
8679 valist = fold_convert_loc (loc, p1,
8680 build_fold_addr_expr_loc (loc, valist));
8681 }
b184c8f1 8682
4a7cb16f
AM
8683 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
8684 }
8685 else
8686 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
b184c8f1 8687
4a7cb16f
AM
8688 if (!targetm.gimplify_va_arg_expr)
8689 /* FIXME: Once most targets are converted we should merely
8690 assert this is non-null. */
8691 return GS_ALL_DONE;
b184c8f1 8692
4a7cb16f
AM
8693 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
8694 return GS_OK;
b184c8f1 8695 }
b184c8f1 8696}
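
/* User-level example, not part of gimplify.c, of the diagnostic issued
   above: 'char' is promoted to 'int' when passed through '...', so
   va_arg (ap, char) is undefined and gets replaced by a trap plus a
   dummy value of the requested type.  */
#if 0
#include <stdarg.h>

int
first_arg_as_char (int n, ...)
{
  va_list ap;
  int c;

  va_start (ap, n);
  c = va_arg (ap, char);  /* warning: 'char' is promoted to 'int' when
			     passed through '...'  */
  va_end (ap);
  return c;
}
#endif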
bcf71673 8697
45b0be94
AM
8698/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
8699
8700 DST/SRC are the destination and source respectively. You can pass
8701 ungimplified trees in DST or SRC, in which case they will be
8702 converted to a gimple operand if necessary.
8703
8704 This function returns the newly created GIMPLE_ASSIGN tuple. */
8705
8706gimple
8707gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
8708{
8709 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8710 gimplify_and_add (t, seq_p);
8711 ggc_free (t);
8712 return gimple_seq_last_stmt (*seq_p);
8713}
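
/* Minimal usage sketch, not part of gimplify.c (the helper name and
   context are hypothetical): gimplify_assign accepts an ungimplified RHS
   and emits whatever statements are needed to evaluate it before the
   final GIMPLE_ASSIGN, which it returns.  */
#if 0
static gimple
example_emit_plus_one (tree lhs, tree src, gimple_seq *seq_p)
{
  tree rhs = build2 (PLUS_EXPR, TREE_TYPE (src), src,
		     build_int_cst (TREE_TYPE (src), 1));
  return gimplify_assign (lhs, rhs, seq_p);
}
#endif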
8714
18f429e2
AM
8715inline hashval_t
8716gimplify_hasher::hash (const value_type *p)
8717{
8718 tree t = p->val;
8719 return iterative_hash_expr (t, 0);
8720}
8721
8722inline bool
8723gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
8724{
8725 tree t1 = p1->val;
8726 tree t2 = p2->val;
8727 enum tree_code code = TREE_CODE (t1);
8728
8729 if (TREE_CODE (t2) != code
8730 || TREE_TYPE (t1) != TREE_TYPE (t2))
8731 return false;
8732
8733 if (!operand_equal_p (t1, t2, 0))
8734 return false;
8735
8736#ifdef ENABLE_CHECKING
8737 /* Only allow them to compare equal if they also hash equal; otherwise
 8738	     results are nondeterministic, and we fail bootstrap comparison.  */
8739 gcc_assert (hash (p1) == hash (p2));
8740#endif
8741
8742 return true;
8743}