1/* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2013 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 3, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
27#include "tree.h"
28#include "gimple.h"
29#include "tree-iterator.h"
30#include "tree-inline.h"
31#include "tree-pretty-print.h"
32#include "langhooks.h"
33#include "bitmap.h"
34#include "gimple-ssa.h"
35#include "cgraph.h"
36#include "tree-cfg.h"
37#include "tree-ssanames.h"
38#include "tree-ssa.h"
39#include "timevar.h"
40#include "hashtab.h"
41#include "flags.h"
42#include "function.h"
43#include "ggc.h"
44#include "diagnostic-core.h"
45#include "target.h"
46#include "pointer-set.h"
47#include "splay-tree.h"
48#include "vec.h"
49#include "omp-low.h"
50#include "gimple-low.h"
51
52#include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
53#include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
54#include "expr.h"
55#include "tm_p.h"
56
57enum gimplify_omp_var_data
58{
59 GOVD_SEEN = 1,
60 GOVD_EXPLICIT = 2,
61 GOVD_SHARED = 4,
62 GOVD_PRIVATE = 8,
63 GOVD_FIRSTPRIVATE = 16,
64 GOVD_LASTPRIVATE = 32,
65 GOVD_REDUCTION = 64,
66 GOVD_LOCAL = 128,
67 GOVD_MAP = 256,
68 GOVD_DEBUG_PRIVATE = 512,
69 GOVD_PRIVATE_OUTER_REF = 1024,
70 GOVD_LINEAR = 2048,
71 GOVD_ALIGNED = 4096,
72 GOVD_MAP_TO_ONLY = 8192,
73 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
74 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
75 | GOVD_LOCAL)
76};
77
78
79enum omp_region_type
80{
81 ORT_WORKSHARE = 0,
82 ORT_SIMD = 1,
83 ORT_PARALLEL = 2,
84 ORT_COMBINED_PARALLEL = 3,
85 ORT_TASK = 4,
86 ORT_UNTIED_TASK = 5,
87 ORT_TEAMS = 8,
88 ORT_TARGET_DATA = 16,
89 ORT_TARGET = 32
90};
91
92struct gimplify_omp_ctx
93{
94 struct gimplify_omp_ctx *outer_context;
95 splay_tree variables;
96 struct pointer_set_t *privatized_types;
97 location_t location;
98 enum omp_clause_default_kind default_kind;
99 enum omp_region_type region_type;
100 bool combined_loop;
101};
102
103static struct gimplify_ctx *gimplify_ctxp;
104static struct gimplify_omp_ctx *gimplify_omp_ctxp;
105
106
107/* Forward declaration. */
108static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
109
110/* Mark X addressable. Unlike the langhook we expect X to be in gimple
111 form and we don't do any syntax checking. */
112
113void
114mark_addressable (tree x)
115{
116 while (handled_component_p (x))
117 x = TREE_OPERAND (x, 0);
118 if (TREE_CODE (x) == MEM_REF
119 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
120 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
121 if (TREE_CODE (x) != VAR_DECL
122 && TREE_CODE (x) != PARM_DECL
123 && TREE_CODE (x) != RESULT_DECL)
124 return;
125 TREE_ADDRESSABLE (x) = 1;
126
127 /* Also mark the artificial SSA_NAME that points to the partition of X. */
128 if (TREE_CODE (x) == VAR_DECL
129 && !DECL_EXTERNAL (x)
130 && !TREE_STATIC (x)
131 && cfun->gimple_df != NULL
132 && cfun->gimple_df->decls_to_pointers != NULL)
133 {
134 void *namep
135 = pointer_map_contains (cfun->gimple_df->decls_to_pointers, x);
136 if (namep)
137 TREE_ADDRESSABLE (*(tree *)namep) = 1;
138 }
139}
140
141/* Link gimple statement GS to the end of the sequence *SEQ_P. If
142 *SEQ_P is NULL, a new sequence is allocated. This function is
143 similar to gimple_seq_add_stmt, but does not scan the operands.
144 During gimplification, we need to manipulate statement sequences
145 before the def/use vectors have been constructed. */
146
147void
148gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
149{
150 gimple_stmt_iterator si;
151
152 if (gs == NULL)
153 return;
154
155 si = gsi_last (*seq_p);
156 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
157}
158
159/* Shorter alias name for the above function for use in gimplify.c
160 only. */
161
162static inline void
163gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
164{
165 gimple_seq_add_stmt_without_update (seq_p, gs);
166}
167
168/* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
169 NULL, a new sequence is allocated. This function is
170 similar to gimple_seq_add_seq, but does not scan the operands.
171 During gimplification, we need to manipulate statement sequences
172 before the def/use vectors have been constructed. */
173
174static void
175gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
176{
177 gimple_stmt_iterator si;
178
179 if (src == NULL)
180 return;
181
182 si = gsi_last (*dst_p);
183 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
184}
185
186/* Set up a context for the gimplifier. */
187
188void
189push_gimplify_context (struct gimplify_ctx *c)
190{
191 memset (c, '\0', sizeof (*c));
192 c->prev_context = gimplify_ctxp;
193 gimplify_ctxp = c;
194}
195
196/* Tear down a context for the gimplifier. If BODY is non-null, then
197 put the temporaries into the outer BIND_EXPR. Otherwise, put them
198 in the local_decls.
199
200 BODY is not a sequence, but the first tuple in a sequence. */
201
202void
203pop_gimplify_context (gimple body)
204{
205 struct gimplify_ctx *c = gimplify_ctxp;
206
207 gcc_assert (c
208 && (!c->bind_expr_stack.exists ()
209 || c->bind_expr_stack.is_empty ()));
210 c->bind_expr_stack.release ();
211 gimplify_ctxp = c->prev_context;
212
213 if (body)
214 declare_vars (c->temps, body, false);
215 else
216 record_vars (c->temps);
217
218 if (c->temp_htab.is_created ())
219 c->temp_htab.dispose ();
220}
221
222/* Push a GIMPLE_BIND tuple onto the stack of bindings. */
223
224static void
225gimple_push_bind_expr (gimple gimple_bind)
226{
227 gimplify_ctxp->bind_expr_stack.reserve (8);
228 gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
229}
230
231/* Pop the first element off the stack of bindings. */
232
233static void
234gimple_pop_bind_expr (void)
235{
236 gimplify_ctxp->bind_expr_stack.pop ();
237}
238
239/* Return the first element of the stack of bindings. */
240
241gimple
242gimple_current_bind_expr (void)
243{
244 return gimplify_ctxp->bind_expr_stack.last ();
245}
246
247/* Return the stack of bindings created during gimplification. */
248
249vec<gimple>
250gimple_bind_expr_stack (void)
251{
252 return gimplify_ctxp->bind_expr_stack;
253}
254
255/* Return true iff there is a COND_EXPR between us and the innermost
256 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
257
258static bool
259gimple_conditional_context (void)
260{
261 return gimplify_ctxp->conditions > 0;
262}
263
264/* Note that we've entered a COND_EXPR. */
265
266static void
267gimple_push_condition (void)
268{
269#ifdef ENABLE_GIMPLE_CHECKING
270 if (gimplify_ctxp->conditions == 0)
271 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
272#endif
273 ++(gimplify_ctxp->conditions);
274}
275
276/* Note that we've left a COND_EXPR. If we're back at unconditional scope
277 now, add any conditional cleanups we've seen to the prequeue. */
278
279static void
280gimple_pop_condition (gimple_seq *pre_p)
281{
282 int conds = --(gimplify_ctxp->conditions);
283
284 gcc_assert (conds >= 0);
285 if (conds == 0)
286 {
287 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
288 gimplify_ctxp->conditional_cleanups = NULL;
289 }
290}
291
292/* A stable comparison routine for use with splay trees and DECLs. */
293
294static int
295splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
296{
297 tree a = (tree) xa;
298 tree b = (tree) xb;
299
300 return DECL_UID (a) - DECL_UID (b);
301}
302
303/* Create a new omp construct that deals with variable remapping. */
304
305static struct gimplify_omp_ctx *
306new_omp_context (enum omp_region_type region_type)
307{
308 struct gimplify_omp_ctx *c;
309
310 c = XCNEW (struct gimplify_omp_ctx);
311 c->outer_context = gimplify_omp_ctxp;
312 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
313 c->privatized_types = pointer_set_create ();
314 c->location = input_location;
315 c->region_type = region_type;
316 if ((region_type & ORT_TASK) == 0)
317 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
318 else
319 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
320
321 return c;
322}
323
324/* Destroy an omp construct that deals with variable remapping. */
325
326static void
327delete_omp_context (struct gimplify_omp_ctx *c)
328{
329 splay_tree_delete (c->variables);
330 pointer_set_destroy (c->privatized_types);
331 XDELETE (c);
332}
333
334static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
335static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
336
337/* Both gimplify the statement T and append it to *SEQ_P. This function
338 behaves exactly as gimplify_stmt, but you don't have to pass T as a
339 reference. */
340
341void
342gimplify_and_add (tree t, gimple_seq *seq_p)
343{
344 gimplify_stmt (&t, seq_p);
345}
346
347/* Gimplify statement T into sequence *SEQ_P, and return the first
348 tuple in the sequence of generated tuples for this statement.
349 Return NULL if gimplifying T produced no tuples. */
350
351static gimple
352gimplify_and_return_first (tree t, gimple_seq *seq_p)
353{
354 gimple_stmt_iterator last = gsi_last (*seq_p);
355
356 gimplify_and_add (t, seq_p);
357
358 if (!gsi_end_p (last))
359 {
360 gsi_next (&last);
361 return gsi_stmt (last);
362 }
363 else
364 return gimple_seq_first_stmt (*seq_p);
365}
366
367/* Strip off a legitimate source ending from the input string NAME of
368 length LEN. Rather than having to know the names used by all of
369 our front ends, we strip off an ending of a period followed by
370 up to five characters. (Java uses ".class".) */
371
372static inline void
373remove_suffix (char *name, int len)
374{
375 int i;
376
377 for (i = 2; i < 8 && len > i; i++)
378 {
379 if (name[len - i] == '.')
380 {
381 name[len - i] = '\0';
382 break;
383 }
384 }
385}
386
387/* Create a new temporary name with PREFIX. Return an identifier. */
388
389static GTY(()) unsigned int tmp_var_id_num;
390
391tree
392create_tmp_var_name (const char *prefix)
393{
394 char *tmp_name;
395
396 if (prefix)
397 {
398 char *preftmp = ASTRDUP (prefix);
399
400 remove_suffix (preftmp, strlen (preftmp));
401 clean_symbol_name (preftmp);
402
403 prefix = preftmp;
404 }
405
406 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
407 return get_identifier (tmp_name);
408}
409
410/* Create a new temporary variable declaration of type TYPE.
411 Do NOT push it into the current binding. */
412
413tree
414create_tmp_var_raw (tree type, const char *prefix)
415{
416 tree tmp_var;
417
418 tmp_var = build_decl (input_location,
419 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
420 type);
421
422 /* The variable was declared by the compiler. */
423 DECL_ARTIFICIAL (tmp_var) = 1;
424 /* And we don't want debug info for it. */
425 DECL_IGNORED_P (tmp_var) = 1;
426
427 /* Make the variable writable. */
428 TREE_READONLY (tmp_var) = 0;
429
430 DECL_EXTERNAL (tmp_var) = 0;
431 TREE_STATIC (tmp_var) = 0;
432 TREE_USED (tmp_var) = 1;
433
434 return tmp_var;
435}
436
437/* Create a new temporary variable declaration of type TYPE. DO push the
438 variable into the current binding. Further, assume that this is called
439 only from gimplification or optimization, at which point the creation of
440 certain types are bugs. */
441
442tree
443create_tmp_var (tree type, const char *prefix)
444{
445 tree tmp_var;
446
447 /* We don't allow types that are addressable (meaning we can't make copies),
448 or incomplete. We also used to reject every variable size objects here,
449 but now support those for which a constant upper bound can be obtained.
450 The processing for variable sizes is performed in gimple_add_tmp_var,
451 point at which it really matters and possibly reached via paths not going
452 through this function, e.g. after direct calls to create_tmp_var_raw. */
453 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
454
455 tmp_var = create_tmp_var_raw (type, prefix);
456 gimple_add_tmp_var (tmp_var);
457 return tmp_var;
458}
459
460/* Create a new temporary variable declaration of type TYPE by calling
461 create_tmp_var and if TYPE is a vector or a complex number, mark the new
462 temporary as gimple register. */
463
464tree
465create_tmp_reg (tree type, const char *prefix)
466{
467 tree tmp;
468
469 tmp = create_tmp_var (type, prefix);
470 if (TREE_CODE (type) == COMPLEX_TYPE
471 || TREE_CODE (type) == VECTOR_TYPE)
472 DECL_GIMPLE_REG_P (tmp) = 1;
473
474 return tmp;
475}
476
477/* Returns true iff T is a valid RHS for an assignment to a renamed
478 user -- or front-end generated artificial -- variable. */
479
480static bool
481is_gimple_reg_rhs (tree t)
482{
483 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
484}
485
486/* Returns true iff T is a valid RHS for an assignment to an un-renamed
487 LHS, or for a call argument. */
488
489static bool
490is_gimple_mem_rhs (tree t)
491{
492 /* If we're dealing with a renamable type, either source or dest must be
493 a renamed variable. */
494 if (is_gimple_reg_type (TREE_TYPE (t)))
495 return is_gimple_val (t);
496 else
497 return is_gimple_val (t) || is_gimple_lvalue (t);
498}
499
500/* Return true if T is a CALL_EXPR or an expression that can be
501 assigned to a temporary. Note that this predicate should only be
502 used during gimplification. See the rationale for this in
503 gimplify_modify_expr. */
504
505static bool
506is_gimple_reg_rhs_or_call (tree t)
507{
508 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
509 || TREE_CODE (t) == CALL_EXPR);
510}
511
512/* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
513 this predicate should only be used during gimplification. See the
514 rationale for this in gimplify_modify_expr. */
515
516static bool
517is_gimple_mem_rhs_or_call (tree t)
518{
519 /* If we're dealing with a renamable type, either source or dest must be
520 a renamed variable. */
521 if (is_gimple_reg_type (TREE_TYPE (t)))
522 return is_gimple_val (t);
523 else
524 return (is_gimple_val (t) || is_gimple_lvalue (t)
525 || TREE_CODE (t) == CALL_EXPR);
526}
527
528/* Create a temporary with a name derived from VAL. Subroutine of
529 lookup_tmp_var; nobody else should call this function. */
530
531static inline tree
532create_tmp_from_val (tree val, bool is_formal)
533{
534 /* Drop all qualifiers and address-space information from the value type. */
535 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
536 tree var = create_tmp_var (type, get_name (val));
537 if (is_formal
538 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
539 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
540 DECL_GIMPLE_REG_P (var) = 1;
541 return var;
542}
543
544/* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
545 an existing expression temporary. */
546
547static tree
548lookup_tmp_var (tree val, bool is_formal)
549{
550 tree ret;
551
552 /* If not optimizing, never really reuse a temporary. local-alloc
553 won't allocate any variable that is used in more than one basic
554 block, which means it will go into memory, causing much extra
555 work in reload and final and poorer code generation, outweighing
556 the extra memory allocation here. */
557 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
558 ret = create_tmp_from_val (val, is_formal);
559 else
560 {
561 elt_t elt, *elt_p;
562 elt_t **slot;
563
564 elt.val = val;
565 if (!gimplify_ctxp->temp_htab.is_created ())
566 gimplify_ctxp->temp_htab.create (1000);
567 slot = gimplify_ctxp->temp_htab.find_slot (&elt, INSERT);
568 if (*slot == NULL)
569 {
570 elt_p = XNEW (elt_t);
571 elt_p->val = val;
572 elt_p->temp = ret = create_tmp_from_val (val, is_formal);
573 *slot = elt_p;
574 }
575 else
576 {
577 elt_p = *slot;
578 ret = elt_p->temp;
579 }
580 }
581
582 return ret;
583}
584
585/* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
586
587static tree
588internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
589 bool is_formal)
590{
591 tree t, mod;
592
593 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
594 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
595 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
596 fb_rvalue);
597
598 if (gimplify_ctxp->into_ssa
599 && is_gimple_reg_type (TREE_TYPE (val)))
600 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
601 else
602 t = lookup_tmp_var (val, is_formal);
603
604 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
605
606 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));
607
608 /* gimplify_modify_expr might want to reduce this further. */
609 gimplify_and_add (mod, pre_p);
610 ggc_free (mod);
611
612 return t;
613}
614
615/* Return a formal temporary variable initialized with VAL. PRE_P is as
616 in gimplify_expr. Only use this function if:
617
618 1) The value of the unfactored expression represented by VAL will not
619 change between the initialization and use of the temporary, and
620 2) The temporary will not be otherwise modified.
621
622 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
623 and #2 means it is inappropriate for && temps.
624
625 For other cases, use get_initialized_tmp_var instead. */
626
627tree
628get_formal_tmp_var (tree val, gimple_seq *pre_p)
629{
630 return internal_get_tmp_var (val, pre_p, NULL, true);
631}
632
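/* Illustrative note, not part of the original sources: when optimizing,
   lookup_tmp_var hashes the unfactored value, so formal temporaries
   requested for structurally identical, side-effect-free values such as
   "a + b" may resolve to the same VAR_DECL.  That is only safe under the
   two conditions listed above; an "&&" shortcut temporary, for instance,
   is modified after its initialization and therefore must go through
   get_initialized_tmp_var instead.  */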
633/* Return a temporary variable initialized with VAL. PRE_P and POST_P
634 are as in gimplify_expr. */
635
636tree
637get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
638{
639 return internal_get_tmp_var (val, pre_p, post_p, false);
640}
641
642/* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
643 generate debug info for them; otherwise don't. */
644
645void
646declare_vars (tree vars, gimple scope, bool debug_info)
647{
648 tree last = vars;
649 if (last)
650 {
651 tree temps, block;
652
653 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
654
655 temps = nreverse (last);
656
657 block = gimple_bind_block (scope);
658 gcc_assert (!block || TREE_CODE (block) == BLOCK);
659 if (!block || !debug_info)
660 {
661 DECL_CHAIN (last) = gimple_bind_vars (scope);
662 gimple_bind_set_vars (scope, temps);
663 }
664 else
665 {
666 /* We need to attach the nodes both to the BIND_EXPR and to its
667 associated BLOCK for debugging purposes. The key point here
668 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
669 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
670 if (BLOCK_VARS (block))
671 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
672 else
673 {
674 gimple_bind_set_vars (scope,
675 chainon (gimple_bind_vars (scope), temps));
676 BLOCK_VARS (block) = temps;
677 }
678 }
679 }
680}
681
682/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
683 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
684 no such upper bound can be obtained. */
685
686static void
687force_constant_size (tree var)
688{
689 /* The only attempt we make is by querying the maximum size of objects
690 of the variable's type. */
691
692 HOST_WIDE_INT max_size;
693
694 gcc_assert (TREE_CODE (var) == VAR_DECL);
695
696 max_size = max_int_size_in_bytes (TREE_TYPE (var));
697
698 gcc_assert (max_size >= 0);
699
700 DECL_SIZE_UNIT (var)
701 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
702 DECL_SIZE (var)
703 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
704}
705
706/* Push the temporary variable TMP into the current binding. */
707
708void
709gimple_add_tmp_var (tree tmp)
710{
711 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
712
713 /* Later processing assumes that the object size is constant, which might
714 not be true at this point. Force the use of a constant upper bound in
715 this case. */
716 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
717 force_constant_size (tmp);
718
719 DECL_CONTEXT (tmp) = current_function_decl;
720 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
721
722 if (gimplify_ctxp)
723 {
724 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
725 gimplify_ctxp->temps = tmp;
726
727 /* Mark temporaries local within the nearest enclosing parallel. */
728 if (gimplify_omp_ctxp)
729 {
730 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
731 while (ctx
732 && (ctx->region_type == ORT_WORKSHARE
733 || ctx->region_type == ORT_SIMD))
734 ctx = ctx->outer_context;
735 if (ctx)
736 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
737 }
738 }
739 else if (cfun)
740 record_vars (tmp);
741 else
742 {
743 gimple_seq body_seq;
744
745 /* This case is for nested functions. We need to expose the locals
746 they create. */
747 body_seq = gimple_body (current_function_decl);
748 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
749 }
750}
751
752/* Determine whether to assign a location to the statement GS. */
753
754static bool
755should_carry_location_p (gimple gs)
756{
757 /* Don't emit a line note for a label. We particularly don't want to
758 emit one for the break label, since it doesn't actually correspond
759 to the beginning of the loop/switch. */
760 if (gimple_code (gs) == GIMPLE_LABEL)
761 return false;
762
763 return true;
764}
765
766/* Return true if a location should not be emitted for this statement
767 by annotate_one_with_location. */
768
769static inline bool
770gimple_do_not_emit_location_p (gimple g)
771{
772 return gimple_plf (g, GF_PLF_1);
773}
774
775/* Mark statement G so a location will not be emitted by
776 annotate_one_with_location. */
777
778static inline void
779gimple_set_do_not_emit_location (gimple g)
780{
781 /* The PLF flags are initialized to 0 when a new tuple is created,
782 so no need to initialize it anywhere. */
783 gimple_set_plf (g, GF_PLF_1, true);
784}
785
786/* Set the location for gimple statement GS to LOCATION. */
787
788static void
789annotate_one_with_location (gimple gs, location_t location)
790{
791 if (!gimple_has_location (gs)
792 && !gimple_do_not_emit_location_p (gs)
793 && should_carry_location_p (gs))
794 gimple_set_location (gs, location);
795}
796
797/* Set LOCATION for all the statements after iterator GSI in sequence
798 SEQ. If GSI is pointing to the end of the sequence, start with the
799 first statement in SEQ. */
800
801static void
802annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
803 location_t location)
804{
805 if (gsi_end_p (gsi))
806 gsi = gsi_start (seq);
807 else
808 gsi_next (&gsi);
809
810 for (; !gsi_end_p (gsi); gsi_next (&gsi))
811 annotate_one_with_location (gsi_stmt (gsi), location);
812}
813
814/* Set the location for all the statements in a sequence STMT_P to LOCATION. */
815
816void
817annotate_all_with_location (gimple_seq stmt_p, location_t location)
818{
819 gimple_stmt_iterator i;
820
821 if (gimple_seq_empty_p (stmt_p))
822 return;
823
824 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
825 {
826 gimple gs = gsi_stmt (i);
827 annotate_one_with_location (gs, location);
828 }
829}
830\f
831/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
832 nodes that are referenced more than once in GENERIC functions. This is
833 necessary because gimplification (translation into GIMPLE) is performed
834 by modifying tree nodes in-place, so gimplification of a shared node in a
835 first context could generate an invalid GIMPLE form in a second context.
836
837 This is achieved with a simple mark/copy/unmark algorithm that walks the
838 GENERIC representation top-down, marks nodes with TREE_VISITED the first
839 time it encounters them, duplicates them if they already have TREE_VISITED
840 set, and finally removes the TREE_VISITED marks it has set.
841
842 The algorithm works only at the function level, i.e. it generates a GENERIC
843 representation of a function with no nodes shared within the function when
844 passed a GENERIC function (except for nodes that are allowed to be shared).
845
846 At the global level, it is also necessary to unshare tree nodes that are
847 referenced in more than one function, for the same aforementioned reason.
848 This requires some cooperation from the front-end. There are 2 strategies:
849
850 1. Manual unsharing. The front-end needs to call unshare_expr on every
851 expression that might end up being shared across functions.
852
853 2. Deep unsharing. This is an extension of regular unsharing. Instead
854 of calling unshare_expr on expressions that might be shared across
855 functions, the front-end pre-marks them with TREE_VISITED. This will
856 ensure that they are unshared on the first reference within functions
857 when the regular unsharing algorithm runs. The counterpart is that
858 this algorithm must look deeper than for manual unsharing, which is
859 specified by LANG_HOOKS_DEEP_UNSHARING.
860
861 If there are only few specific cases of node sharing across functions, it is
862 probably easier for a front-end to unshare the expressions manually. On the
863 contrary, if the expressions generated at the global level are as widespread
864 as expressions generated within functions, deep unsharing is very likely the
865 way to go. */
866
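/* Illustrative note, not part of the original sources: if a front end reuses
   a single PLUS_EXPR node T for "a + b" in two different statements, the
   first gimplification rewrites T in place, so the second statement would
   then reference a node that is no longer valid GENERIC.  The mark/copy/
   unmark walk described above gives second and later references their own
   copies first.  SAVE_EXPR and TARGET_EXPR are deliberately not duplicated,
   since they model computations that must be evaluated exactly once.  */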
867/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
868 These nodes model computations that must be done once. If we were to
869 unshare something like SAVE_EXPR(i++), the gimplification process would
870 create wrong code. However, if DATA is non-null, it must hold a pointer
871 set that is used to unshare the subtrees of these nodes. */
872
873static tree
874mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
875{
876 tree t = *tp;
877 enum tree_code code = TREE_CODE (t);
878
879 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
880 copy their subtrees if we can make sure to do it only once. */
881 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
882 {
883 if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
884 ;
885 else
886 *walk_subtrees = 0;
887 }
888
889 /* Stop at types, decls, constants like copy_tree_r. */
890 else if (TREE_CODE_CLASS (code) == tcc_type
891 || TREE_CODE_CLASS (code) == tcc_declaration
892 || TREE_CODE_CLASS (code) == tcc_constant
893 /* We can't do anything sensible with a BLOCK used as an
894 expression, but we also can't just die when we see it
895 because of non-expression uses. So we avert our eyes
896 and cross our fingers. Silly Java. */
897 || code == BLOCK)
898 *walk_subtrees = 0;
899
900 /* Cope with the statement expression extension. */
901 else if (code == STATEMENT_LIST)
902 ;
903
904 /* Leave the bulk of the work to copy_tree_r itself. */
905 else
906 copy_tree_r (tp, walk_subtrees, NULL);
907
908 return NULL_TREE;
909}
910
911/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
912 If *TP has been visited already, then *TP is deeply copied by calling
913 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
914
915static tree
916copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
917{
918 tree t = *tp;
919 enum tree_code code = TREE_CODE (t);
920
921 /* Skip types, decls, and constants. But we do want to look at their
922 types and the bounds of types. Mark them as visited so we properly
923 unmark their subtrees on the unmark pass. If we've already seen them,
924 don't look down further. */
925 if (TREE_CODE_CLASS (code) == tcc_type
926 || TREE_CODE_CLASS (code) == tcc_declaration
927 || TREE_CODE_CLASS (code) == tcc_constant)
928 {
929 if (TREE_VISITED (t))
930 *walk_subtrees = 0;
931 else
932 TREE_VISITED (t) = 1;
933 }
934
935 /* If this node has been visited already, unshare it and don't look
936 any deeper. */
937 else if (TREE_VISITED (t))
938 {
939 walk_tree (tp, mostly_copy_tree_r, data, NULL);
940 *walk_subtrees = 0;
941 }
942
943 /* Otherwise, mark the node as visited and keep looking. */
944 else
945 TREE_VISITED (t) = 1;
946
947 return NULL_TREE;
948}
949
950/* Unshare most of the shared trees rooted at *TP. DATA is passed to the
951 copy_if_shared_r callback unmodified. */
952
953static inline void
954copy_if_shared (tree *tp, void *data)
955{
956 walk_tree (tp, copy_if_shared_r, data, NULL);
957}
958
959/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
960 any nested functions. */
961
962static void
963unshare_body (tree fndecl)
964{
965 struct cgraph_node *cgn = cgraph_get_node (fndecl);
966 /* If the language requires deep unsharing, we need a pointer set to make
967 sure we don't repeatedly unshare subtrees of unshareable nodes. */
968 struct pointer_set_t *visited
969 = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
970
971 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
972 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
973 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
974
975 if (visited)
976 pointer_set_destroy (visited);
977
978 if (cgn)
979 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
980 unshare_body (cgn->decl);
981}
982
983/* Callback for walk_tree to unmark the visited trees rooted at *TP.
984 Subtrees are walked until the first unvisited node is encountered. */
985
986static tree
987unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
988{
989 tree t = *tp;
990
991 /* If this node has been visited, unmark it and keep looking. */
992 if (TREE_VISITED (t))
993 TREE_VISITED (t) = 0;
994
995 /* Otherwise, don't look any deeper. */
996 else
997 *walk_subtrees = 0;
998
999 return NULL_TREE;
1000}
1001
1002/* Unmark the visited trees rooted at *TP. */
1003
1004static inline void
1005unmark_visited (tree *tp)
1006{
1007 walk_tree (tp, unmark_visited_r, NULL, NULL);
1008}
1009
1010/* Likewise, but mark all trees as not visited. */
1011
1012static void
1013unvisit_body (tree fndecl)
1014{
1015 struct cgraph_node *cgn = cgraph_get_node (fndecl);
1016
1017 unmark_visited (&DECL_SAVED_TREE (fndecl));
1018 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1019 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1020
1021 if (cgn)
1022 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1023 unvisit_body (cgn->decl);
1024}
1025
1026/* Unconditionally make an unshared copy of EXPR. This is used when using
1027 stored expressions which span multiple functions, such as BINFO_VTABLE,
1028 as the normal unsharing process can't tell that they're shared. */
1029
1030tree
1031unshare_expr (tree expr)
1032{
1033 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1034 return expr;
1035}
1036
1037/* Worker for unshare_expr_without_location. */
1038
1039static tree
1040prune_expr_location (tree *tp, int *walk_subtrees, void *)
1041{
1042 if (EXPR_P (*tp))
1043 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1044 else
1045 *walk_subtrees = 0;
1046 return NULL_TREE;
1047}
1048
1049/* Similar to unshare_expr but also prune all expression locations
1050 from EXPR. */
1051
1052tree
1053unshare_expr_without_location (tree expr)
1054{
1055 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1056 if (EXPR_P (expr))
1057 walk_tree (&expr, prune_expr_location, NULL, NULL);
1058 return expr;
1059}
1060\f
1061/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1062 contain statements and have a value. Assign its value to a temporary
1063 and give it void_type_node. Return the temporary, or NULL_TREE if
1064 WRAPPER was already void. */
1065
1066tree
1067voidify_wrapper_expr (tree wrapper, tree temp)
1068{
1069 tree type = TREE_TYPE (wrapper);
1070 if (type && !VOID_TYPE_P (type))
1071 {
1072 tree *p;
1073
1074 /* Set p to point to the body of the wrapper. Loop until we find
1075 something that isn't a wrapper. */
1076 for (p = &wrapper; p && *p; )
1077 {
1078 switch (TREE_CODE (*p))
1079 {
1080 case BIND_EXPR:
1081 TREE_SIDE_EFFECTS (*p) = 1;
1082 TREE_TYPE (*p) = void_type_node;
1083 /* For a BIND_EXPR, the body is operand 1. */
1084 p = &BIND_EXPR_BODY (*p);
1085 break;
1086
1087 case CLEANUP_POINT_EXPR:
1088 case TRY_FINALLY_EXPR:
1089 case TRY_CATCH_EXPR:
1090 TREE_SIDE_EFFECTS (*p) = 1;
1091 TREE_TYPE (*p) = void_type_node;
1092 p = &TREE_OPERAND (*p, 0);
1093 break;
1094
1095 case STATEMENT_LIST:
1096 {
1097 tree_stmt_iterator i = tsi_last (*p);
1098 TREE_SIDE_EFFECTS (*p) = 1;
1099 TREE_TYPE (*p) = void_type_node;
1100 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1101 }
1102 break;
1103
1104 case COMPOUND_EXPR:
1105 /* Advance to the last statement. Set all container types to
1106 void. */
1107 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1108 {
1109 TREE_SIDE_EFFECTS (*p) = 1;
1110 TREE_TYPE (*p) = void_type_node;
1111 }
1112 break;
1113
1114 case TRANSACTION_EXPR:
1115 TREE_SIDE_EFFECTS (*p) = 1;
1116 TREE_TYPE (*p) = void_type_node;
1117 p = &TRANSACTION_EXPR_BODY (*p);
1118 break;
1119
1120 default:
1121 /* Assume that any tree upon which voidify_wrapper_expr is
1122 directly called is a wrapper, and that its body is op0. */
1123 if (p == &wrapper)
1124 {
1125 TREE_SIDE_EFFECTS (*p) = 1;
1126 TREE_TYPE (*p) = void_type_node;
1127 p = &TREE_OPERAND (*p, 0);
1128 break;
1129 }
1130 goto out;
1131 }
1132 }
1133
1134 out:
1135 if (p == NULL || IS_EMPTY_STMT (*p))
1136 temp = NULL_TREE;
1137 else if (temp)
1138 {
1139 /* The wrapper is on the RHS of an assignment that we're pushing
1140 down. */
1141 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1142 || TREE_CODE (temp) == MODIFY_EXPR);
1143 TREE_OPERAND (temp, 1) = *p;
1144 *p = temp;
1145 }
1146 else
1147 {
1148 temp = create_tmp_var (type, "retval");
1149 *p = build2 (INIT_EXPR, type, temp, *p);
1150 }
1151
1152 return temp;
1153 }
1154
1155 return NULL_TREE;
1156}
1157
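/* Illustrative note, not part of the original sources: for a GNU statement
   expression used as an initializer,

     int x = ({ int t = f (); t; });

   the front end produces a value-yielding BIND_EXPR.  voidify_wrapper_expr
   walks down to the last expression of the wrapper, replaces it with an
   assignment to a "retval" temporary (or to the assignment being pushed
   down), and gives every enclosing wrapper void type, so that the construct
   can then be gimplified as ordinary statements.  */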
1158/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1159 a temporary through which they communicate. */
1160
1161static void
1162build_stack_save_restore (gimple *save, gimple *restore)
1163{
1164 tree tmp_var;
1165
1166 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1167 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1168 gimple_call_set_lhs (*save, tmp_var);
1169
1170 *restore
1171 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1172 1, tmp_var);
1173}
1174
1175/* Gimplify a BIND_EXPR. Just voidify and recurse. */
1176
1177static enum gimplify_status
1178gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1179{
1180 tree bind_expr = *expr_p;
1181 bool old_save_stack = gimplify_ctxp->save_stack;
1182 tree t;
1183 gimple gimple_bind;
1184 gimple_seq body, cleanup;
1185 gimple stack_save;
1186
1187 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1188
1189 /* Mark variables seen in this bind expr. */
1190 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1191 {
1192 if (TREE_CODE (t) == VAR_DECL)
1193 {
1194 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1195
1196 /* Mark variable as local. */
1197 if (ctx && !DECL_EXTERNAL (t)
1198 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1199 || splay_tree_lookup (ctx->variables,
1200 (splay_tree_key) t) == NULL))
1201 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1202
1203 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1204
1205 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1206 cfun->has_local_explicit_reg_vars = true;
1207 }
1208
1209 /* Preliminarily mark non-addressed complex variables as eligible
1210 for promotion to gimple registers. We'll transform their uses
1211 as we find them. */
1212 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1213 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1214 && !TREE_THIS_VOLATILE (t)
1215 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1216 && !needs_to_live_in_memory (t))
1217 DECL_GIMPLE_REG_P (t) = 1;
1218 }
1219
1220 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1221 BIND_EXPR_BLOCK (bind_expr));
1222 gimple_push_bind_expr (gimple_bind);
1223
1224 gimplify_ctxp->save_stack = false;
1225
1226 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1227 body = NULL;
1228 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1229 gimple_bind_set_body (gimple_bind, body);
1230
1231 cleanup = NULL;
1232 stack_save = NULL;
1233 if (gimplify_ctxp->save_stack)
1234 {
1235 gimple stack_restore;
1236
1237 /* Save stack on entry and restore it on exit. Add a try_finally
1238 block to achieve this. */
1239 build_stack_save_restore (&stack_save, &stack_restore);
1240
1241 gimplify_seq_add_stmt (&cleanup, stack_restore);
1242 }
1243
1244 /* Add clobbers for all variables that go out of scope. */
1245 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1246 {
1247 if (TREE_CODE (t) == VAR_DECL
1248 && !is_global_var (t)
1249 && DECL_CONTEXT (t) == current_function_decl
1250 && !DECL_HARD_REGISTER (t)
1251 && !TREE_THIS_VOLATILE (t)
1252 && !DECL_HAS_VALUE_EXPR_P (t)
1253 /* Only care for variables that have to be in memory. Others
1254 will be rewritten into SSA names, hence moved to the top-level. */
1255 && !is_gimple_reg (t)
1256 && flag_stack_reuse != SR_NONE)
1257 {
1258 tree clobber = build_constructor (TREE_TYPE (t),
1259 NULL);
1260 TREE_THIS_VOLATILE (clobber) = 1;
1261 gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
1262 }
1263 }
1264
1265 if (cleanup)
1266 {
1267 gimple gs;
1268 gimple_seq new_body;
1269
1270 new_body = NULL;
1271 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1272 GIMPLE_TRY_FINALLY);
1273
1274 if (stack_save)
1275 gimplify_seq_add_stmt (&new_body, stack_save);
1276 gimplify_seq_add_stmt (&new_body, gs);
1277 gimple_bind_set_body (gimple_bind, new_body);
1278 }
1279
1280 gimplify_ctxp->save_stack = old_save_stack;
1281 gimple_pop_bind_expr ();
1282
1283 gimplify_seq_add_stmt (pre_p, gimple_bind);
1284
1285 if (temp)
1286 {
1287 *expr_p = temp;
1288 return GS_OK;
1289 }
1290
1291 *expr_p = NULL_TREE;
1292 return GS_ALL_DONE;
1293}
1294
1295/* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1296 GIMPLE value, it is assigned to a new temporary and the statement is
1297 re-written to return the temporary.
1298
1299 PRE_P points to the sequence where side effects that must happen before
1300 STMT should be stored. */
1301
1302static enum gimplify_status
1303gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1304{
1305 gimple ret;
1306 tree ret_expr = TREE_OPERAND (stmt, 0);
1307 tree result_decl, result;
1308
1309 if (ret_expr == error_mark_node)
1310 return GS_ERROR;
1311
1312 if (!ret_expr
1313 || TREE_CODE (ret_expr) == RESULT_DECL
1314 || ret_expr == error_mark_node)
1315 {
1316 gimple ret = gimple_build_return (ret_expr);
1317 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1318 gimplify_seq_add_stmt (pre_p, ret);
1319 return GS_ALL_DONE;
1320 }
1321
1322 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1323 result_decl = NULL_TREE;
1324 else
1325 {
1326 result_decl = TREE_OPERAND (ret_expr, 0);
1327
1328 /* See through a return by reference. */
1329 if (TREE_CODE (result_decl) == INDIRECT_REF)
1330 result_decl = TREE_OPERAND (result_decl, 0);
1331
1332 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1333 || TREE_CODE (ret_expr) == INIT_EXPR)
1334 && TREE_CODE (result_decl) == RESULT_DECL);
1335 }
1336
1337 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1338 Recall that aggregate_value_p is FALSE for any aggregate type that is
1339 returned in registers. If we're returning values in registers, then
1340 we don't want to extend the lifetime of the RESULT_DECL, particularly
1341 across another call. In addition, for those aggregates for which
1342 hard_function_value generates a PARALLEL, we'll die during normal
1343 expansion of structure assignments; there's special code in expand_return
1344 to handle this case that does not exist in expand_expr. */
1345 if (!result_decl)
1346 result = NULL_TREE;
1347 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1348 {
1349 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1350 {
1351 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1352 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1353 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1354 should be effectively allocated by the caller, i.e. all calls to
1355 this function must be subject to the Return Slot Optimization. */
1356 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1357 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1358 }
1359 result = result_decl;
1360 }
1361 else if (gimplify_ctxp->return_temp)
1362 result = gimplify_ctxp->return_temp;
1363 else
1364 {
1365 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1366
1367 /* ??? With complex control flow (usually involving abnormal edges),
1368 we can wind up warning about an uninitialized value for this. Due
1369 to how this variable is constructed and initialized, this is never
1370 true. Give up and never warn. */
1371 TREE_NO_WARNING (result) = 1;
1372
1373 gimplify_ctxp->return_temp = result;
1374 }
1375
1376 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1377 Then gimplify the whole thing. */
1378 if (result != result_decl)
1379 TREE_OPERAND (ret_expr, 0) = result;
1380
1381 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1382
1383 ret = gimple_build_return (result);
1384 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1385 gimplify_seq_add_stmt (pre_p, ret);
1386
1387 return GS_ALL_DONE;
1388}
1389
1390/* Gimplify a variable-length array DECL. */
1391
1392static void
1393gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1394{
1395 /* This is a variable-sized decl. Simplify its size and mark it
1396 for deferred expansion. */
1397 tree t, addr, ptr_type;
1398
1399 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1400 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1401
1402 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1403 if (DECL_HAS_VALUE_EXPR_P (decl))
1404 return;
1405
1406 /* All occurrences of this decl in final gimplified code will be
1407 replaced by indirection. Setting DECL_VALUE_EXPR does two
1408 things: First, it lets the rest of the gimplifier know what
1409 replacement to use. Second, it lets the debug info know
1410 where to find the value. */
1411 ptr_type = build_pointer_type (TREE_TYPE (decl));
1412 addr = create_tmp_var (ptr_type, get_name (decl));
1413 DECL_IGNORED_P (addr) = 0;
1414 t = build_fold_indirect_ref (addr);
1415 TREE_THIS_NOTRAP (t) = 1;
1416 SET_DECL_VALUE_EXPR (decl, t);
1417 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1418
1419 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1420 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1421 size_int (DECL_ALIGN (decl)));
1422 /* The call has been built for a variable-sized object. */
1423 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1424 t = fold_convert (ptr_type, t);
1425 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1426
1427 gimplify_and_add (t, seq_p);
1428
1429 /* Indicate that we need to restore the stack level when the
1430 enclosing BIND_EXPR is exited. */
1431 gimplify_ctxp->save_stack = true;
1432}
1433
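/* Illustrative note, not part of the original sources: for a declaration
   such as "char buf[n];", gimplify_vla_decl above emits roughly

     tmp = __builtin_alloca_with_align (SIZE_UNIT, ALIGN);
     buf.addr = (pointer type) tmp;

   where "buf.addr" is a hypothetical pointer temporary, records "*buf.addr"
   as the DECL_VALUE_EXPR of buf so later uses become indirections, and
   requests a stack save/restore pair from the enclosing BIND_EXPR.  */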
1434/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1435 and initialization explicit. */
1436
1437static enum gimplify_status
1438gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1439{
1440 tree stmt = *stmt_p;
1441 tree decl = DECL_EXPR_DECL (stmt);
1442
1443 *stmt_p = NULL_TREE;
1444
1445 if (TREE_TYPE (decl) == error_mark_node)
1446 return GS_ERROR;
1447
8e0a600b
JJ
1448 if ((TREE_CODE (decl) == TYPE_DECL
1449 || TREE_CODE (decl) == VAR_DECL)
1450 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
726a989a 1451 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
350fae66 1452
d400d17e
EB
1453 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1454 in case its size expressions contain problematic nodes like CALL_EXPR. */
1455 if (TREE_CODE (decl) == TYPE_DECL
1456 && DECL_ORIGINAL_TYPE (decl)
1457 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1458 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1459
8e0a600b 1460 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
350fae66
RK
1461 {
1462 tree init = DECL_INITIAL (decl);
1463
1464 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1465 || (!TREE_STATIC (decl)
1466 && flag_stack_check == GENERIC_STACK_CHECK
1467 && compare_tree_int (DECL_SIZE_UNIT (decl),
1468 STACK_CHECK_MAX_VAR_SIZE) > 0))
1469 gimplify_vla_decl (decl, seq_p);
1470
1471 /* Some front ends do not explicitly declare all anonymous
1472 artificial variables. We compensate here by declaring the
1473 variables, though it would be better if the front ends would
1474 explicitly declare them. */
1475 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1476 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1477 gimple_add_tmp_var (decl);
1478
1479 if (init && init != error_mark_node)
1480 {
1481 if (!TREE_STATIC (decl))
1482 {
1483 DECL_INITIAL (decl) = NULL_TREE;
1484 init = build2 (INIT_EXPR, void_type_node, decl, init);
1485 gimplify_and_add (init, seq_p);
1486 ggc_free (init);
1487 }
1488 else
1489 /* We must still examine initializers for static variables
1490 as they may contain a label address. */
1491 walk_tree (&init, force_labels_r, NULL, NULL);
1492 }
1493 }
1494
1495 return GS_ALL_DONE;
1496}
1497
1498/* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1499 and replacing the LOOP_EXPR with goto, but if the loop contains an
1500 EXIT_EXPR, we need to append a label for it to jump to. */
1501
1502static enum gimplify_status
1503gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1504{
1505 tree saved_label = gimplify_ctxp->exit_label;
1506 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1507
1508 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1509
1510 gimplify_ctxp->exit_label = NULL_TREE;
1511
1512 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1513
1514 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1515
1516 if (gimplify_ctxp->exit_label)
1517 gimplify_seq_add_stmt (pre_p,
1518 gimple_build_label (gimplify_ctxp->exit_label));
1519
1520 gimplify_ctxp->exit_label = saved_label;
1521
1522 *expr_p = NULL;
1523 return GS_ALL_DONE;
1524}
1525
1526/* Gimplify a statement list onto a sequence. These may be created either
1527 by an enlightened front-end, or by shortcut_cond_expr. */
1528
1529static enum gimplify_status
1530gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1531{
1532 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1533
1534 tree_stmt_iterator i = tsi_start (*expr_p);
1535
1536 while (!tsi_end_p (i))
1537 {
1538 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1539 tsi_delink (&i);
1540 }
1541
1542 if (temp)
1543 {
1544 *expr_p = temp;
1545 return GS_OK;
1546 }
1547
1548 return GS_ALL_DONE;
1549}
1550\f
1551/* Compare two case labels. Because the front end should already have
1552 made sure that case ranges do not overlap, it is enough to only compare
1553 the CASE_LOW values of each case label. */
1554
1555static int
1556compare_case_labels (const void *p1, const void *p2)
1557{
1558 const_tree const case1 = *(const_tree const*)p1;
1559 const_tree const case2 = *(const_tree const*)p2;
1560
1561 /* The 'default' case label always goes first. */
1562 if (!CASE_LOW (case1))
1563 return -1;
1564 else if (!CASE_LOW (case2))
1565 return 1;
1566 else
1567 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1568}
1569
1570/* Sort the case labels in LABEL_VEC in place in ascending order. */
1571
1572void
1573sort_case_labels (vec<tree> label_vec)
1574{
1575 label_vec.qsort (compare_case_labels);
1576}
1577\f
1578/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
1579
1580 LABELS is a vector that contains all case labels to look at.
1581
1582 INDEX_TYPE is the type of the switch index expression. Case labels
1583 in LABELS are discarded if their values are not in the value range
1584 covered by INDEX_TYPE. The remaining case label values are folded
1585 to INDEX_TYPE.
1586
1587 If a default case exists in LABELS, it is removed from LABELS and
1588 returned in DEFAULT_CASEP. If no default case exists, but the
1589 case labels already cover the whole range of INDEX_TYPE, a default
1590 case is returned pointing to one of the existing case labels.
1591 Otherwise DEFAULT_CASEP is set to NULL_TREE.
1592
1593 DEFAULT_CASEP may be NULL, in which case the above comment doesn't
1594 apply and no action is taken regardless of whether a default case is
1595 found or not. */
1596
1597void
1598preprocess_case_label_vec_for_gimple (vec<tree> labels,
1599 tree index_type,
1600 tree *default_casep)
1601{
1602 tree min_value, max_value;
1603 tree default_case = NULL_TREE;
1604 size_t i, len;
1605
1606 i = 0;
1607 min_value = TYPE_MIN_VALUE (index_type);
1608 max_value = TYPE_MAX_VALUE (index_type);
1609 while (i < labels.length ())
1610 {
1611 tree elt = labels[i];
1612 tree low = CASE_LOW (elt);
1613 tree high = CASE_HIGH (elt);
1614 bool remove_element = FALSE;
1615
1616 if (low)
1617 {
1618 gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
1619 gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);
1620
1621 /* This is a non-default case label, i.e. it has a value.
1622
1623 See if the case label is reachable within the range of
1624 the index type. Remove out-of-range case values. Turn
1625 case ranges into a canonical form (high > low strictly)
1626 and convert the case label values to the index type.
1627
1628 NB: The type of gimple_switch_index() may be the promoted
1629 type, but the case labels retain the original type. */
1630
1631 if (high)
1632 {
1633 /* This is a case range. Discard empty ranges.
1634 If the bounds or the range are equal, turn this
1635 into a simple (one-value) case. */
1636 int cmp = tree_int_cst_compare (high, low);
1637 if (cmp < 0)
1638 remove_element = TRUE;
1639 else if (cmp == 0)
1640 high = NULL_TREE;
1641 }
1642
1643 if (! high)
1644 {
1645 /* If the simple case value is unreachable, ignore it. */
1646 if ((TREE_CODE (min_value) == INTEGER_CST
1647 && tree_int_cst_compare (low, min_value) < 0)
1648 || (TREE_CODE (max_value) == INTEGER_CST
1649 && tree_int_cst_compare (low, max_value) > 0))
1650 remove_element = TRUE;
1651 else
1652 low = fold_convert (index_type, low);
1653 }
1654 else
1655 {
1656 /* If the entire case range is unreachable, ignore it. */
1657 if ((TREE_CODE (min_value) == INTEGER_CST
1658 && tree_int_cst_compare (high, min_value) < 0)
1659 || (TREE_CODE (max_value) == INTEGER_CST
1660 && tree_int_cst_compare (low, max_value) > 0))
1661 remove_element = TRUE;
1662 else
1663 {
1664 /* If the lower bound is less than the index type's
1665 minimum value, truncate the range bounds. */
1666 if (TREE_CODE (min_value) == INTEGER_CST
1667 && tree_int_cst_compare (low, min_value) < 0)
1668 low = min_value;
1669 low = fold_convert (index_type, low);
1670
1671 /* If the upper bound is greater than the index type's
1672 maximum value, truncate the range bounds. */
1673 if (TREE_CODE (max_value) == INTEGER_CST
1674 && tree_int_cst_compare (high, max_value) > 0)
1675 high = max_value;
1676 high = fold_convert (index_type, high);
5be1632b
SB
1677
1678 /* We may have folded a case range to a one-value case. */
1679 if (tree_int_cst_equal (low, high))
1680 high = NULL_TREE;
68e72840
SB
1681 }
1682 }
1683
1684 CASE_LOW (elt) = low;
1685 CASE_HIGH (elt) = high;
1686 }
1687 else
1688 {
1689 gcc_assert (!default_case);
1690 default_case = elt;
1691 /* The default case must be passed separately to the
fd8d363e 1692 gimple_build_switch routine. But if DEFAULT_CASEP
68e72840
SB
1693 is NULL, we do not remove the default case (it would
1694 be completely lost). */
1695 if (default_casep)
1696 remove_element = TRUE;
1697 }
1698
1699 if (remove_element)
9771b263 1700 labels.ordered_remove (i);
68e72840
SB
1701 else
1702 i++;
1703 }
1704 len = i;
1705
9771b263 1706 if (!labels.is_empty ())
68e72840
SB
1707 sort_case_labels (labels);
1708
1709 if (default_casep && !default_case)
1710 {
1711 /* If the switch has no default label, add one, so that we jump
1712 around the switch body. If the labels already cover the whole
1713 range of the switch index_type, add the default label pointing
1714 to one of the existing labels. */
1715 if (len
1716 && TYPE_MIN_VALUE (index_type)
1717 && TYPE_MAX_VALUE (index_type)
9771b263 1718 && tree_int_cst_equal (CASE_LOW (labels[0]),
68e72840
SB
1719 TYPE_MIN_VALUE (index_type)))
1720 {
9771b263 1721 tree low, high = CASE_HIGH (labels[len - 1]);
68e72840 1722 if (!high)
9771b263 1723 high = CASE_LOW (labels[len - 1]);
68e72840
SB
1724 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
1725 {
1726 for (i = 1; i < len; i++)
1727 {
9771b263
DN
1728 high = CASE_LOW (labels[i]);
1729 low = CASE_HIGH (labels[i - 1]);
68e72840 1730 if (!low)
9771b263 1731 low = CASE_LOW (labels[i - 1]);
68e72840
SB
1732 if ((TREE_INT_CST_LOW (low) + 1
1733 != TREE_INT_CST_LOW (high))
1734 || (TREE_INT_CST_HIGH (low)
1735 + (TREE_INT_CST_LOW (high) == 0)
1736 != TREE_INT_CST_HIGH (high)))
1737 break;
1738 }
1739 if (i == len)
1740 {
9771b263 1741 tree label = CASE_LABEL (labels[0]);
68e72840
SB
1742 default_case = build_case_label (NULL_TREE, NULL_TREE,
1743 label);
1744 }
1745 }
1746 }
1747 }
0f1f6967 1748
68e72840
SB
1749 if (default_casep)
1750 *default_casep = default_case;
1751}
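/* Rough illustration of the preprocessing above (not exact output): with
   a signed char index type (range -128 .. 127),

     case 300:          removed, above TYPE_MAX_VALUE
     case 100 ... 200:  truncated to case 100 ... 127
     case 7 ... 7:      folded to the plain case 7
     default:           handed back through *DEFAULT_CASEP when non-NULL

   and the surviving labels are converted to the index type and sorted.  */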
1752\f
1753/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
6de9cd9a
DN
1754 branch to. */
1755
1756static enum gimplify_status
726a989a 1757gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1758{
1759 tree switch_expr = *expr_p;
726a989a 1760 gimple_seq switch_body_seq = NULL;
6de9cd9a 1761 enum gimplify_status ret;
0cd2402d
SB
1762 tree index_type = TREE_TYPE (switch_expr);
1763 if (index_type == NULL_TREE)
1764 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
6de9cd9a 1765
726a989a
RB
1766 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1767 fb_rvalue);
1768 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1769 return ret;
6de9cd9a
DN
1770
1771 if (SWITCH_BODY (switch_expr))
1772 {
9771b263
DN
1773 vec<tree> labels;
1774 vec<tree> saved_labels;
726a989a 1775 tree default_case = NULL_TREE;
726a989a 1776 gimple gimple_switch;
b8698a0f 1777
6de9cd9a
DN
1778 /* If someone can be bothered to fill in the labels, they can
1779 be bothered to null out the body too. */
282899df 1780 gcc_assert (!SWITCH_LABELS (switch_expr));
6de9cd9a 1781
0cd2402d 1782 /* Save old labels, get new ones from body, then restore the old
726a989a 1783 labels. Save all the things from the switch body to append after. */
6de9cd9a 1784 saved_labels = gimplify_ctxp->case_labels;
9771b263 1785 gimplify_ctxp->case_labels.create (8);
6de9cd9a 1786
726a989a 1787 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
6de9cd9a
DN
1788 labels = gimplify_ctxp->case_labels;
1789 gimplify_ctxp->case_labels = saved_labels;
b8698a0f 1790
68e72840
SB
1791 preprocess_case_label_vec_for_gimple (labels, index_type,
1792 &default_case);
32f579f6 1793
726a989a 1794 if (!default_case)
6de9cd9a 1795 {
68e72840 1796 gimple new_default;
6de9cd9a 1797
68e72840
SB
1798 default_case
1799 = build_case_label (NULL_TREE, NULL_TREE,
1800 create_artificial_label (UNKNOWN_LOCATION));
1801 new_default = gimple_build_label (CASE_LABEL (default_case));
1802 gimplify_seq_add_stmt (&switch_body_seq, new_default);
32f579f6 1803 }
f667741c 1804
fd8d363e
SB
1805 gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
1806 default_case, labels);
726a989a
RB
1807 gimplify_seq_add_stmt (pre_p, gimple_switch);
1808 gimplify_seq_add_seq (pre_p, switch_body_seq);
9771b263 1809 labels.release ();
6de9cd9a 1810 }
282899df
NS
1811 else
1812 gcc_assert (SWITCH_LABELS (switch_expr));
6de9cd9a 1813
726a989a 1814 return GS_ALL_DONE;
6de9cd9a
DN
1815}
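/* Sketch of the overall effect (approximate, details vary): a GENERIC

     switch (x) { case 1: ...; }

   becomes a GIMPLE_SWITCH <x, default: D, case 1: L1> emitted on PRE_P,
   followed by the gimplified body containing the label L1 and, if the
   source provided no default, an artificial default label D.  */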
1816
ad19c4be 1817/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
726a989a 1818
6de9cd9a 1819static enum gimplify_status
726a989a 1820gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 1821{
953ff289 1822 struct gimplify_ctx *ctxp;
726a989a 1823 gimple gimple_label;
953ff289
DN
1824
1825 /* Invalid OpenMP programs can play Duff's Device type games with
1826 #pragma omp parallel. At least in the C front end, we don't
1827 detect such invalid branches until after gimplification. */
1828 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
9771b263 1829 if (ctxp->case_labels.exists ())
953ff289 1830 break;
282899df 1831
726a989a 1832 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
9771b263 1833 ctxp->case_labels.safe_push (*expr_p);
726a989a
RB
1834 gimplify_seq_add_stmt (pre_p, gimple_label);
1835
6de9cd9a
DN
1836 return GS_ALL_DONE;
1837}
1838
6de9cd9a
DN
1839/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1840 if necessary. */
1841
1842tree
1843build_and_jump (tree *label_p)
1844{
1845 if (label_p == NULL)
1846 /* If there's nowhere to jump, just fall through. */
65355d53 1847 return NULL_TREE;
6de9cd9a
DN
1848
1849 if (*label_p == NULL_TREE)
1850 {
c2255bc4 1851 tree label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a
DN
1852 *label_p = label;
1853 }
1854
1855 return build1 (GOTO_EXPR, void_type_node, *label_p);
1856}
1857
1858/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1859 This also involves building a label to jump to and communicating it to
1860 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1861
1862static enum gimplify_status
1863gimplify_exit_expr (tree *expr_p)
1864{
1865 tree cond = TREE_OPERAND (*expr_p, 0);
1866 tree expr;
1867
1868 expr = build_and_jump (&gimplify_ctxp->exit_label);
b4257cfc 1869 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
6de9cd9a
DN
1870 *expr_p = expr;
1871
1872 return GS_OK;
1873}
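/* E.g. (roughly) EXIT_EXPR <cond> becomes

     if (cond) goto exit_label;

   with exit_label recorded in gimplify_ctxp->exit_label so that
   gimplify_loop_expr can emit the label once the loop body is done.  */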
1874
1875/* A helper function to be called via walk_tree. Mark all labels under *TP
1876 as being forced. To be called for DECL_INITIAL of static variables. */
1877
1878tree
1879force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1880{
1881 if (TYPE_P (*tp))
1882 *walk_subtrees = 0;
1883 if (TREE_CODE (*tp) == LABEL_DECL)
1884 FORCED_LABEL (*tp) = 1;
1885
1886 return NULL_TREE;
1887}
1888
26d44ae2
RH
1889/* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1890 different from its canonical type, wrap the whole thing inside a
1891 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1892 type.
6de9cd9a 1893
26d44ae2
RH
1894 The canonical type of a COMPONENT_REF is the type of the field being
1895 referenced--unless the field is a bit-field which can be read directly
1896 in a smaller mode, in which case the canonical type is the
1897 sign-appropriate type corresponding to that mode. */
6de9cd9a 1898
26d44ae2
RH
1899static void
1900canonicalize_component_ref (tree *expr_p)
6de9cd9a 1901{
26d44ae2
RH
1902 tree expr = *expr_p;
1903 tree type;
6de9cd9a 1904
282899df 1905 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
6de9cd9a 1906
26d44ae2
RH
1907 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1908 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1909 else
1910 type = TREE_TYPE (TREE_OPERAND (expr, 1));
6de9cd9a 1911
b26c6d55
RG
1912 /* One could argue that all the stuff below is not necessary for
1913 the non-bitfield case and declare it a FE error if type
1914 adjustment would be needed. */
26d44ae2 1915 if (TREE_TYPE (expr) != type)
6de9cd9a 1916 {
b26c6d55 1917#ifdef ENABLE_TYPES_CHECKING
26d44ae2 1918 tree old_type = TREE_TYPE (expr);
b26c6d55
RG
1919#endif
1920 int type_quals;
1921
1922 /* We need to preserve qualifiers and propagate them from
1923 operand 0. */
1924 type_quals = TYPE_QUALS (type)
1925 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1926 if (TYPE_QUALS (type) != type_quals)
1927 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
6de9cd9a 1928
26d44ae2
RH
1929 /* Set the type of the COMPONENT_REF to the underlying type. */
1930 TREE_TYPE (expr) = type;
6de9cd9a 1931
b26c6d55
RG
1932#ifdef ENABLE_TYPES_CHECKING
1933 /* It is now a FE error, if the conversion from the canonical
1934 type to the original expression type is not useless. */
1935 gcc_assert (useless_type_conversion_p (old_type, type));
1936#endif
26d44ae2
RH
1937 }
1938}
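/* Illustrative example (hypothetical field): for a bit-field read such as
   s.f, get_unwidened may pick a type narrower than the declared int, and
   the COMPONENT_REF is retyped to it here, adding any qualifiers that the
   containing object s carries.  */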
6de9cd9a 1939
26d44ae2 1940/* If a NOP conversion is changing a pointer to array of foo to a pointer
d3147f64 1941 to foo, embed that change in the ADDR_EXPR by converting
26d44ae2
RH
1942 T array[U];
1943 (T *)&array
1944 ==>
1945 &array[L]
1946 where L is the lower bound. For simplicity, only do this for constant
04d86531
RG
1947 lower bound.
1948 The constraint is that the type of &array[L] is trivially convertible
1949 to T *. */
6de9cd9a 1950
26d44ae2
RH
1951static void
1952canonicalize_addr_expr (tree *expr_p)
1953{
1954 tree expr = *expr_p;
26d44ae2 1955 tree addr_expr = TREE_OPERAND (expr, 0);
04d86531 1956 tree datype, ddatype, pddatype;
6de9cd9a 1957
04d86531
RG
1958 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1959 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1960 || TREE_CODE (addr_expr) != ADDR_EXPR)
26d44ae2 1961 return;
6de9cd9a 1962
26d44ae2 1963 /* The addr_expr type should be a pointer to an array. */
04d86531 1964 datype = TREE_TYPE (TREE_TYPE (addr_expr));
26d44ae2
RH
1965 if (TREE_CODE (datype) != ARRAY_TYPE)
1966 return;
6de9cd9a 1967
04d86531
RG
1968 /* The pointer to element type shall be trivially convertible to
1969 the expression pointer type. */
26d44ae2 1970 ddatype = TREE_TYPE (datype);
04d86531 1971 pddatype = build_pointer_type (ddatype);
e5fdcd8c
RG
1972 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1973 pddatype))
26d44ae2 1974 return;
6de9cd9a 1975
26d44ae2 1976 /* The lower bound and element sizes must be constant. */
04d86531
RG
1977 if (!TYPE_SIZE_UNIT (ddatype)
1978 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
26d44ae2
RH
1979 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1980 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1981 return;
6de9cd9a 1982
26d44ae2 1983 /* All checks succeeded. Build a new node to merge the cast. */
04d86531 1984 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
26d44ae2 1985 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
5852948c 1986 NULL_TREE, NULL_TREE);
04d86531 1987 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
e5fdcd8c
RG
1988
1989 /* We can have stripped a required restrict qualifier above. */
1990 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1991 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
26d44ae2 1992}
6de9cd9a 1993
26d44ae2
RH
1994/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1995 underneath as appropriate. */
6de9cd9a 1996
26d44ae2
RH
1997static enum gimplify_status
1998gimplify_conversion (tree *expr_p)
d3147f64 1999{
db3927fb 2000 location_t loc = EXPR_LOCATION (*expr_p);
1043771b 2001 gcc_assert (CONVERT_EXPR_P (*expr_p));
c2255bc4 2002
0710ccff
NS
2003 /* Then strip away all but the outermost conversion. */
2004 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2005
2006 /* And remove the outermost conversion if it's useless. */
2007 if (tree_ssa_useless_type_conversion (*expr_p))
2008 *expr_p = TREE_OPERAND (*expr_p, 0);
6de9cd9a 2009
26d44ae2
RH
2010 /* If we still have a conversion at the toplevel,
2011 then canonicalize some constructs. */
1043771b 2012 if (CONVERT_EXPR_P (*expr_p))
26d44ae2
RH
2013 {
2014 tree sub = TREE_OPERAND (*expr_p, 0);
6de9cd9a 2015
26d44ae2
RH
2016 /* If a NOP conversion is changing the type of a COMPONENT_REF
2017 expression, then canonicalize its type now in order to expose more
2018 redundant conversions. */
2019 if (TREE_CODE (sub) == COMPONENT_REF)
2020 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
6de9cd9a 2021
26d44ae2
RH
2022 /* If a NOP conversion is changing a pointer to array of foo
2023 to a pointer to foo, embed that change in the ADDR_EXPR. */
2024 else if (TREE_CODE (sub) == ADDR_EXPR)
2025 canonicalize_addr_expr (expr_p);
2026 }
6de9cd9a 2027
8b17cc05
RG
2028 /* If we have a conversion to a non-register type force the
2029 use of a VIEW_CONVERT_EXPR instead. */
4f934809 2030 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
db3927fb 2031 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
4f934809 2032 TREE_OPERAND (*expr_p, 0));
8b17cc05 2033
6de9cd9a
DN
2034 return GS_OK;
2035}
2036
77f2a970
JJ
2037/* Nonlocal VLAs seen in the current function. */
2038static struct pointer_set_t *nonlocal_vlas;
2039
ad19c4be 2040/* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
a9f7c570
RH
2041 DECL_VALUE_EXPR, and it's worth re-examining things. */
2042
2043static enum gimplify_status
2044gimplify_var_or_parm_decl (tree *expr_p)
2045{
2046 tree decl = *expr_p;
2047
2048 /* ??? If this is a local variable, and it has not been seen in any
2049 outer BIND_EXPR, then it's probably the result of a duplicate
2050 declaration, for which we've already issued an error. It would
2051 be really nice if the front end wouldn't leak these at all.
2052 Currently the only known culprit is C++ destructors, as seen
2053 in g++.old-deja/g++.jason/binding.C. */
2054 if (TREE_CODE (decl) == VAR_DECL
2055 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2056 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2057 && decl_function_context (decl) == current_function_decl)
2058 {
1da2ed5f 2059 gcc_assert (seen_error ());
a9f7c570
RH
2060 return GS_ERROR;
2061 }
2062
953ff289
DN
2063 /* When within an OpenMP context, notice uses of variables. */
2064 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2065 return GS_ALL_DONE;
2066
a9f7c570
RH
2067 /* If the decl is an alias for another expression, substitute it now. */
2068 if (DECL_HAS_VALUE_EXPR_P (decl))
2069 {
77f2a970
JJ
2070 tree value_expr = DECL_VALUE_EXPR (decl);
2071
2072 /* For referenced nonlocal VLAs add a decl for debugging purposes
2073 to the current function. */
2074 if (TREE_CODE (decl) == VAR_DECL
2075 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2076 && nonlocal_vlas != NULL
2077 && TREE_CODE (value_expr) == INDIRECT_REF
2078 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2079 && decl_function_context (decl) != current_function_decl)
2080 {
2081 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
74bf76ed
JJ
2082 while (ctx
2083 && (ctx->region_type == ORT_WORKSHARE
2084 || ctx->region_type == ORT_SIMD))
77f2a970
JJ
2085 ctx = ctx->outer_context;
2086 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
2087 {
2088 tree copy = copy_node (decl), block;
2089
2090 lang_hooks.dup_lang_specific_decl (copy);
2eb79bbb 2091 SET_DECL_RTL (copy, 0);
77f2a970
JJ
2092 TREE_USED (copy) = 1;
2093 block = DECL_INITIAL (current_function_decl);
910ad8de 2094 DECL_CHAIN (copy) = BLOCK_VARS (block);
77f2a970
JJ
2095 BLOCK_VARS (block) = copy;
2096 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2097 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2098 }
2099 }
2100
2101 *expr_p = unshare_expr (value_expr);
a9f7c570
RH
2102 return GS_OK;
2103 }
2104
2105 return GS_ALL_DONE;
2106}
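/* Illustrative example of the nonlocal VLA handling above (GNU C nested
   function, hypothetical names):

     void outer (int n)
     {
       char buf[n];
       void inner (void) { use (buf); }
       ...
     }

   While gimplifying 'inner', 'buf' has a DECL_VALUE_EXPR dereferencing a
   pointer variable that belongs to 'outer'; a copy of the decl is chained
   onto inner's outermost BLOCK so the variable stays visible for
   debugging.  */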
2107
6de9cd9a 2108/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
726a989a 2109 node *EXPR_P.
6de9cd9a
DN
2110
2111 compound_lval
2112 : min_lval '[' val ']'
2113 | min_lval '.' ID
2114 | compound_lval '[' val ']'
2115 | compound_lval '.' ID
2116
2117 This is not part of the original SIMPLE definition, which separates
2118 array and member references, but it seems reasonable to handle them
2119 together. Also, this way we don't run into problems with union
2120 aliasing; gcc requires that for accesses through a union to alias, the
2121 union reference must be explicit, which was not always the case when we
2122 were splitting up array and member refs.
2123
726a989a 2124 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
2125 *EXPR_P should be stored.
2126
726a989a 2127 POST_P points to the sequence where side effects that must happen after
6de9cd9a
DN
2128 *EXPR_P should be stored. */
2129
2130static enum gimplify_status
726a989a
RB
2131gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2132 fallback_t fallback)
6de9cd9a
DN
2133{
2134 tree *p;
9771b263 2135 vec<tree> expr_stack;
941f78d1 2136 enum gimplify_status ret = GS_ALL_DONE, tret;
af72267c 2137 int i;
db3927fb 2138 location_t loc = EXPR_LOCATION (*expr_p);
941f78d1 2139 tree expr = *expr_p;
6de9cd9a 2140
6de9cd9a 2141 /* Create a stack of the subexpressions so later we can walk them in
ec234842 2142 order from inner to outer. */
9771b263 2143 expr_stack.create (10);
6de9cd9a 2144
afe84921 2145 /* We can handle anything that get_inner_reference can deal with. */
6a720599
JM
2146 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2147 {
a9f7c570 2148 restart:
6a720599
JM
2149 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2150 if (TREE_CODE (*p) == INDIRECT_REF)
db3927fb 2151 *p = fold_indirect_ref_loc (loc, *p);
a9f7c570
RH
2152
2153 if (handled_component_p (*p))
2154 ;
2155 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2156 additional COMPONENT_REFs. */
2157 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
2158 && gimplify_var_or_parm_decl (p) == GS_OK)
2159 goto restart;
2160 else
6a720599 2161 break;
b8698a0f 2162
9771b263 2163 expr_stack.safe_push (*p);
6a720599 2164 }
6de9cd9a 2165
9771b263 2166 gcc_assert (expr_stack.length ());
9e51aaf5 2167
0823efed
DN
2168 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2169 walked through and P points to the innermost expression.
6de9cd9a 2170
af72267c
RK
2171 Java requires that we elaborate nodes in source order. That
2172 means we must gimplify the inner expression followed by each of
2173 the indices, in order. But we can't gimplify the inner
2174 expression until we deal with any variable bounds, sizes, or
2175 positions in order to deal with PLACEHOLDER_EXPRs.
2176
2177 So we do this in three steps. First we deal with the annotations
2178 for any variables in the components, then we gimplify the base,
2179 then we gimplify any indices, from left to right. */
9771b263 2180 for (i = expr_stack.length () - 1; i >= 0; i--)
6de9cd9a 2181 {
9771b263 2182 tree t = expr_stack[i];
44de5aeb
RK
2183
2184 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6de9cd9a 2185 {
44de5aeb
RK
2186 /* Gimplify the low bound and element type size and put them into
2187 the ARRAY_REF. If these values are set, they have already been
2188 gimplified. */
726a989a 2189 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb 2190 {
a7cc468a
RH
2191 tree low = unshare_expr (array_ref_low_bound (t));
2192 if (!is_gimple_min_invariant (low))
44de5aeb 2193 {
726a989a
RB
2194 TREE_OPERAND (t, 2) = low;
2195 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 2196 post_p, is_gimple_reg,
726a989a 2197 fb_rvalue);
44de5aeb
RK
2198 ret = MIN (ret, tret);
2199 }
2200 }
19c44640
JJ
2201 else
2202 {
2203 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2204 is_gimple_reg, fb_rvalue);
2205 ret = MIN (ret, tret);
2206 }
44de5aeb 2207
19c44640 2208 if (TREE_OPERAND (t, 3) == NULL_TREE)
44de5aeb
RK
2209 {
2210 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2211 tree elmt_size = unshare_expr (array_ref_element_size (t));
a4e9ffe5 2212 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
44de5aeb
RK
2213
2214 /* Divide the element size by the alignment of the element
2215 type (above). */
ad19c4be
EB
2216 elmt_size
2217 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
44de5aeb 2218
a7cc468a 2219 if (!is_gimple_min_invariant (elmt_size))
44de5aeb 2220 {
726a989a
RB
2221 TREE_OPERAND (t, 3) = elmt_size;
2222 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
ba4d8f9d 2223 post_p, is_gimple_reg,
726a989a 2224 fb_rvalue);
44de5aeb
RK
2225 ret = MIN (ret, tret);
2226 }
6de9cd9a 2227 }
19c44640
JJ
2228 else
2229 {
2230 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2231 is_gimple_reg, fb_rvalue);
2232 ret = MIN (ret, tret);
2233 }
6de9cd9a 2234 }
44de5aeb
RK
2235 else if (TREE_CODE (t) == COMPONENT_REF)
2236 {
2237 /* Set the field offset into T and gimplify it. */
19c44640 2238 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb
RK
2239 {
2240 tree offset = unshare_expr (component_ref_field_offset (t));
2241 tree field = TREE_OPERAND (t, 1);
2242 tree factor
2243 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2244
2245 /* Divide the offset by its alignment. */
db3927fb 2246 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
44de5aeb 2247
a7cc468a 2248 if (!is_gimple_min_invariant (offset))
44de5aeb 2249 {
726a989a
RB
2250 TREE_OPERAND (t, 2) = offset;
2251 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 2252 post_p, is_gimple_reg,
726a989a 2253 fb_rvalue);
44de5aeb
RK
2254 ret = MIN (ret, tret);
2255 }
2256 }
19c44640
JJ
2257 else
2258 {
2259 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2260 is_gimple_reg, fb_rvalue);
2261 ret = MIN (ret, tret);
2262 }
44de5aeb 2263 }
af72267c
RK
2264 }
2265
a9f7c570
RH
2266 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2267 so as to match the min_lval predicate. Failure to do so may result
2268 in the creation of large aggregate temporaries. */
2269 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2270 fallback | fb_lvalue);
af72267c
RK
2271 ret = MIN (ret, tret);
2272
ea814c66 2273 /* And finally, the indices and operands of ARRAY_REF. During this
48eb4e53 2274 loop we also remove any useless conversions. */
9771b263 2275 for (; expr_stack.length () > 0; )
af72267c 2276 {
9771b263 2277 tree t = expr_stack.pop ();
af72267c
RK
2278
2279 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2280 {
ba4d8f9d 2281 /* Gimplify the dimension. */
af72267c
RK
2282 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2283 {
2284 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
ba4d8f9d 2285 is_gimple_val, fb_rvalue);
af72267c
RK
2286 ret = MIN (ret, tret);
2287 }
2288 }
48eb4e53
RK
2289
2290 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2291
726a989a
RB
2292 /* The innermost expression P may have originally had
2293 TREE_SIDE_EFFECTS set which would have caused all the outer
2294 expressions in *EXPR_P leading to P to also have had
2295 TREE_SIDE_EFFECTS set. */
6de9cd9a 2296 recalculate_side_effects (t);
6de9cd9a
DN
2297 }
2298
2299 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
90051e16 2300 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
6de9cd9a
DN
2301 {
2302 canonicalize_component_ref (expr_p);
6de9cd9a
DN
2303 }
2304
9771b263 2305 expr_stack.release ();
07724022 2306
941f78d1
JM
2307 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2308
6de9cd9a
DN
2309 return ret;
2310}
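/* Rough illustration of the three steps above (approximate, hypothetical
   temporaries): for  a[i].f  where the array element size is variable,

     sz.1 = <element size / alignment>;    step 1: bounds and sizes into
                                           ARRAY_REF operands 2 and 3
     <gimplify the base 'a'>               step 2: base as a min_lval
     idx.2 = i;                            step 3: indices as GIMPLE values

   so that every operand embedded in the reference is itself valid
   GIMPLE.  */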
2311
206048bd
VR
2312/* Gimplify the self modifying expression pointed to by EXPR_P
2313 (++, --, +=, -=).
6de9cd9a
DN
2314
2315 PRE_P points to the list where side effects that must happen before
2316 *EXPR_P should be stored.
2317
2318 POST_P points to the list where side effects that must happen after
2319 *EXPR_P should be stored.
2320
2321 WANT_VALUE is nonzero iff we want to use the value of this expression
cc3c4f62 2322 in another expression.
6de9cd9a 2323
cc3c4f62
RB
2324 ARITH_TYPE is the type the computation should be performed in. */
2325
2326enum gimplify_status
726a989a 2327gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
cc3c4f62 2328 bool want_value, tree arith_type)
6de9cd9a
DN
2329{
2330 enum tree_code code;
726a989a
RB
2331 tree lhs, lvalue, rhs, t1;
2332 gimple_seq post = NULL, *orig_post_p = post_p;
6de9cd9a
DN
2333 bool postfix;
2334 enum tree_code arith_code;
2335 enum gimplify_status ret;
db3927fb 2336 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
2337
2338 code = TREE_CODE (*expr_p);
2339
282899df
NS
2340 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2341 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
6de9cd9a
DN
2342
2343 /* Prefix or postfix? */
2344 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2345 /* Faster to treat as prefix if result is not used. */
2346 postfix = want_value;
2347 else
2348 postfix = false;
2349
82181741
JJ
2350 /* For postfix, make sure the inner expression's post side effects
2351 are executed after side effects from this expression. */
2352 if (postfix)
2353 post_p = &post;
2354
6de9cd9a
DN
2355 /* Add or subtract? */
2356 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2357 arith_code = PLUS_EXPR;
2358 else
2359 arith_code = MINUS_EXPR;
2360
2361 /* Gimplify the LHS into a GIMPLE lvalue. */
2362 lvalue = TREE_OPERAND (*expr_p, 0);
2363 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2364 if (ret == GS_ERROR)
2365 return ret;
2366
2367 /* Extract the operands to the arithmetic operation. */
2368 lhs = lvalue;
2369 rhs = TREE_OPERAND (*expr_p, 1);
2370
2371 /* For postfix operator, we evaluate the LHS to an rvalue and then use
d97c9b22 2372 that as the result value and in the postqueue operation. */
6de9cd9a
DN
2373 if (postfix)
2374 {
2375 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2376 if (ret == GS_ERROR)
2377 return ret;
6de9cd9a 2378
d97c9b22
JJ
2379 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2380 }
cc3c4f62 2381
5be014d5
AP
2382 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2383 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2384 {
0d82a1c8 2385 rhs = convert_to_ptrofftype_loc (loc, rhs);
5be014d5 2386 if (arith_code == MINUS_EXPR)
db3927fb 2387 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
cc3c4f62 2388 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
5be014d5 2389 }
cc3c4f62
RB
2390 else
2391 t1 = fold_convert (TREE_TYPE (*expr_p),
2392 fold_build2 (arith_code, arith_type,
2393 fold_convert (arith_type, lhs),
2394 fold_convert (arith_type, rhs)));
5be014d5 2395
6de9cd9a
DN
2396 if (postfix)
2397 {
cf1867a0 2398 gimplify_assign (lvalue, t1, pre_p);
726a989a 2399 gimplify_seq_add_seq (orig_post_p, post);
cc3c4f62 2400 *expr_p = lhs;
6de9cd9a
DN
2401 return GS_ALL_DONE;
2402 }
2403 else
2404 {
726a989a 2405 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
6de9cd9a
DN
2406 return GS_OK;
2407 }
2408}
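/* E.g. (roughly) a postfix  x++  whose value is wanted becomes

     t = x;
     x = t + 1;
     ... t ...

   whereas the prefix or value-unused forms are simply rewritten into the
   MODIFY_EXPR  x = x + 1  and handed back for further gimplification.  */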
2409
d25cee4d
RH
2410/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2411
2412static void
2413maybe_with_size_expr (tree *expr_p)
2414{
61025d1b
RK
2415 tree expr = *expr_p;
2416 tree type = TREE_TYPE (expr);
2417 tree size;
d25cee4d 2418
61025d1b
RK
2419 /* If we've already wrapped this or the type is error_mark_node, we can't do
2420 anything. */
2421 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2422 || type == error_mark_node)
d25cee4d
RH
2423 return;
2424
61025d1b 2425 /* If the size isn't known or is a constant, we have nothing to do. */
d25cee4d 2426 size = TYPE_SIZE_UNIT (type);
61025d1b
RK
2427 if (!size || TREE_CODE (size) == INTEGER_CST)
2428 return;
2429
2430 /* Otherwise, make a WITH_SIZE_EXPR. */
2431 size = unshare_expr (size);
2432 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2433 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
d25cee4d
RH
2434}
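/* For example, passing an object of variably sized type by value leaves
   TYPE_SIZE_UNIT non-constant, so the expression is wrapped as
   WITH_SIZE_EXPR <expr, size> with any PLACEHOLDER_EXPRs in the size
   substituted, letting the size accompany the value downstream.  */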
2435
726a989a 2436/* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
1282697f
AH
2437 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2438 the CALL_EXPR. */
e4f78bd4
JM
2439
2440static enum gimplify_status
1282697f 2441gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
e4f78bd4
JM
2442{
2443 bool (*test) (tree);
2444 fallback_t fb;
2445
2446 /* In general, we allow lvalues for function arguments to avoid
2447 extra overhead of copying large aggregates out of even larger
2448 aggregates into temporaries only to copy the temporaries to
2449 the argument list. Make optimizers happy by pulling out to
2450 temporaries those types that fit in registers. */
726a989a 2451 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
e4f78bd4
JM
2452 test = is_gimple_val, fb = fb_rvalue;
2453 else
b4ef8aac
JM
2454 {
2455 test = is_gimple_lvalue, fb = fb_either;
2456 /* Also strip a TARGET_EXPR that would force an extra copy. */
2457 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2458 {
2459 tree init = TARGET_EXPR_INITIAL (*arg_p);
2460 if (init
2461 && !VOID_TYPE_P (TREE_TYPE (init)))
2462 *arg_p = init;
2463 }
2464 }
e4f78bd4 2465
d25cee4d 2466 /* If this is a variable sized type, we must remember the size. */
726a989a 2467 maybe_with_size_expr (arg_p);
d25cee4d 2468
c2255bc4 2469 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
1282697f
AH
2470 /* Make sure arguments have the same location as the function call
2471 itself. */
2472 protected_set_expr_location (*arg_p, call_location);
2473
e4f78bd4
JM
2474 /* There is a sequence point before a function call. Side effects in
2475 the argument list must occur before the actual call. So, when
2476 gimplifying arguments, force gimplify_expr to use an internal
2477 post queue which is then appended to the end of PRE_P. */
726a989a 2478 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
e4f78bd4
JM
2479}
2480
726a989a 2481/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
90051e16 2482 WANT_VALUE is true if the result of the call is desired. */
6de9cd9a
DN
2483
2484static enum gimplify_status
726a989a 2485gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a 2486{
f20ca725 2487 tree fndecl, parms, p, fnptrtype;
6de9cd9a 2488 enum gimplify_status ret;
5039610b 2489 int i, nargs;
726a989a
RB
2490 gimple call;
2491 bool builtin_va_start_p = FALSE;
db3927fb 2492 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a 2493
282899df 2494 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
6de9cd9a 2495
d3147f64 2496 /* For reliable diagnostics during inlining, it is necessary that
6de9cd9a 2497 every call_expr be annotated with file and line. */
a281759f
PB
2498 if (! EXPR_HAS_LOCATION (*expr_p))
2499 SET_EXPR_LOCATION (*expr_p, input_location);
6de9cd9a
DN
2500
2501 /* This may be a call to a builtin function.
2502
2503 Builtin function calls may be transformed into different
2504 (and more efficient) builtin function calls under certain
2505 circumstances. Unfortunately, gimplification can muck things
2506 up enough that the builtin expanders are not aware that certain
2507 transformations are still valid.
2508
2509 So we attempt transformation/gimplification of the call before
2510 we gimplify the CALL_EXPR. At this time we do not manage to
2511 transform all calls in the same manner as the expanders do, but
2512 we do transform most of them. */
726a989a 2513 fndecl = get_callee_fndecl (*expr_p);
3537a0cd
RG
2514 if (fndecl
2515 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2516 switch (DECL_FUNCTION_CODE (fndecl))
2517 {
2518 case BUILT_IN_VA_START:
2efcfa4e 2519 {
726a989a 2520 builtin_va_start_p = TRUE;
5039610b 2521 if (call_expr_nargs (*expr_p) < 2)
2efcfa4e
AP
2522 {
2523 error ("too few arguments to function %<va_start%>");
c2255bc4 2524 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2525 return GS_OK;
2526 }
b8698a0f 2527
5039610b 2528 if (fold_builtin_next_arg (*expr_p, true))
2efcfa4e 2529 {
c2255bc4 2530 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2531 return GS_OK;
2532 }
3537a0cd
RG
2533 break;
2534 }
2535 case BUILT_IN_LINE:
2536 {
2537 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2538 *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
2539 return GS_OK;
2540 }
2541 case BUILT_IN_FILE:
2542 {
2543 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2544 *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
2545 return GS_OK;
2546 }
2547 case BUILT_IN_FUNCTION:
2548 {
2549 const char *function;
2550 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2551 *expr_p = build_string_literal (strlen (function) + 1, function);
2552 return GS_OK;
2553 }
2554 default:
2555 ;
2556 }
2557 if (fndecl && DECL_BUILT_IN (fndecl))
2558 {
2559 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2560 if (new_tree && new_tree != *expr_p)
2561 {
2562 /* There was a transformation of this call which computes the
2563 same value, but in a more efficient way. Return and try
2564 again. */
2565 *expr_p = new_tree;
2566 return GS_OK;
2efcfa4e 2567 }
6de9cd9a
DN
2568 }
2569
f20ca725
RG
2570 /* Remember the original function pointer type. */
2571 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2572
6de9cd9a
DN
2573 /* There is a sequence point before the call, so any side effects in
2574 the calling expression must occur before the actual call. Force
2575 gimplify_expr to use an internal post queue. */
5039610b 2576 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
0f59171d 2577 is_gimple_call_addr, fb_rvalue);
6de9cd9a 2578
5039610b
SL
2579 nargs = call_expr_nargs (*expr_p);
2580
e36711f3 2581 /* Get argument types for verification. */
726a989a 2582 fndecl = get_callee_fndecl (*expr_p);
e36711f3 2583 parms = NULL_TREE;
726a989a
RB
2584 if (fndecl)
2585 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
e36711f3
RG
2586 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2587 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2588
726a989a 2589 if (fndecl && DECL_ARGUMENTS (fndecl))
f9487002 2590 p = DECL_ARGUMENTS (fndecl);
004e2fa7 2591 else if (parms)
f9487002 2592 p = parms;
6ef5231b 2593 else
498e51ca 2594 p = NULL_TREE;
f9487002
JJ
2595 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2596 ;
6ef5231b
JJ
2597
2598 /* If the last argument is __builtin_va_arg_pack () and it is not
2599 passed as a named argument, decrease the number of CALL_EXPR
2600 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2601 if (!p
2602 && i < nargs
2603 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2604 {
2605 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2606 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2607
2608 if (last_arg_fndecl
2609 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2610 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2611 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2612 {
2613 tree call = *expr_p;
2614
2615 --nargs;
db3927fb
AH
2616 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2617 CALL_EXPR_FN (call),
2618 nargs, CALL_EXPR_ARGP (call));
726a989a
RB
2619
2620 /* Copy all CALL_EXPR flags, location and block, except
6ef5231b
JJ
2621 CALL_EXPR_VA_ARG_PACK flag. */
2622 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2623 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2624 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2625 = CALL_EXPR_RETURN_SLOT_OPT (call);
2626 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
5e278028 2627 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
726a989a 2628
6ef5231b
JJ
2629 /* Set CALL_EXPR_VA_ARG_PACK. */
2630 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2631 }
2632 }
e36711f3
RG
2633
2634 /* Finally, gimplify the function arguments. */
726a989a 2635 if (nargs > 0)
6de9cd9a 2636 {
726a989a
RB
2637 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2638 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2639 PUSH_ARGS_REVERSED ? i-- : i++)
2640 {
2641 enum gimplify_status t;
6de9cd9a 2642
726a989a
RB
2643 /* Avoid gimplifying the second argument to va_start, which needs to
2644 be the plain PARM_DECL. */
2645 if ((i != 1) || !builtin_va_start_p)
2646 {
1282697f
AH
2647 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2648 EXPR_LOCATION (*expr_p));
6de9cd9a 2649
726a989a
RB
2650 if (t == GS_ERROR)
2651 ret = GS_ERROR;
2652 }
2653 }
6de9cd9a 2654 }
6de9cd9a 2655
33922890
RG
2656 /* Verify the function result. */
2657 if (want_value && fndecl
f20ca725 2658 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
33922890
RG
2659 {
2660 error_at (loc, "using result of function returning %<void%>");
2661 ret = GS_ERROR;
2662 }
2663
6de9cd9a 2664 /* Try this again in case gimplification exposed something. */
6f538523 2665 if (ret != GS_ERROR)
6de9cd9a 2666 {
db3927fb 2667 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
6f538523 2668
82d6e6fc 2669 if (new_tree && new_tree != *expr_p)
5039610b
SL
2670 {
2671 /* There was a transformation of this call which computes the
2672 same value, but in a more efficient way. Return and try
2673 again. */
82d6e6fc 2674 *expr_p = new_tree;
5039610b 2675 return GS_OK;
6de9cd9a
DN
2676 }
2677 }
726a989a
RB
2678 else
2679 {
df8fa700 2680 *expr_p = error_mark_node;
726a989a
RB
2681 return GS_ERROR;
2682 }
6de9cd9a
DN
2683
2684 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2685 decl. This allows us to eliminate redundant or useless
2686 calls to "const" functions. */
becfd6e5
KZ
2687 if (TREE_CODE (*expr_p) == CALL_EXPR)
2688 {
2689 int flags = call_expr_flags (*expr_p);
2690 if (flags & (ECF_CONST | ECF_PURE)
2691 /* An infinite loop is considered a side effect. */
2692 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2693 TREE_SIDE_EFFECTS (*expr_p) = 0;
2694 }
726a989a
RB
2695
2696 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2697 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2698 form and delegate the creation of a GIMPLE_CALL to
2699 gimplify_modify_expr. This is always possible because when
2700 WANT_VALUE is true, the caller wants the result of this call into
2701 a temporary, which means that we will emit an INIT_EXPR in
2702 internal_get_tmp_var which will then be handled by
2703 gimplify_modify_expr. */
2704 if (!want_value)
2705 {
2706 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2707 have to do is replicate it as a GIMPLE_CALL tuple. */
64e0f5ff 2708 gimple_stmt_iterator gsi;
726a989a 2709 call = gimple_build_call_from_tree (*expr_p);
f20ca725 2710 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
f6b64c35 2711 notice_special_calls (call);
726a989a 2712 gimplify_seq_add_stmt (pre_p, call);
64e0f5ff 2713 gsi = gsi_last (*pre_p);
acf0174b
JJ
2714 /* Don't fold stmts inside of target construct. We'll do it
2715 during omplower pass instead. */
2716 struct gimplify_omp_ctx *ctx;
2717 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2718 if (ctx->region_type == ORT_TARGET)
2719 break;
2720 if (ctx == NULL)
2721 fold_stmt (&gsi);
726a989a
RB
2722 *expr_p = NULL_TREE;
2723 }
f20ca725
RG
2724 else
2725 /* Remember the original function type. */
2726 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2727 CALL_EXPR_FN (*expr_p));
726a989a 2728
6de9cd9a
DN
2729 return ret;
2730}
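/* E.g. (roughly) for  foo (bar (), x + 1)  the sequence point before the
   call forces the side effects of the operands out first:

     t1 = bar ();
     t2 = x + 1;
     foo (t1, t2);   emitted as a GIMPLE_CALL here if the value is unused,
                     otherwise left for gimplify_modify_expr to emit.  */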
2731
2732/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2733 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2734
2735 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2736 condition is true or false, respectively. If null, we should generate
2737 our own to skip over the evaluation of this specific expression.
2738
ca80e52b
EB
2739 LOCUS is the source location of the COND_EXPR.
2740
6de9cd9a
DN
2741 This function is the tree equivalent of do_jump.
2742
2743 shortcut_cond_r should only be called by shortcut_cond_expr. */
2744
2745static tree
ca80e52b
EB
2746shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2747 location_t locus)
6de9cd9a
DN
2748{
2749 tree local_label = NULL_TREE;
2750 tree t, expr = NULL;
2751
2752 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2753 retain the shortcut semantics. Just insert the gotos here;
2754 shortcut_cond_expr will append the real blocks later. */
2755 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2756 {
ca80e52b
EB
2757 location_t new_locus;
2758
6de9cd9a
DN
2759 /* Turn if (a && b) into
2760
2761 if (a); else goto no;
2762 if (b) goto yes; else goto no;
2763 (no:) */
2764
2765 if (false_label_p == NULL)
2766 false_label_p = &local_label;
2767
ca80e52b
EB
2768 /* Keep the original source location on the first 'if'. */
2769 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
6de9cd9a
DN
2770 append_to_statement_list (t, &expr);
2771
ca80e52b
EB
2772 /* Set the source location of the && on the second 'if'. */
2773 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2774 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2775 new_locus);
6de9cd9a
DN
2776 append_to_statement_list (t, &expr);
2777 }
2778 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2779 {
ca80e52b
EB
2780 location_t new_locus;
2781
6de9cd9a
DN
2782 /* Turn if (a || b) into
2783
2784 if (a) goto yes;
2785 if (b) goto yes; else goto no;
2786 (yes:) */
2787
2788 if (true_label_p == NULL)
2789 true_label_p = &local_label;
2790
ca80e52b
EB
2791 /* Keep the original source location on the first 'if'. */
2792 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
6de9cd9a
DN
2793 append_to_statement_list (t, &expr);
2794
ca80e52b
EB
2795 /* Set the source location of the || on the second 'if'. */
2796 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2797 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2798 new_locus);
6de9cd9a
DN
2799 append_to_statement_list (t, &expr);
2800 }
1537737f
JJ
2801 else if (TREE_CODE (pred) == COND_EXPR
2802 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2803 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
6de9cd9a 2804 {
ca80e52b
EB
2805 location_t new_locus;
2806
6de9cd9a
DN
2807 /* As long as we're messing with gotos, turn if (a ? b : c) into
2808 if (a)
2809 if (b) goto yes; else goto no;
2810 else
1537737f
JJ
2811 if (c) goto yes; else goto no;
2812
2813 Don't do this if one of the arms has void type, which can happen
2814 in C++ when the arm is throw. */
ca80e52b
EB
2815
2816 /* Keep the original source location on the first 'if'. Set the source
2817 location of the ? on the second 'if'. */
2818 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
b4257cfc
RG
2819 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2820 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
ca80e52b 2821 false_label_p, locus),
b4257cfc 2822 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
ca80e52b 2823 false_label_p, new_locus));
6de9cd9a
DN
2824 }
2825 else
2826 {
b4257cfc
RG
2827 expr = build3 (COND_EXPR, void_type_node, pred,
2828 build_and_jump (true_label_p),
2829 build_and_jump (false_label_p));
ca80e52b 2830 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2831 }
2832
2833 if (local_label)
2834 {
2835 t = build1 (LABEL_EXPR, void_type_node, local_label);
2836 append_to_statement_list (t, &expr);
2837 }
2838
2839 return expr;
2840}
2841
726a989a
RB
2842/* Given a conditional expression EXPR with short-circuit boolean
2843 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
073a8998 2844 predicate apart into the equivalent sequence of conditionals. */
726a989a 2845
6de9cd9a
DN
2846static tree
2847shortcut_cond_expr (tree expr)
2848{
2849 tree pred = TREE_OPERAND (expr, 0);
2850 tree then_ = TREE_OPERAND (expr, 1);
2851 tree else_ = TREE_OPERAND (expr, 2);
2852 tree true_label, false_label, end_label, t;
2853 tree *true_label_p;
2854 tree *false_label_p;
089efaa4 2855 bool emit_end, emit_false, jump_over_else;
65355d53
RH
2856 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2857 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a
DN
2858
2859 /* First do simple transformations. */
65355d53 2860 if (!else_se)
6de9cd9a 2861 {
ca80e52b
EB
2862 /* If there is no 'else', turn
2863 if (a && b) then c
2864 into
2865 if (a) if (b) then c. */
6de9cd9a
DN
2866 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2867 {
ca80e52b 2868 /* Keep the original source location on the first 'if'. */
ec52b111 2869 location_t locus = EXPR_LOC_OR_HERE (expr);
6de9cd9a 2870 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2871 /* Set the source location of the && on the second 'if'. */
2872 if (EXPR_HAS_LOCATION (pred))
2873 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2874 then_ = shortcut_cond_expr (expr);
4356a1bf 2875 then_se = then_ && TREE_SIDE_EFFECTS (then_);
6de9cd9a 2876 pred = TREE_OPERAND (pred, 0);
b4257cfc 2877 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
ca80e52b 2878 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2879 }
2880 }
726a989a 2881
65355d53 2882 if (!then_se)
6de9cd9a
DN
2883 {
2884 /* If there is no 'then', turn
2885 if (a || b); else d
2886 into
2887 if (a); else if (b); else d. */
2888 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2889 {
ca80e52b 2890 /* Keep the original source location on the first 'if'. */
ec52b111 2891 location_t locus = EXPR_LOC_OR_HERE (expr);
6de9cd9a 2892 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2893 /* Set the source location of the || on the second 'if'. */
2894 if (EXPR_HAS_LOCATION (pred))
2895 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2896 else_ = shortcut_cond_expr (expr);
4356a1bf 2897 else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a 2898 pred = TREE_OPERAND (pred, 0);
b4257cfc 2899 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
ca80e52b 2900 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2901 }
2902 }
2903
2904 /* If we're done, great. */
2905 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2906 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2907 return expr;
2908
2909 /* Otherwise we need to mess with gotos. Change
2910 if (a) c; else d;
2911 to
2912 if (a); else goto no;
2913 c; goto end;
2914 no: d; end:
2915 and recursively gimplify the condition. */
2916
2917 true_label = false_label = end_label = NULL_TREE;
2918
2919 /* If our arms just jump somewhere, hijack those labels so we don't
2920 generate jumps to jumps. */
2921
65355d53
RH
2922 if (then_
2923 && TREE_CODE (then_) == GOTO_EXPR
6de9cd9a
DN
2924 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2925 {
2926 true_label = GOTO_DESTINATION (then_);
65355d53
RH
2927 then_ = NULL;
2928 then_se = false;
6de9cd9a
DN
2929 }
2930
65355d53
RH
2931 if (else_
2932 && TREE_CODE (else_) == GOTO_EXPR
6de9cd9a
DN
2933 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2934 {
2935 false_label = GOTO_DESTINATION (else_);
65355d53
RH
2936 else_ = NULL;
2937 else_se = false;
6de9cd9a
DN
2938 }
2939
9cf737f8 2940 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
6de9cd9a
DN
2941 if (true_label)
2942 true_label_p = &true_label;
2943 else
2944 true_label_p = NULL;
2945
2946 /* The 'else' branch also needs a label if it contains interesting code. */
65355d53 2947 if (false_label || else_se)
6de9cd9a
DN
2948 false_label_p = &false_label;
2949 else
2950 false_label_p = NULL;
2951
2952 /* If there was nothing else in our arms, just forward the label(s). */
65355d53 2953 if (!then_se && !else_se)
ca80e52b 2954 return shortcut_cond_r (pred, true_label_p, false_label_p,
ec52b111 2955 EXPR_LOC_OR_HERE (expr));
6de9cd9a
DN
2956
2957 /* If our last subexpression already has a terminal label, reuse it. */
65355d53 2958 if (else_se)
ca80e52b 2959 t = expr_last (else_);
65355d53 2960 else if (then_se)
ca80e52b 2961 t = expr_last (then_);
65355d53 2962 else
ca80e52b
EB
2963 t = NULL;
2964 if (t && TREE_CODE (t) == LABEL_EXPR)
2965 end_label = LABEL_EXPR_LABEL (t);
6de9cd9a
DN
2966
2967 /* If we don't care about jumping to the 'else' branch, jump to the end
2968 if the condition is false. */
2969 if (!false_label_p)
2970 false_label_p = &end_label;
2971
2972 /* We only want to emit these labels if we aren't hijacking them. */
2973 emit_end = (end_label == NULL_TREE);
2974 emit_false = (false_label == NULL_TREE);
2975
089efaa4
ILT
2976 /* We only emit the jump over the else clause if we have to--if the
2977 then clause may fall through. Otherwise we can wind up with a
2978 useless jump and a useless label at the end of gimplified code,
2979 which will cause us to think that this conditional as a whole
2980 falls through even if it doesn't. If we then inline a function
2981 which ends with such a condition, that can cause us to issue an
2982 inappropriate warning about control reaching the end of a
2983 non-void function. */
2984 jump_over_else = block_may_fallthru (then_);
2985
ca80e52b 2986 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
ec52b111 2987 EXPR_LOC_OR_HERE (expr));
6de9cd9a
DN
2988
2989 expr = NULL;
2990 append_to_statement_list (pred, &expr);
2991
2992 append_to_statement_list (then_, &expr);
65355d53 2993 if (else_se)
6de9cd9a 2994 {
089efaa4
ILT
2995 if (jump_over_else)
2996 {
ca80e52b 2997 tree last = expr_last (expr);
089efaa4 2998 t = build_and_jump (&end_label);
ca80e52b
EB
2999 if (EXPR_HAS_LOCATION (last))
3000 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
089efaa4
ILT
3001 append_to_statement_list (t, &expr);
3002 }
6de9cd9a
DN
3003 if (emit_false)
3004 {
3005 t = build1 (LABEL_EXPR, void_type_node, false_label);
3006 append_to_statement_list (t, &expr);
3007 }
3008 append_to_statement_list (else_, &expr);
3009 }
3010 if (emit_end && end_label)
3011 {
3012 t = build1 (LABEL_EXPR, void_type_node, end_label);
3013 append_to_statement_list (t, &expr);
3014 }
3015
3016 return expr;
3017}
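/* Combining the pieces, e.g. (approximate result)

     if (a && b) c; else d;

   is rewritten as

     if (a) ; else goto no;
     if (b) ; else goto no;
     c; goto end;
     no: d;
     end:

   before each remaining condition is later turned into a GIMPLE_COND.  */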
3018
3019/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3020
50674e96 3021tree
6de9cd9a
DN
3022gimple_boolify (tree expr)
3023{
3024 tree type = TREE_TYPE (expr);
db3927fb 3025 location_t loc = EXPR_LOCATION (expr);
6de9cd9a 3026
554cf330
JJ
3027 if (TREE_CODE (expr) == NE_EXPR
3028 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3029 && integer_zerop (TREE_OPERAND (expr, 1)))
3030 {
3031 tree call = TREE_OPERAND (expr, 0);
3032 tree fn = get_callee_fndecl (call);
3033
d53c73e0
JJ
3034 /* For __builtin_expect ((long) (x), y) recurse into x as well
3035 if x is truth_value_p. */
554cf330
JJ
3036 if (fn
3037 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3038 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3039 && call_expr_nargs (call) == 2)
3040 {
3041 tree arg = CALL_EXPR_ARG (call, 0);
3042 if (arg)
3043 {
3044 if (TREE_CODE (arg) == NOP_EXPR
3045 && TREE_TYPE (arg) == TREE_TYPE (call))
3046 arg = TREE_OPERAND (arg, 0);
d53c73e0
JJ
3047 if (truth_value_p (TREE_CODE (arg)))
3048 {
3049 arg = gimple_boolify (arg);
3050 CALL_EXPR_ARG (call, 0)
3051 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3052 }
554cf330
JJ
3053 }
3054 }
3055 }
3056
6de9cd9a
DN
3057 switch (TREE_CODE (expr))
3058 {
3059 case TRUTH_AND_EXPR:
3060 case TRUTH_OR_EXPR:
3061 case TRUTH_XOR_EXPR:
3062 case TRUTH_ANDIF_EXPR:
3063 case TRUTH_ORIF_EXPR:
3064 /* Also boolify the arguments of truth exprs. */
3065 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3066 /* FALLTHRU */
3067
3068 case TRUTH_NOT_EXPR:
3069 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
6de9cd9a 3070
6de9cd9a 3071 /* These expressions always produce boolean results. */
7f3ff782
KT
3072 if (TREE_CODE (type) != BOOLEAN_TYPE)
3073 TREE_TYPE (expr) = boolean_type_node;
6de9cd9a 3074 return expr;
d3147f64 3075
8170608b
TB
3076 case ANNOTATE_EXPR:
3077 if ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1))
3078 == annot_expr_ivdep_kind)
3079 {
3080 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3081 if (TREE_CODE (type) != BOOLEAN_TYPE)
3082 TREE_TYPE (expr) = boolean_type_node;
3083 return expr;
3084 }
3085 /* FALLTHRU */
3086
6de9cd9a 3087 default:
7f3ff782
KT
3088 if (COMPARISON_CLASS_P (expr))
3089 {
3090 /* These expressions always produce boolean results. */
3091 if (TREE_CODE (type) != BOOLEAN_TYPE)
3092 TREE_TYPE (expr) = boolean_type_node;
3093 return expr;
3094 }
6de9cd9a
DN
3095 /* Other expressions that get here must have boolean values, but
3096 might need to be converted to the appropriate mode. */
7f3ff782 3097 if (TREE_CODE (type) == BOOLEAN_TYPE)
1d15f620 3098 return expr;
db3927fb 3099 return fold_convert_loc (loc, boolean_type_node, expr);
6de9cd9a
DN
3100 }
3101}
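/* E.g. (illustratively): a comparison such as  a < b  or a truth operator
   such as  x && y  simply has its type forced to boolean_type_node, while
   a condition of integer type like  p & q  is wrapped in a conversion,
   (bool) (p & q), by the fold_convert_loc at the end.  */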
3102
aea74440
JJ
3103/* Given a conditional expression *EXPR_P without side effects, gimplify
3104 its operands. New statements are inserted to PRE_P. */
3105
3106static enum gimplify_status
726a989a 3107gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
aea74440
JJ
3108{
3109 tree expr = *expr_p, cond;
3110 enum gimplify_status ret, tret;
3111 enum tree_code code;
3112
3113 cond = gimple_boolify (COND_EXPR_COND (expr));
3114
3115 /* We need to handle && and || specially, as their gimplification
3116 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3117 code = TREE_CODE (cond);
3118 if (code == TRUTH_ANDIF_EXPR)
3119 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3120 else if (code == TRUTH_ORIF_EXPR)
3121 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
726a989a 3122 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
aea74440
JJ
3123 COND_EXPR_COND (*expr_p) = cond;
3124
3125 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3126 is_gimple_val, fb_rvalue);
3127 ret = MIN (ret, tret);
3128 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3129 is_gimple_val, fb_rvalue);
3130
3131 return MIN (ret, tret);
3132}
3133
ad19c4be 3134/* Return true if evaluating EXPR could trap.
aea74440
JJ
3135 EXPR is GENERIC, while tree_could_trap_p can be called
3136 only on GIMPLE. */
3137
3138static bool
3139generic_expr_could_trap_p (tree expr)
3140{
3141 unsigned i, n;
3142
3143 if (!expr || is_gimple_val (expr))
3144 return false;
3145
3146 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3147 return true;
3148
3149 n = TREE_OPERAND_LENGTH (expr);
3150 for (i = 0; i < n; i++)
3151 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3152 return true;
3153
3154 return false;
3155}
3156
206048bd 3157/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
6de9cd9a
DN
3158 into
3159
3160 if (p) if (p)
3161 t1 = a; a;
3162 else or else
3163 t1 = b; b;
3164 t1;
3165
3166 The second form is used when *EXPR_P is of type void.
3167
3168 PRE_P points to the list where side effects that must happen before
dae7ec87 3169 *EXPR_P should be stored. */
6de9cd9a
DN
3170
3171static enum gimplify_status
726a989a 3172gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
6de9cd9a
DN
3173{
3174 tree expr = *expr_p;
06ec59e6
EB
3175 tree type = TREE_TYPE (expr);
3176 location_t loc = EXPR_LOCATION (expr);
3177 tree tmp, arm1, arm2;
6de9cd9a 3178 enum gimplify_status ret;
726a989a
RB
3179 tree label_true, label_false, label_cont;
3180 bool have_then_clause_p, have_else_clause_p;
3181 gimple gimple_cond;
3182 enum tree_code pred_code;
3183 gimple_seq seq = NULL;
26d44ae2
RH
3184
3185 /* If this COND_EXPR has a value, copy the values into a temporary within
3186 the arms. */
06ec59e6 3187 if (!VOID_TYPE_P (type))
26d44ae2 3188 {
06ec59e6 3189 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
aff98faf
AO
3190 tree result;
3191
06ec59e6
EB
3192 /* If either an rvalue is ok or we do not require an lvalue, create the
3193 temporary. But we cannot do that if the type is addressable. */
3194 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
c3e203cf 3195 && !TREE_ADDRESSABLE (type))
aff98faf 3196 {
aea74440
JJ
3197 if (gimplify_ctxp->allow_rhs_cond_expr
3198 /* If either branch has side effects or could trap, it can't be
3199 evaluated unconditionally. */
06ec59e6
EB
3200 && !TREE_SIDE_EFFECTS (then_)
3201 && !generic_expr_could_trap_p (then_)
3202 && !TREE_SIDE_EFFECTS (else_)
3203 && !generic_expr_could_trap_p (else_))
aea74440
JJ
3204 return gimplify_pure_cond_expr (expr_p, pre_p);
3205
06ec59e6
EB
3206 tmp = create_tmp_var (type, "iftmp");
3207 result = tmp;
aff98faf 3208 }
06ec59e6
EB
3209
3210 /* Otherwise, only create and copy references to the values. */
26d44ae2
RH
3211 else
3212 {
06ec59e6 3213 type = build_pointer_type (type);
aff98faf 3214
06ec59e6
EB
3215 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3216 then_ = build_fold_addr_expr_loc (loc, then_);
aff98faf 3217
06ec59e6
EB
3218 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3219 else_ = build_fold_addr_expr_loc (loc, else_);
3220
3221 expr
3222 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
aea74440 3223
726a989a 3224 tmp = create_tmp_var (type, "iftmp");
70f34814 3225 result = build_simple_mem_ref_loc (loc, tmp);
26d44ae2
RH
3226 }
3227
06ec59e6
EB
3228 /* Build the new then clause, `tmp = then_;'. But don't build the
3229 assignment if the value is void; in C++ it can be if it's a throw. */
3230 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3231 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
26d44ae2 3232
06ec59e6
EB
3233 /* Similarly, build the new else clause, `tmp = else_;'. */
3234 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3235 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
26d44ae2
RH
3236
3237 TREE_TYPE (expr) = void_type_node;
3238 recalculate_side_effects (expr);
3239
d91ba7b0 3240 /* Move the COND_EXPR to the prequeue. */
726a989a 3241 gimplify_stmt (&expr, pre_p);
26d44ae2 3242
aff98faf 3243 *expr_p = result;
726a989a 3244 return GS_ALL_DONE;
26d44ae2
RH
3245 }
3246
f2f81d57
EB
3247 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3248 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3249 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3250 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3251
26d44ae2
RH
3252 /* Make sure the condition has BOOLEAN_TYPE. */
3253 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3254
3255 /* Break apart && and || conditions. */
3256 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3257 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3258 {
3259 expr = shortcut_cond_expr (expr);
3260
3261 if (expr != *expr_p)
3262 {
3263 *expr_p = expr;
3264
3265 /* We can't rely on gimplify_expr to re-gimplify the expanded
3266 form properly, as cleanups might cause the target labels to be
3267 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3268 set up a conditional context. */
3269 gimple_push_condition ();
726a989a 3270 gimplify_stmt (expr_p, &seq);
26d44ae2 3271 gimple_pop_condition (pre_p);
726a989a 3272 gimple_seq_add_seq (pre_p, seq);
26d44ae2
RH
3273
3274 return GS_ALL_DONE;
3275 }
3276 }
3277
3278 /* Now do the normal gimplification. */
26d44ae2 3279
726a989a
RB
3280 /* Gimplify condition. */
3281 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3282 fb_rvalue);
26d44ae2 3283 if (ret == GS_ERROR)
726a989a
RB
3284 return GS_ERROR;
3285 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3286
3287 gimple_push_condition ();
26d44ae2 3288
726a989a
RB
3289 have_then_clause_p = have_else_clause_p = false;
3290 if (TREE_OPERAND (expr, 1) != NULL
3291 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3292 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3293 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3294 == current_function_decl)
3295 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3296 have different locations, otherwise we end up with incorrect
3297 location information on the branches. */
3298 && (optimize
3299 || !EXPR_HAS_LOCATION (expr)
3300 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3301 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3302 {
3303 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3304 have_then_clause_p = true;
26d44ae2
RH
3305 }
3306 else
c2255bc4 3307 label_true = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3308 if (TREE_OPERAND (expr, 2) != NULL
3309 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3310 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3311 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3312 == current_function_decl)
3313 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3314 have different locations, otherwise we end up with incorrect
3315 location information on the branches. */
3316 && (optimize
3317 || !EXPR_HAS_LOCATION (expr)
3318 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3319 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3320 {
3321 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3322 have_else_clause_p = true;
3323 }
3324 else
c2255bc4 3325 label_false = create_artificial_label (UNKNOWN_LOCATION);
26d44ae2 3326
726a989a
RB
3327 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3328 &arm2);
26d44ae2 3329
726a989a
RB
3330 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3331 label_false);
26d44ae2 3332
726a989a
RB
3333 gimplify_seq_add_stmt (&seq, gimple_cond);
3334 label_cont = NULL_TREE;
3335 if (!have_then_clause_p)
3336 {
3337 /* For if (...) {} else { code; } put label_true after
3338 the else block. */
3339 if (TREE_OPERAND (expr, 1) == NULL_TREE
3340 && !have_else_clause_p
3341 && TREE_OPERAND (expr, 2) != NULL_TREE)
3342 label_cont = label_true;
3343 else
3344 {
3345 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3346 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3347 /* For if (...) { code; } else {} or
3348 if (...) { code; } else goto label; or
3349 if (...) { code; return; } else { ... }
3350 label_cont isn't needed. */
3351 if (!have_else_clause_p
3352 && TREE_OPERAND (expr, 2) != NULL_TREE
3353 && gimple_seq_may_fallthru (seq))
3354 {
3355 gimple g;
c2255bc4 3356 label_cont = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3357
3358 g = gimple_build_goto (label_cont);
3359
3360 /* GIMPLE_COND's are very low level; they have embedded
3361 gotos. This particular embedded goto should not be marked
3362 with the location of the original COND_EXPR, as it would
3363 correspond to the COND_EXPR's condition, not the ELSE or the
3364 THEN arms. To avoid marking it with the wrong location, flag
3365 it as "no location". */
3366 gimple_set_do_not_emit_location (g);
3367
3368 gimplify_seq_add_stmt (&seq, g);
3369 }
3370 }
3371 }
3372 if (!have_else_clause_p)
3373 {
3374 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3375 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3376 }
3377 if (label_cont)
3378 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3379
3380 gimple_pop_condition (pre_p);
3381 gimple_seq_add_seq (pre_p, seq);
3382
3383 if (ret == GS_ERROR)
3384 ; /* Do nothing. */
3385 else if (have_then_clause_p || have_else_clause_p)
3386 ret = GS_ALL_DONE;
3387 else
3388 {
3389 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3390 expr = TREE_OPERAND (expr, 0);
3391 gimplify_stmt (&expr, pre_p);
3392 }
3393
3394 *expr_p = NULL;
3395 return ret;
3396}
3397
f76d6e6f
EB
3398/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3399 to be marked addressable.
3400
3401 We cannot rely on such an expression being directly markable if a temporary
3402 has been created by the gimplification. In this case, we create another
3403 temporary and initialize it with a copy, which will become a store after we
3404 mark it addressable. This can happen if the front-end passed us something
3405 that it could not mark addressable yet, like a Fortran pass-by-reference
3406 parameter (int) floatvar. */
3407
3408static void
3409prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3410{
3411 while (handled_component_p (*expr_p))
3412 expr_p = &TREE_OPERAND (*expr_p, 0);
3413 if (is_gimple_reg (*expr_p))
3414 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3415}
3416
726a989a
RB
3417/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3418 a call to __builtin_memcpy. */
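/* Roughly speaking, a variable-sized aggregate copy "to = from" is emitted
   as the call "__builtin_memcpy (&to, &from, size)"; when WANT_VALUE is
   set, the call's return value is kept in a temporary and *EXPR_P becomes
   a dereference of that temporary (see the code below). */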
3419
3420static enum gimplify_status
3421gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3422 gimple_seq *seq_p)
26d44ae2 3423{
5039610b 3424 tree t, to, to_ptr, from, from_ptr;
726a989a 3425 gimple gs;
db3927fb 3426 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3427
726a989a
RB
3428 to = TREE_OPERAND (*expr_p, 0);
3429 from = TREE_OPERAND (*expr_p, 1);
26d44ae2 3430
f76d6e6f
EB
3431 /* Mark the RHS addressable. Beware that it may not be possible to do so
3432 directly if a temporary has been created by the gimplification. */
3433 prepare_gimple_addressable (&from, seq_p);
3434
628c189e 3435 mark_addressable (from);
db3927fb
AH
3436 from_ptr = build_fold_addr_expr_loc (loc, from);
3437 gimplify_arg (&from_ptr, seq_p, loc);
26d44ae2 3438
628c189e 3439 mark_addressable (to);
db3927fb
AH
3440 to_ptr = build_fold_addr_expr_loc (loc, to);
3441 gimplify_arg (&to_ptr, seq_p, loc);
726a989a 3442
e79983f4 3443 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
726a989a
RB
3444
3445 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
26d44ae2
RH
3446
3447 if (want_value)
3448 {
726a989a
RB
3449 /* tmp = memcpy() */
3450 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3451 gimple_call_set_lhs (gs, t);
3452 gimplify_seq_add_stmt (seq_p, gs);
3453
70f34814 3454 *expr_p = build_simple_mem_ref (t);
726a989a 3455 return GS_ALL_DONE;
26d44ae2
RH
3456 }
3457
726a989a
RB
3458 gimplify_seq_add_stmt (seq_p, gs);
3459 *expr_p = NULL;
3460 return GS_ALL_DONE;
26d44ae2
RH
3461}
3462
3463/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3464 a call to __builtin_memset. In this case we know that the RHS is
3465 a CONSTRUCTOR with an empty element list. */
3466
3467static enum gimplify_status
726a989a
RB
3468gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3469 gimple_seq *seq_p)
26d44ae2 3470{
1a13360e 3471 tree t, from, to, to_ptr;
726a989a 3472 gimple gs;
db3927fb 3473 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3474
1a13360e
OH
3475 /* Assert our assumptions, to abort instead of producing wrong code
3476 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3477 not be immediately exposed. */
b8698a0f 3478 from = TREE_OPERAND (*expr_p, 1);
1a13360e
OH
3479 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3480 from = TREE_OPERAND (from, 0);
3481
3482 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
9771b263 3483 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
1a13360e
OH
3484
3485 /* Now proceed. */
726a989a 3486 to = TREE_OPERAND (*expr_p, 0);
26d44ae2 3487
db3927fb
AH
3488 to_ptr = build_fold_addr_expr_loc (loc, to);
3489 gimplify_arg (&to_ptr, seq_p, loc);
e79983f4 3490 t = builtin_decl_implicit (BUILT_IN_MEMSET);
726a989a
RB
3491
3492 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
26d44ae2
RH
3493
3494 if (want_value)
3495 {
726a989a
RB
3496 /* tmp = memset() */
3497 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3498 gimple_call_set_lhs (gs, t);
3499 gimplify_seq_add_stmt (seq_p, gs);
3500
3501 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3502 return GS_ALL_DONE;
26d44ae2
RH
3503 }
3504
726a989a
RB
3505 gimplify_seq_add_stmt (seq_p, gs);
3506 *expr_p = NULL;
3507 return GS_ALL_DONE;
26d44ae2
RH
3508}
3509
57d1dd87
RH
3510/* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3511 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
ad19c4be 3512 assignment. Return non-null if we detect a potential overlap. */
57d1dd87
RH
3513
3514struct gimplify_init_ctor_preeval_data
3515{
3516 /* The base decl of the lhs object. May be NULL, in which case we
3517 have to assume the lhs is indirect. */
3518 tree lhs_base_decl;
3519
3520 /* The alias set of the lhs object. */
4862826d 3521 alias_set_type lhs_alias_set;
57d1dd87
RH
3522};
3523
3524static tree
3525gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3526{
3527 struct gimplify_init_ctor_preeval_data *data
3528 = (struct gimplify_init_ctor_preeval_data *) xdata;
3529 tree t = *tp;
3530
3531 /* If we find the base object, obviously we have overlap. */
3532 if (data->lhs_base_decl == t)
3533 return t;
3534
3535 /* If the constructor component is indirect, determine if we have a
3536 potential overlap with the lhs. The only bits of information we
3537 have to go on at this point are addressability and alias sets. */
70f34814
RG
3538 if ((INDIRECT_REF_P (t)
3539 || TREE_CODE (t) == MEM_REF)
57d1dd87
RH
3540 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3541 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3542 return t;
3543
df10ee2a 3544 /* If the constructor component is a call, determine if it can hide a
70f34814
RG
3545 potential overlap with the lhs through an INDIRECT_REF like above.
3546 ??? Ugh - this is completely broken. In fact this whole analysis
3547 doesn't look conservative. */
df10ee2a
EB
3548 if (TREE_CODE (t) == CALL_EXPR)
3549 {
3550 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3551
3552 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3553 if (POINTER_TYPE_P (TREE_VALUE (type))
3554 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3555 && alias_sets_conflict_p (data->lhs_alias_set,
3556 get_alias_set
3557 (TREE_TYPE (TREE_VALUE (type)))))
3558 return t;
3559 }
3560
6615c446 3561 if (IS_TYPE_OR_DECL_P (t))
57d1dd87
RH
3562 *walk_subtrees = 0;
3563 return NULL;
3564}
3565
726a989a 3566/* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
57d1dd87
RH
3567 force values that overlap with the lhs (as described by *DATA)
3568 into temporaries. */
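/* For example, in "x = (struct S) { x.b, ... }" the element "x.b" overlaps
   the object being initialized and must be evaluated into a temporary
   before any field of "x" is written, since the later per-field stores
   would otherwise clobber the value still being read. */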
3569
3570static void
726a989a 3571gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
57d1dd87
RH
3572 struct gimplify_init_ctor_preeval_data *data)
3573{
3574 enum gimplify_status one;
3575
51eed280
PB
3576 /* If the value is constant, then there's nothing to pre-evaluate. */
3577 if (TREE_CONSTANT (*expr_p))
3578 {
3579 /* Ensure it does not have side effects, it might contain a reference to
3580 the object we're initializing. */
3581 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3582 return;
3583 }
57d1dd87
RH
3584
3585 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3586 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3587 return;
3588
3589 /* Recurse for nested constructors. */
3590 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3591 {
4038c495
GB
3592 unsigned HOST_WIDE_INT ix;
3593 constructor_elt *ce;
9771b263 3594 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4038c495 3595
9771b263 3596 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4038c495 3597 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
726a989a 3598
57d1dd87
RH
3599 return;
3600 }
3601
0461b801
EB
3602 /* If this is a variable sized type, we must remember the size. */
3603 maybe_with_size_expr (expr_p);
57d1dd87
RH
3604
3605 /* Gimplify the constructor element to something appropriate for the rhs
726a989a 3606 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
d3147f64 3607 the gimplifier will consider this a store to memory. Doing this
57d1dd87
RH
3608 gimplification now means that we won't have to deal with complicated
3609 language-specific trees, nor trees like SAVE_EXPR that can induce
b01d837f 3610 exponential search behavior. */
57d1dd87
RH
3611 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3612 if (one == GS_ERROR)
3613 {
3614 *expr_p = NULL;
3615 return;
3616 }
3617
3618 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3619 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3620 always be true for all scalars, since is_gimple_mem_rhs insists on a
3621 temporary variable for them. */
3622 if (DECL_P (*expr_p))
3623 return;
3624
3625 /* If this is of variable size, we have no choice but to assume it doesn't
3626 overlap since we can't make a temporary for it. */
4c923c28 3627 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
57d1dd87
RH
3628 return;
3629
3630 /* Otherwise, we must search for overlap ... */
3631 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3632 return;
3633
3634 /* ... and if found, force the value into a temporary. */
3635 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3636}
3637
6fa91b48
SB
3638/* A subroutine of gimplify_init_ctor_eval. Create a loop for
3639 a RANGE_EXPR in a CONSTRUCTOR for an array.
3640
3641 var = lower;
3642 loop_entry:
3643 object[var] = value;
3644 if (var == upper)
3645 goto loop_exit;
3646 var = var + 1;
3647 goto loop_entry;
3648 loop_exit:
3649
3650 We increment var _after_ the loop exit check because we might otherwise
3651 fail if upper == TYPE_MAX_VALUE (type for upper).
3652
3653 Note that we never have to deal with SAVE_EXPRs here, because this has
3654 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3655
9771b263 3656static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
726a989a 3657 gimple_seq *, bool);
6fa91b48
SB
3658
3659static void
3660gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3661 tree value, tree array_elt_type,
726a989a 3662 gimple_seq *pre_p, bool cleared)
6fa91b48 3663{
726a989a 3664 tree loop_entry_label, loop_exit_label, fall_thru_label;
b56b9fe3 3665 tree var, var_type, cref, tmp;
6fa91b48 3666
c2255bc4
AH
3667 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3668 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3669 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
6fa91b48
SB
3670
3671 /* Create and initialize the index variable. */
3672 var_type = TREE_TYPE (upper);
3673 var = create_tmp_var (var_type, NULL);
726a989a 3674 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
6fa91b48
SB
3675
3676 /* Add the loop entry label. */
726a989a 3677 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
6fa91b48
SB
3678
3679 /* Build the reference. */
3680 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3681 var, NULL_TREE, NULL_TREE);
3682
3683 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3684 the store. Otherwise just assign value to the reference. */
3685
3686 if (TREE_CODE (value) == CONSTRUCTOR)
3687 /* NB we might have to call ourself recursively through
3688 gimplify_init_ctor_eval if the value is a constructor. */
3689 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3690 pre_p, cleared);
3691 else
726a989a 3692 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
6fa91b48
SB
3693
3694 /* We exit the loop when the index var is equal to the upper bound. */
726a989a
RB
3695 gimplify_seq_add_stmt (pre_p,
3696 gimple_build_cond (EQ_EXPR, var, upper,
3697 loop_exit_label, fall_thru_label));
3698
3699 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
6fa91b48
SB
3700
3701 /* Otherwise, increment the index var... */
b56b9fe3
RS
3702 tmp = build2 (PLUS_EXPR, var_type, var,
3703 fold_convert (var_type, integer_one_node));
726a989a 3704 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
6fa91b48
SB
3705
3706 /* ...and jump back to the loop entry. */
726a989a 3707 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
6fa91b48
SB
3708
3709 /* Add the loop exit label. */
726a989a 3710 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
6fa91b48
SB
3711}
3712
292a398f 3713/* Return true if FDECL is accessing a field that is zero sized. */
b8698a0f 3714
292a398f 3715static bool
22ea9ec0 3716zero_sized_field_decl (const_tree fdecl)
292a398f 3717{
b8698a0f 3718 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
292a398f
DB
3719 && integer_zerop (DECL_SIZE (fdecl)))
3720 return true;
3721 return false;
3722}
3723
d06526b7 3724/* Return true if TYPE is zero sized. */
b8698a0f 3725
d06526b7 3726static bool
22ea9ec0 3727zero_sized_type (const_tree type)
d06526b7
AP
3728{
3729 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3730 && integer_zerop (TYPE_SIZE (type)))
3731 return true;
3732 return false;
3733}
3734
57d1dd87
RH
3735/* A subroutine of gimplify_init_constructor. Generate individual
3736 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4038c495 3737 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
57d1dd87
RH
3738 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3739 zeroed first. */
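/* As a rough example, initializing "s" from "{ .a = 1, .b = f () }" emits
   the component stores "s.a = 1; s.b = f ();" one at a time; RANGE_EXPR
   indices expand into the loop built by gimplify_init_ctor_eval_range
   above, and zero-valued elements are skipped when CLEARED is true. */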
3740
3741static void
9771b263 3742gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
726a989a 3743 gimple_seq *pre_p, bool cleared)
57d1dd87
RH
3744{
3745 tree array_elt_type = NULL;
4038c495
GB
3746 unsigned HOST_WIDE_INT ix;
3747 tree purpose, value;
57d1dd87
RH
3748
3749 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3750 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3751
4038c495 3752 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
57d1dd87 3753 {
726a989a 3754 tree cref;
57d1dd87
RH
3755
3756 /* NULL values are created above for gimplification errors. */
3757 if (value == NULL)
3758 continue;
3759
3760 if (cleared && initializer_zerop (value))
3761 continue;
3762
6fa91b48
SB
3763 /* ??? Here's to hoping the front end fills in all of the indices,
3764 so we don't have to figure out what's missing ourselves. */
3765 gcc_assert (purpose);
3766
816fa80a
OH
3767 /* Skip zero-sized fields, unless value has side-effects. This can
3768 happen with calls to functions returning a zero-sized type, which
3769 we shouldn't discard. As a number of downstream passes don't
3770 expect sets of zero-sized fields, we rely on the gimplification of
3771 the MODIFY_EXPR we make below to drop the assignment statement. */
3772 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
292a398f
DB
3773 continue;
3774
6fa91b48
SB
3775 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3776 whole range. */
3777 if (TREE_CODE (purpose) == RANGE_EXPR)
57d1dd87 3778 {
6fa91b48
SB
3779 tree lower = TREE_OPERAND (purpose, 0);
3780 tree upper = TREE_OPERAND (purpose, 1);
3781
3782 /* If the lower bound is equal to upper, just treat it as if
3783 upper was the index. */
3784 if (simple_cst_equal (lower, upper))
3785 purpose = upper;
3786 else
3787 {
3788 gimplify_init_ctor_eval_range (object, lower, upper, value,
3789 array_elt_type, pre_p, cleared);
3790 continue;
3791 }
3792 }
57d1dd87 3793
6fa91b48
SB
3794 if (array_elt_type)
3795 {
1a1640db
RG
3796 /* Do not use bitsizetype for ARRAY_REF indices. */
3797 if (TYPE_DOMAIN (TREE_TYPE (object)))
ad19c4be
EB
3798 purpose
3799 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3800 purpose);
b4257cfc
RG
3801 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3802 purpose, NULL_TREE, NULL_TREE);
57d1dd87
RH
3803 }
3804 else
cf0efa6a
ILT
3805 {
3806 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
b4257cfc
RG
3807 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3808 unshare_expr (object), purpose, NULL_TREE);
cf0efa6a 3809 }
57d1dd87 3810
cf0efa6a
ILT
3811 if (TREE_CODE (value) == CONSTRUCTOR
3812 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
57d1dd87
RH
3813 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3814 pre_p, cleared);
3815 else
3816 {
726a989a 3817 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
57d1dd87 3818 gimplify_and_add (init, pre_p);
726a989a 3819 ggc_free (init);
57d1dd87
RH
3820 }
3821 }
3822}
3823
ad19c4be 3824/* Return the appropriate RHS predicate for this LHS. */
726a989a
RB
3825
3826gimple_predicate
3827rhs_predicate_for (tree lhs)
3828{
ba4d8f9d
RG
3829 if (is_gimple_reg (lhs))
3830 return is_gimple_reg_rhs_or_call;
726a989a 3831 else
ba4d8f9d 3832 return is_gimple_mem_rhs_or_call;
726a989a
RB
3833}
3834
2ec5deb5
PB
3835/* Gimplify a C99 compound literal expression. This just means adding
3836 the DECL_EXPR before the current statement and using its anonymous
3837 decl instead. */
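/* For instance, "s = (struct S) { 1, 2 };" is gimplified by first emitting
   the DECL_EXPR of the literal's anonymous variable (initializing it to
   { 1, 2 }) and then using that decl as the new operand, roughly
   "D = { 1, 2 }; s = D;" where the name "D" is only illustrative. */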
3838
3839static enum gimplify_status
a845a7f5 3840gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4c53d183 3841 bool (*gimple_test_f) (tree),
a845a7f5 3842 fallback_t fallback)
2ec5deb5
PB
3843{
3844 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3845 tree decl = DECL_EXPR_DECL (decl_s);
4c53d183 3846 tree init = DECL_INITIAL (decl);
2ec5deb5
PB
3847 /* Mark the decl as addressable if the compound literal
3848 expression is addressable now, otherwise it is marked too late
3849 after we gimplify the initialization expression. */
3850 if (TREE_ADDRESSABLE (*expr_p))
3851 TREE_ADDRESSABLE (decl) = 1;
4c53d183
MM
3852 /* Otherwise, if we don't need an lvalue and have a literal directly
3853 substitute it. Check if it matches the gimple predicate, as
3854 otherwise we'd generate a new temporary, and we can as well just
3855 use the decl we already have. */
3856 else if (!TREE_ADDRESSABLE (decl)
3857 && init
3858 && (fallback & fb_lvalue) == 0
3859 && gimple_test_f (init))
3860 {
3861 *expr_p = init;
3862 return GS_OK;
3863 }
2ec5deb5
PB
3864
3865 /* Preliminarily mark non-addressed complex variables as eligible
3866 for promotion to gimple registers. We'll transform their uses
3867 as we find them. */
3868 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3869 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3870 && !TREE_THIS_VOLATILE (decl)
3871 && !needs_to_live_in_memory (decl))
3872 DECL_GIMPLE_REG_P (decl) = 1;
3873
a845a7f5
ILT
3874 /* If the decl is not addressable, then it is being used in some
3875 expression or on the right hand side of a statement, and it can
3876 be put into a readonly data section. */
3877 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3878 TREE_READONLY (decl) = 1;
3879
2ec5deb5
PB
3880 /* This decl isn't mentioned in the enclosing block, so add it to the
3881 list of temps. FIXME it seems a bit of a kludge to say that
3882 anonymous artificial vars aren't pushed, but everything else is. */
3883 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3884 gimple_add_tmp_var (decl);
3885
3886 gimplify_and_add (decl_s, pre_p);
3887 *expr_p = decl;
3888 return GS_OK;
3889}
3890
3891/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3892 return a new CONSTRUCTOR if something changed. */
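/* E.g. a nested initializer such as "{ .inner = (struct T) { 0, 1 } }" can
   be flattened to "{ .inner = { 0, 1 } }", provided neither the literal
   nor its anonymous decl has had its address taken (checked below). */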
3893
3894static tree
3895optimize_compound_literals_in_ctor (tree orig_ctor)
3896{
3897 tree ctor = orig_ctor;
9771b263
DN
3898 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3899 unsigned int idx, num = vec_safe_length (elts);
2ec5deb5
PB
3900
3901 for (idx = 0; idx < num; idx++)
3902 {
9771b263 3903 tree value = (*elts)[idx].value;
2ec5deb5
PB
3904 tree newval = value;
3905 if (TREE_CODE (value) == CONSTRUCTOR)
3906 newval = optimize_compound_literals_in_ctor (value);
3907 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3908 {
3909 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3910 tree decl = DECL_EXPR_DECL (decl_s);
3911 tree init = DECL_INITIAL (decl);
3912
3913 if (!TREE_ADDRESSABLE (value)
3914 && !TREE_ADDRESSABLE (decl)
6f8f67e9
JJ
3915 && init
3916 && TREE_CODE (init) == CONSTRUCTOR)
2ec5deb5
PB
3917 newval = optimize_compound_literals_in_ctor (init);
3918 }
3919 if (newval == value)
3920 continue;
3921
3922 if (ctor == orig_ctor)
3923 {
3924 ctor = copy_node (orig_ctor);
9771b263 3925 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
2ec5deb5
PB
3926 elts = CONSTRUCTOR_ELTS (ctor);
3927 }
9771b263 3928 (*elts)[idx].value = newval;
2ec5deb5
PB
3929 }
3930 return ctor;
3931}
3932
26d44ae2
RH
3933/* A subroutine of gimplify_modify_expr. Break out elements of a
3934 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3935
3936 Note that we still need to clear any elements that don't have explicit
3937 initializers, so if not all elements are initialized we keep the
ffed8a01
AH
3938 original MODIFY_EXPR, we just remove all of the constructor elements.
3939
3940 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3941 GS_ERROR if we would have to create a temporary when gimplifying
3942 this constructor. Otherwise, return GS_OK.
3943
3944 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
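/* In rough terms, "struct S s = { .a = 1 };" may be lowered to a block
   clear of "s" followed by "s.a = 1", to a block copy from a static
   constant for large constant initializers, or to individual field stores
   only, depending on the heuristics below (size, number of nonzero
   elements, whether the initializer is a valid constant, and so on). */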
26d44ae2
RH
3945
3946static enum gimplify_status
726a989a
RB
3947gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3948 bool want_value, bool notify_temp_creation)
26d44ae2 3949{
f5a1f0d0 3950 tree object, ctor, type;
26d44ae2 3951 enum gimplify_status ret;
9771b263 3952 vec<constructor_elt, va_gc> *elts;
26d44ae2 3953
f5a1f0d0 3954 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
26d44ae2 3955
ffed8a01
AH
3956 if (!notify_temp_creation)
3957 {
726a989a 3958 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
ffed8a01
AH
3959 is_gimple_lvalue, fb_lvalue);
3960 if (ret == GS_ERROR)
3961 return ret;
3962 }
57d1dd87 3963
726a989a 3964 object = TREE_OPERAND (*expr_p, 0);
f5a1f0d0
PB
3965 ctor = TREE_OPERAND (*expr_p, 1) =
3966 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3967 type = TREE_TYPE (ctor);
3968 elts = CONSTRUCTOR_ELTS (ctor);
26d44ae2 3969 ret = GS_ALL_DONE;
726a989a 3970
26d44ae2
RH
3971 switch (TREE_CODE (type))
3972 {
3973 case RECORD_TYPE:
3974 case UNION_TYPE:
3975 case QUAL_UNION_TYPE:
3976 case ARRAY_TYPE:
3977 {
57d1dd87 3978 struct gimplify_init_ctor_preeval_data preeval_data;
953d0c90
RS
3979 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3980 bool cleared, complete_p, valid_const_initializer;
26d44ae2
RH
3981
3982 /* Aggregate types must lower constructors to initialization of
3983 individual elements. The exception is that a CONSTRUCTOR node
3984 with no elements indicates zero-initialization of the whole. */
9771b263 3985 if (vec_safe_is_empty (elts))
ffed8a01
AH
3986 {
3987 if (notify_temp_creation)
3988 return GS_OK;
3989 break;
3990 }
b8698a0f 3991
fe24d485
OH
3992 /* Fetch information about the constructor to direct later processing.
3993 We might want to make static versions of it in various cases, and
3994 can only do so if it is known to be a valid constant initializer. */
3995 valid_const_initializer
3996 = categorize_ctor_elements (ctor, &num_nonzero_elements,
953d0c90 3997 &num_ctor_elements, &complete_p);
26d44ae2
RH
3998
3999 /* If a const aggregate variable is being initialized, then it
4000 should never be a loss to promote the variable to be static. */
fe24d485 4001 if (valid_const_initializer
6f642f98 4002 && num_nonzero_elements > 1
26d44ae2 4003 && TREE_READONLY (object)
d0ea0759
SE
4004 && TREE_CODE (object) == VAR_DECL
4005 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
26d44ae2 4006 {
ffed8a01
AH
4007 if (notify_temp_creation)
4008 return GS_ERROR;
26d44ae2
RH
4009 DECL_INITIAL (object) = ctor;
4010 TREE_STATIC (object) = 1;
4011 if (!DECL_NAME (object))
4012 DECL_NAME (object) = create_tmp_var_name ("C");
4013 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4014
4015 /* ??? C++ doesn't automatically append a .<number> to the
6bdf3519 4016 assembler name, and even when it does, it looks at FE private
26d44ae2
RH
4017 data structures to figure out what that number should be,
4018 which are not set for this variable. I suppose this is
4019 important for local statics for inline functions, which aren't
4020 "local" in the object file sense. So in order to get a unique
4021 TU-local symbol, we must invoke the lhd version now. */
4022 lhd_set_decl_assembler_name (object);
4023
4024 *expr_p = NULL_TREE;
4025 break;
4026 }
4027
cce70747
JC
4028 /* If there are "lots" of initialized elements, even discounting
4029 those that are not address constants (and thus *must* be
4030 computed at runtime), then partition the constructor into
4031 constant and non-constant parts. Block copy the constant
4032 parts in, then generate code for the non-constant parts. */
4033 /* TODO. There's code in cp/typeck.c to do this. */
4034
953d0c90
RS
4035 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4036 /* store_constructor will ignore the clearing of variable-sized
4037 objects. Initializers for such objects must explicitly set
4038 every field that needs to be set. */
4039 cleared = false;
4040 else if (!complete_p)
4041 /* If the constructor isn't complete, clear the whole object
4042 beforehand.
4043
4044 ??? This ought not to be needed. For any element not present
4045 in the initializer, we should simply set them to zero. Except
4046 we'd need to *find* the elements that are not present, and that
4047 requires trickery to avoid quadratic compile-time behavior in
4048 large cases or excessive memory use in small cases. */
73ed17ff 4049 cleared = true;
953d0c90 4050 else if (num_ctor_elements - num_nonzero_elements
e04ad03d 4051 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
953d0c90
RS
4052 && num_nonzero_elements < num_ctor_elements / 4)
4053 /* If there are "lots" of zeros, it's more efficient to clear
4054 the memory and then set the nonzero elements. */
cce70747 4055 cleared = true;
953d0c90
RS
4056 else
4057 cleared = false;
cce70747 4058
26d44ae2
RH
4059 /* If there are "lots" of initialized elements, and all of them
4060 are valid address constants, then the entire initializer can
cce70747
JC
4061 be dropped to memory, and then memcpy'd out. Don't do this
4062 for sparse arrays, though, as it's more efficient to follow
4063 the standard CONSTRUCTOR behavior of memset followed by
8afd015a
JM
4064 individual element initialization. Also don't do this for small
4065 all-zero initializers (which aren't big enough to merit
4066 clearing), and don't try to make bitwise copies of
4067 TREE_ADDRESSABLE types. */
4068 if (valid_const_initializer
4069 && !(cleared || num_nonzero_elements == 0)
c69c7be1 4070 && !TREE_ADDRESSABLE (type))
26d44ae2
RH
4071 {
4072 HOST_WIDE_INT size = int_size_in_bytes (type);
4073 unsigned int align;
4074
4075 /* ??? We can still get unbounded array types, at least
4076 from the C++ front end. This seems wrong, but attempt
4077 to work around it for now. */
4078 if (size < 0)
4079 {
4080 size = int_size_in_bytes (TREE_TYPE (object));
4081 if (size >= 0)
4082 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4083 }
4084
4085 /* Find the maximum alignment we can assume for the object. */
4086 /* ??? Make use of DECL_OFFSET_ALIGN. */
4087 if (DECL_P (object))
4088 align = DECL_ALIGN (object);
4089 else
4090 align = TYPE_ALIGN (type);
4091
f301837e
EB
4092 /* Do a block move either if the size is so small as to make
4093 each individual move a sub-unit move on average, or if it
4094 is so large as to make individual moves inefficient. */
329ad380
JJ
4095 if (size > 0
4096 && num_nonzero_elements > 1
f301837e
EB
4097 && (size < num_nonzero_elements
4098 || !can_move_by_pieces (size, align)))
26d44ae2 4099 {
ffed8a01
AH
4100 if (notify_temp_creation)
4101 return GS_ERROR;
4102
46314d3e
EB
4103 walk_tree (&ctor, force_labels_r, NULL, NULL);
4104 ctor = tree_output_constant_def (ctor);
4105 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4106 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4107 TREE_OPERAND (*expr_p, 1) = ctor;
57d1dd87
RH
4108
4109 /* This is no longer an assignment of a CONSTRUCTOR, but
4110 we still may have processing to do on the LHS. So
4111 pretend we didn't do anything here to let that happen. */
4112 return GS_UNHANDLED;
26d44ae2
RH
4113 }
4114 }
4115
558af7ca
EB
4116 /* If the target is volatile, we have non-zero elements and more than
4117 one field to assign, initialize the target from a temporary. */
61c7cbf8
RG
4118 if (TREE_THIS_VOLATILE (object)
4119 && !TREE_ADDRESSABLE (type)
558af7ca 4120 && num_nonzero_elements > 0
9771b263 4121 && vec_safe_length (elts) > 1)
61c7cbf8
RG
4122 {
4123 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
4124 TREE_OPERAND (*expr_p, 0) = temp;
4125 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4126 *expr_p,
4127 build2 (MODIFY_EXPR, void_type_node,
4128 object, temp));
4129 return GS_OK;
4130 }
4131
ffed8a01
AH
4132 if (notify_temp_creation)
4133 return GS_OK;
4134
675c873b
EB
4135 /* If there are nonzero elements and if needed, pre-evaluate to capture
4136 elements overlapping with the lhs into temporaries. We must do this
4137 before clearing to fetch the values before they are zeroed-out. */
4138 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
85d89e76
OH
4139 {
4140 preeval_data.lhs_base_decl = get_base_address (object);
4141 if (!DECL_P (preeval_data.lhs_base_decl))
4142 preeval_data.lhs_base_decl = NULL;
4143 preeval_data.lhs_alias_set = get_alias_set (object);
4144
726a989a 4145 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
85d89e76
OH
4146 pre_p, post_p, &preeval_data);
4147 }
4148
26d44ae2
RH
4149 if (cleared)
4150 {
4151 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4152 Note that we still have to gimplify, in order to handle the
57d1dd87 4153 case of variable sized types. Avoid shared tree structures. */
4038c495 4154 CONSTRUCTOR_ELTS (ctor) = NULL;
726a989a 4155 TREE_SIDE_EFFECTS (ctor) = 0;
57d1dd87 4156 object = unshare_expr (object);
726a989a 4157 gimplify_stmt (expr_p, pre_p);
26d44ae2
RH
4158 }
4159
6fa91b48
SB
4160 /* If we have not block cleared the object, or if there are nonzero
4161 elements in the constructor, add assignments to the individual
4162 scalar fields of the object. */
4163 if (!cleared || num_nonzero_elements > 0)
85d89e76 4164 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
26d44ae2
RH
4165
4166 *expr_p = NULL_TREE;
4167 }
4168 break;
4169
4170 case COMPLEX_TYPE:
4171 {
4172 tree r, i;
4173
ffed8a01
AH
4174 if (notify_temp_creation)
4175 return GS_OK;
4176
26d44ae2 4177 /* Extract the real and imaginary parts out of the ctor. */
9771b263
DN
4178 gcc_assert (elts->length () == 2);
4179 r = (*elts)[0].value;
4180 i = (*elts)[1].value;
26d44ae2
RH
4181 if (r == NULL || i == NULL)
4182 {
e8160c9a 4183 tree zero = build_zero_cst (TREE_TYPE (type));
26d44ae2
RH
4184 if (r == NULL)
4185 r = zero;
4186 if (i == NULL)
4187 i = zero;
4188 }
4189
4190 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4191 represent creation of a complex value. */
4192 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4193 {
4194 ctor = build_complex (type, r, i);
4195 TREE_OPERAND (*expr_p, 1) = ctor;
4196 }
4197 else
4198 {
b4257cfc 4199 ctor = build2 (COMPLEX_EXPR, type, r, i);
26d44ae2 4200 TREE_OPERAND (*expr_p, 1) = ctor;
726a989a
RB
4201 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4202 pre_p,
4203 post_p,
17ad5b5e
RH
4204 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4205 fb_rvalue);
26d44ae2
RH
4206 }
4207 }
4208 break;
506e2710 4209
26d44ae2 4210 case VECTOR_TYPE:
4038c495
GB
4211 {
4212 unsigned HOST_WIDE_INT ix;
4213 constructor_elt *ce;
e89be13b 4214
ffed8a01
AH
4215 if (notify_temp_creation)
4216 return GS_OK;
4217
4038c495
GB
4218 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4219 if (TREE_CONSTANT (ctor))
4220 {
4221 bool constant_p = true;
4222 tree value;
4223
4224 /* Even when ctor is constant, it might contain non-*_CST
9f1da821
RS
4225 elements, such as addresses or trapping values like
4226 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4227 in VECTOR_CST nodes. */
4038c495
GB
4228 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4229 if (!CONSTANT_CLASS_P (value))
4230 {
4231 constant_p = false;
4232 break;
4233 }
e89be13b 4234
4038c495
GB
4235 if (constant_p)
4236 {
4237 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4238 break;
4239 }
84816907 4240
9f1da821 4241 /* Don't reduce an initializer constant even if we can't
84816907
JM
4242 make a VECTOR_CST. It won't do anything for us, and it'll
4243 prevent us from representing it as a single constant. */
9f1da821
RS
4244 if (initializer_constant_valid_p (ctor, type))
4245 break;
4246
4247 TREE_CONSTANT (ctor) = 0;
4038c495 4248 }
e89be13b 4249
4038c495
GB
4250 /* Vector types use CONSTRUCTOR all the way through gimple
4251 compilation as a general initializer. */
9771b263 4252 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4038c495
GB
4253 {
4254 enum gimplify_status tret;
726a989a
RB
4255 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4256 fb_rvalue);
4038c495
GB
4257 if (tret == GS_ERROR)
4258 ret = GS_ERROR;
4259 }
726a989a
RB
4260 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4261 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4038c495 4262 }
26d44ae2 4263 break;
6de9cd9a 4264
26d44ae2
RH
4265 default:
4266 /* So how did we get a CONSTRUCTOR for a scalar type? */
282899df 4267 gcc_unreachable ();
26d44ae2 4268 }
6de9cd9a 4269
26d44ae2
RH
4270 if (ret == GS_ERROR)
4271 return GS_ERROR;
4272 else if (want_value)
4273 {
26d44ae2
RH
4274 *expr_p = object;
4275 return GS_OK;
6de9cd9a 4276 }
26d44ae2 4277 else
726a989a
RB
4278 {
4279 /* If we have gimplified both sides of the initializer but have
4280 not emitted an assignment, do so now. */
4281 if (*expr_p)
4282 {
4283 tree lhs = TREE_OPERAND (*expr_p, 0);
4284 tree rhs = TREE_OPERAND (*expr_p, 1);
4285 gimple init = gimple_build_assign (lhs, rhs);
4286 gimplify_seq_add_stmt (pre_p, init);
4287 *expr_p = NULL;
4288 }
4289
4290 return GS_ALL_DONE;
4291 }
26d44ae2 4292}
6de9cd9a 4293
de4af523
JJ
4294/* Given a pointer value OP0, return a simplified version of an
4295 indirection through OP0, or NULL_TREE if no simplification is
4296 possible. This may only be applied to a rhs of an expression.
4297 Note that the resulting type may be different from the type pointed
4298 to in the sense that it is still compatible from the langhooks
4299 point of view. */
4300
4301static tree
4302gimple_fold_indirect_ref_rhs (tree t)
4303{
4304 return gimple_fold_indirect_ref (t);
4305}
4306
4caa08da
AH
4307/* Subroutine of gimplify_modify_expr to do simplifications of
4308 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4309 something changes. */
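/* Typical simplifications performed by the loop below include forwarding a
   read-only variable's CONSTRUCTOR initializer, stripping a redundant
   "*(const A *) &x" indirection, initializing directly from a TARGET_EXPR,
   sinking the assignment into the arms of a COND_EXPR, and letting a call
   use *TO_P as its return slot; see the individual cases for details. */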
6de9cd9a 4310
26d44ae2 4311static enum gimplify_status
726a989a
RB
4312gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4313 gimple_seq *pre_p, gimple_seq *post_p,
4314 bool want_value)
26d44ae2 4315{
6d729f28
JM
4316 enum gimplify_status ret = GS_UNHANDLED;
4317 bool changed;
6de9cd9a 4318
6d729f28
JM
4319 do
4320 {
4321 changed = false;
4322 switch (TREE_CODE (*from_p))
4323 {
4324 case VAR_DECL:
4325 /* If we're assigning from a read-only variable initialized with
4326 a constructor, do the direct assignment from the constructor,
4327 but only if neither source nor target are volatile since this
4328 latter assignment might end up being done on a per-field basis. */
4329 if (DECL_INITIAL (*from_p)
4330 && TREE_READONLY (*from_p)
4331 && !TREE_THIS_VOLATILE (*from_p)
4332 && !TREE_THIS_VOLATILE (*to_p)
4333 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4334 {
4335 tree old_from = *from_p;
4336 enum gimplify_status subret;
4337
4338 /* Move the constructor into the RHS. */
4339 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4340
4341 /* Let's see if gimplify_init_constructor will need to put
4342 it in memory. */
4343 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4344 false, true);
4345 if (subret == GS_ERROR)
4346 {
4347 /* If so, revert the change. */
4348 *from_p = old_from;
4349 }
4350 else
4351 {
4352 ret = GS_OK;
4353 changed = true;
4354 }
4355 }
4356 break;
4357 case INDIRECT_REF:
4caa08da 4358 {
6d729f28 4359 /* If we have code like
ffed8a01 4360
6d729f28 4361 *(const A*)(A*)&x
ffed8a01 4362
6d729f28
JM
4363 where the type of "x" is a (possibly cv-qualified variant
4364 of "A"), treat the entire expression as identical to "x".
4365 This kind of code arises in C++ when an object is bound
4366 to a const reference, and if "x" is a TARGET_EXPR we want
4367 to take advantage of the optimization below. */
06baaba3 4368 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
6d729f28
JM
4369 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4370 if (t)
ffed8a01 4371 {
06baaba3
RG
4372 if (TREE_THIS_VOLATILE (t) != volatile_p)
4373 {
4374 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4375 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4376 build_fold_addr_expr (t));
4377 if (REFERENCE_CLASS_P (t))
4378 TREE_THIS_VOLATILE (t) = volatile_p;
4379 }
6d729f28
JM
4380 *from_p = t;
4381 ret = GS_OK;
4382 changed = true;
ffed8a01 4383 }
6d729f28
JM
4384 break;
4385 }
4386
4387 case TARGET_EXPR:
4388 {
4389 /* If we are initializing something from a TARGET_EXPR, strip the
4390 TARGET_EXPR and initialize it directly, if possible. This can't
4391 be done if the initializer is void, since that implies that the
4392 temporary is set in some non-trivial way.
4393
4394 ??? What about code that pulls out the temp and uses it
4395 elsewhere? I think that such code never uses the TARGET_EXPR as
4396 an initializer. If I'm wrong, we'll die because the temp won't
4397 have any RTL. In that case, I guess we'll need to replace
4398 references somehow. */
4399 tree init = TARGET_EXPR_INITIAL (*from_p);
4400
4401 if (init
4402 && !VOID_TYPE_P (TREE_TYPE (init)))
ffed8a01 4403 {
6d729f28 4404 *from_p = init;
ffed8a01 4405 ret = GS_OK;
6d729f28 4406 changed = true;
ffed8a01 4407 }
4caa08da 4408 }
6d729f28 4409 break;
f98625f6 4410
6d729f28
JM
4411 case COMPOUND_EXPR:
4412 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4413 caught. */
4414 gimplify_compound_expr (from_p, pre_p, true);
4415 ret = GS_OK;
4416 changed = true;
4417 break;
f98625f6 4418
6d729f28 4419 case CONSTRUCTOR:
ce3beba3
JM
4420 /* If we already made some changes, let the front end have a
4421 crack at this before we break it down. */
4422 if (ret != GS_UNHANDLED)
4423 break;
6d729f28
JM
4424 /* If we're initializing from a CONSTRUCTOR, break this into
4425 individual MODIFY_EXPRs. */
4426 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4427 false);
4428
4429 case COND_EXPR:
4430 /* If we're assigning to a non-register type, push the assignment
4431 down into the branches. This is mandatory for ADDRESSABLE types,
4432 since we cannot generate temporaries for such, but it saves a
4433 copy in other cases as well. */
4434 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
f98625f6 4435 {
6d729f28
JM
4436 /* This code should mirror the code in gimplify_cond_expr. */
4437 enum tree_code code = TREE_CODE (*expr_p);
4438 tree cond = *from_p;
4439 tree result = *to_p;
4440
4441 ret = gimplify_expr (&result, pre_p, post_p,
4442 is_gimple_lvalue, fb_lvalue);
4443 if (ret != GS_ERROR)
4444 ret = GS_OK;
4445
4446 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4447 TREE_OPERAND (cond, 1)
4448 = build2 (code, void_type_node, result,
4449 TREE_OPERAND (cond, 1));
4450 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4451 TREE_OPERAND (cond, 2)
4452 = build2 (code, void_type_node, unshare_expr (result),
4453 TREE_OPERAND (cond, 2));
4454
4455 TREE_TYPE (cond) = void_type_node;
4456 recalculate_side_effects (cond);
4457
4458 if (want_value)
4459 {
4460 gimplify_and_add (cond, pre_p);
4461 *expr_p = unshare_expr (result);
4462 }
4463 else
4464 *expr_p = cond;
4465 return ret;
f98625f6 4466 }
f98625f6 4467 break;
f98625f6 4468
6d729f28
JM
4469 case CALL_EXPR:
4470 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4471 return slot so that we don't generate a temporary. */
4472 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4473 && aggregate_value_p (*from_p, *from_p))
26d44ae2 4474 {
6d729f28
JM
4475 bool use_target;
4476
4477 if (!(rhs_predicate_for (*to_p))(*from_p))
4478 /* If we need a temporary, *to_p isn't accurate. */
4479 use_target = false;
ad19c4be 4480 /* It's OK to use the return slot directly unless it's an NRV. */
6d729f28
JM
4481 else if (TREE_CODE (*to_p) == RESULT_DECL
4482 && DECL_NAME (*to_p) == NULL_TREE
4483 && needs_to_live_in_memory (*to_p))
6d729f28
JM
4484 use_target = true;
4485 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4486 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4487 /* Don't force regs into memory. */
4488 use_target = false;
4489 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4490 /* It's OK to use the target directly if it's being
4491 initialized. */
4492 use_target = true;
aabb90e5
RG
4493 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4494 /* Always use the target and thus RSO for variable-sized types.
4495 GIMPLE cannot deal with a variable-sized assignment
4496 embedded in a call statement. */
4497 use_target = true;
345ae177
AH
4498 else if (TREE_CODE (*to_p) != SSA_NAME
4499 && (!is_gimple_variable (*to_p)
4500 || needs_to_live_in_memory (*to_p)))
6d729f28
JM
4501 /* Don't use the original target if it's already addressable;
4502 if its address escapes, and the called function uses the
4503 NRV optimization, a conforming program could see *to_p
4504 change before the called function returns; see c++/19317.
4505 When optimizing, the return_slot pass marks more functions
4506 as safe after we have escape info. */
4507 use_target = false;
4508 else
4509 use_target = true;
4510
4511 if (use_target)
4512 {
4513 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4514 mark_addressable (*to_p);
4515 }
26d44ae2 4516 }
6d729f28 4517 break;
6de9cd9a 4518
6d729f28
JM
4519 case WITH_SIZE_EXPR:
4520 /* Likewise for calls that return an aggregate of non-constant size,
4521 since we would not be able to generate a temporary at all. */
4522 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4523 {
4524 *from_p = TREE_OPERAND (*from_p, 0);
ebad5233
JM
4525 /* We don't change ret in this case because the
4526 WITH_SIZE_EXPR might have been added in
4527 gimplify_modify_expr, so returning GS_OK would lead to an
4528 infinite loop. */
6d729f28
JM
4529 changed = true;
4530 }
4531 break;
6de9cd9a 4532
6d729f28
JM
4533 /* If we're initializing from a container, push the initialization
4534 inside it. */
4535 case CLEANUP_POINT_EXPR:
4536 case BIND_EXPR:
4537 case STATEMENT_LIST:
26d44ae2 4538 {
6d729f28
JM
4539 tree wrap = *from_p;
4540 tree t;
dae7ec87 4541
6d729f28
JM
4542 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4543 fb_lvalue);
dae7ec87
JM
4544 if (ret != GS_ERROR)
4545 ret = GS_OK;
4546
6d729f28
JM
4547 t = voidify_wrapper_expr (wrap, *expr_p);
4548 gcc_assert (t == *expr_p);
dae7ec87
JM
4549
4550 if (want_value)
4551 {
6d729f28
JM
4552 gimplify_and_add (wrap, pre_p);
4553 *expr_p = unshare_expr (*to_p);
dae7ec87
JM
4554 }
4555 else
6d729f28
JM
4556 *expr_p = wrap;
4557 return GS_OK;
26d44ae2 4558 }
6de9cd9a 4559
6d729f28 4560 case COMPOUND_LITERAL_EXPR:
fa47911c 4561 {
6d729f28
JM
4562 tree complit = TREE_OPERAND (*expr_p, 1);
4563 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4564 tree decl = DECL_EXPR_DECL (decl_s);
4565 tree init = DECL_INITIAL (decl);
4566
4567 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4568 into struct T x = { 0, 1, 2 } if the address of the
4569 compound literal has never been taken. */
4570 if (!TREE_ADDRESSABLE (complit)
4571 && !TREE_ADDRESSABLE (decl)
4572 && init)
fa47911c 4573 {
6d729f28
JM
4574 *expr_p = copy_node (*expr_p);
4575 TREE_OPERAND (*expr_p, 1) = init;
4576 return GS_OK;
fa47911c
JM
4577 }
4578 }
4579
6d729f28
JM
4580 default:
4581 break;
2ec5deb5 4582 }
6d729f28
JM
4583 }
4584 while (changed);
6de9cd9a 4585
6de9cd9a
DN
4586 return ret;
4587}
4588
216820a4
RG
4589
4590/* Return true if T looks like a valid GIMPLE statement. */
4591
4592static bool
4593is_gimple_stmt (tree t)
4594{
4595 const enum tree_code code = TREE_CODE (t);
4596
4597 switch (code)
4598 {
4599 case NOP_EXPR:
4600 /* The only valid NOP_EXPR is the empty statement. */
4601 return IS_EMPTY_STMT (t);
4602
4603 case BIND_EXPR:
4604 case COND_EXPR:
4605 /* These are only valid if they're void. */
4606 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4607
4608 case SWITCH_EXPR:
4609 case GOTO_EXPR:
4610 case RETURN_EXPR:
4611 case LABEL_EXPR:
4612 case CASE_LABEL_EXPR:
4613 case TRY_CATCH_EXPR:
4614 case TRY_FINALLY_EXPR:
4615 case EH_FILTER_EXPR:
4616 case CATCH_EXPR:
4617 case ASM_EXPR:
4618 case STATEMENT_LIST:
4619 case OMP_PARALLEL:
4620 case OMP_FOR:
74bf76ed 4621 case OMP_SIMD:
acf0174b 4622 case OMP_DISTRIBUTE:
216820a4
RG
4623 case OMP_SECTIONS:
4624 case OMP_SECTION:
4625 case OMP_SINGLE:
4626 case OMP_MASTER:
acf0174b 4627 case OMP_TASKGROUP:
216820a4
RG
4628 case OMP_ORDERED:
4629 case OMP_CRITICAL:
4630 case OMP_TASK:
4631 /* These are always void. */
4632 return true;
4633
4634 case CALL_EXPR:
4635 case MODIFY_EXPR:
4636 case PREDICT_EXPR:
4637 /* These are valid regardless of their type. */
4638 return true;
4639
4640 default:
4641 return false;
4642 }
4643}
4644
4645
d9c2d296
AP
4646/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4647 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
7b7e6ecd
EB
4648 DECL_GIMPLE_REG_P set.
4649
4650 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4651 other, unmodified part of the complex object just before the total store.
4652 As a consequence, if the object is still uninitialized, an undefined value
4653 will be loaded into a register, which may result in a spurious exception
4654 if the register is floating-point and the value happens to be a signaling
4655 NaN for example. Then the fully-fledged complex operations lowering pass
4656 followed by a DCE pass are necessary in order to fix things up. */
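/* Concretely, a partial store such as "__real__ c = x" is turned into the
   total store "c = COMPLEX_EXPR <x, __imag__ c>", with the unmodified
   imaginary part loaded into a temporary just before the assignment; the
   roles are swapped for an imaginary-part store. */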
d9c2d296
AP
4657
4658static enum gimplify_status
726a989a
RB
4659gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4660 bool want_value)
d9c2d296
AP
4661{
4662 enum tree_code code, ocode;
4663 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4664
726a989a
RB
4665 lhs = TREE_OPERAND (*expr_p, 0);
4666 rhs = TREE_OPERAND (*expr_p, 1);
d9c2d296
AP
4667 code = TREE_CODE (lhs);
4668 lhs = TREE_OPERAND (lhs, 0);
4669
4670 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4671 other = build1 (ocode, TREE_TYPE (rhs), lhs);
8d2b0410 4672 TREE_NO_WARNING (other) = 1;
d9c2d296
AP
4673 other = get_formal_tmp_var (other, pre_p);
4674
4675 realpart = code == REALPART_EXPR ? rhs : other;
4676 imagpart = code == REALPART_EXPR ? other : rhs;
4677
4678 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4679 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4680 else
4681 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4682
726a989a
RB
4683 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4684 *expr_p = (want_value) ? rhs : NULL_TREE;
d9c2d296
AP
4685
4686 return GS_ALL_DONE;
4687}
4688
206048bd 4689/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6de9cd9a
DN
4690
4691 modify_expr
4692 : varname '=' rhs
4693 | '*' ID '=' rhs
4694
4695 PRE_P points to the list where side effects that must happen before
4696 *EXPR_P should be stored.
4697
4698 POST_P points to the list where side effects that must happen after
4699 *EXPR_P should be stored.
4700
4701 WANT_VALUE is nonzero iff we want to use the value of this expression
4702 in another expression. */
4703
4704static enum gimplify_status
726a989a
RB
4705gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4706 bool want_value)
6de9cd9a 4707{
726a989a
RB
4708 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4709 tree *to_p = &TREE_OPERAND (*expr_p, 0);
44de5aeb 4710 enum gimplify_status ret = GS_UNHANDLED;
726a989a 4711 gimple assign;
db3927fb 4712 location_t loc = EXPR_LOCATION (*expr_p);
6da8be89 4713 gimple_stmt_iterator gsi;
6de9cd9a 4714
282899df
NS
4715 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4716 || TREE_CODE (*expr_p) == INIT_EXPR);
6de9cd9a 4717
d0ad58f9
JM
4718 /* Trying to simplify a clobber using normal logic doesn't work,
4719 so handle it here. */
4720 if (TREE_CLOBBER_P (*from_p))
4721 {
5d751b0c
JJ
4722 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4723 if (ret == GS_ERROR)
4724 return ret;
4725 gcc_assert (!want_value
4726 && (TREE_CODE (*to_p) == VAR_DECL
4727 || TREE_CODE (*to_p) == MEM_REF));
d0ad58f9
JM
4728 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4729 *expr_p = NULL;
4730 return GS_ALL_DONE;
4731 }
4732
1b24a790
RG
4733 /* Insert pointer conversions required by the middle-end that are not
4734 required by the frontend. This fixes middle-end type checking for
4735 cases such as gcc.dg/redecl-6.c. */
daad0278 4736 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
1b24a790
RG
4737 {
4738 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4739 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
db3927fb 4740 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
1b24a790
RG
4741 }
4742
83d7e8f0
JM
4743 /* See if any simplifications can be done based on what the RHS is. */
4744 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4745 want_value);
4746 if (ret != GS_UNHANDLED)
4747 return ret;
4748
4749 /* For zero sized types only gimplify the left hand side and right hand
4750 side as statements and throw away the assignment. Do this after
4751 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4752 types properly. */
753b34d7 4753 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
412f8986 4754 {
726a989a
RB
4755 gimplify_stmt (from_p, pre_p);
4756 gimplify_stmt (to_p, pre_p);
412f8986
AP
4757 *expr_p = NULL_TREE;
4758 return GS_ALL_DONE;
4759 }
6de9cd9a 4760
d25cee4d
RH
4761 /* If the value being copied is of variable width, compute the length
4762 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4763 before gimplifying any of the operands so that we can resolve any
4764 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4765 the size of the expression to be copied, not of the destination, so
726a989a 4766 that is what we must do here. */
d25cee4d 4767 maybe_with_size_expr (from_p);
6de9cd9a 4768
44de5aeb
RK
4769 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4770 if (ret == GS_ERROR)
4771 return ret;
6de9cd9a 4772
726a989a
RB
4773 /* As a special case, we have to temporarily allow for assignments
4774 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4775 a toplevel statement, when gimplifying the GENERIC expression
4776 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4777 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4778
4779 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4780 prevent gimplify_expr from trying to create a new temporary for
4781 foo's LHS, we tell it that it should only gimplify until it
4782 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4783 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4784 and all we need to do here is set 'a' to be its LHS. */
4785 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4786 fb_rvalue);
6de9cd9a
DN
4787 if (ret == GS_ERROR)
4788 return ret;
4789
44de5aeb
RK
4790 /* Now see if the above changed *from_p to something we handle specially. */
4791 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4792 want_value);
6de9cd9a
DN
4793 if (ret != GS_UNHANDLED)
4794 return ret;
4795
d25cee4d
RH
4796 /* If we've got a variable sized assignment between two lvalues (i.e. does
4797 not involve a call), then we can make things a bit more straightforward
4798 by converting the assignment to memcpy or memset. */
4799 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4800 {
4801 tree from = TREE_OPERAND (*from_p, 0);
4802 tree size = TREE_OPERAND (*from_p, 1);
4803
4804 if (TREE_CODE (from) == CONSTRUCTOR)
726a989a
RB
4805 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4806
e847cc68 4807 if (is_gimple_addressable (from))
d25cee4d
RH
4808 {
4809 *from_p = from;
726a989a
RB
4810 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4811 pre_p);
d25cee4d
RH
4812 }
4813 }
4814
e41d82f5
RH
4815 /* Transform partial stores to non-addressable complex variables into
4816 total stores. This allows us to use real instead of virtual operands
4817 for these variables, which improves optimization. */
4818 if ((TREE_CODE (*to_p) == REALPART_EXPR
4819 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4820 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4821 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4822
f173837a
EB
4823 /* Try to alleviate the effects of the gimplification creating artificial
4824 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4825 if (!gimplify_ctxp->into_ssa
f2896bc9 4826 && TREE_CODE (*from_p) == VAR_DECL
726a989a
RB
4827 && DECL_IGNORED_P (*from_p)
4828 && DECL_P (*to_p)
4829 && !DECL_IGNORED_P (*to_p))
f173837a
EB
4830 {
4831 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4832 DECL_NAME (*from_p)
4833 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
839b422f 4834 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
f173837a 4835 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
726a989a
RB
4836 }
4837
8f0fe813
NS
4838 if (want_value && TREE_THIS_VOLATILE (*to_p))
4839 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4840
726a989a
RB
4841 if (TREE_CODE (*from_p) == CALL_EXPR)
4842 {
4843 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4844 instead of a GIMPLE_ASSIGN. */
f20ca725
RG
4845 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4846 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4847 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
726a989a 4848 assign = gimple_build_call_from_tree (*from_p);
f20ca725 4849 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
f6b64c35 4850 notice_special_calls (assign);
5de8da9b
AO
4851 if (!gimple_call_noreturn_p (assign))
4852 gimple_call_set_lhs (assign, *to_p);
f173837a 4853 }
726a989a 4854 else
c2255bc4
AH
4855 {
4856 assign = gimple_build_assign (*to_p, *from_p);
4857 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4858 }
f173837a 4859
726a989a 4860 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6de9cd9a 4861 {
2ad728d2
RG
4862 /* We should have got an SSA name from the start. */
4863 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
726a989a 4864 }
07beea0d 4865
6da8be89
MM
4866 gimplify_seq_add_stmt (pre_p, assign);
4867 gsi = gsi_last (*pre_p);
acf0174b
JJ
4868 /* Don't fold stmts inside of target construct. We'll do it
4869 during omplower pass instead. */
4870 struct gimplify_omp_ctx *ctx;
4871 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
4872 if (ctx->region_type == ORT_TARGET)
4873 break;
4874 if (ctx == NULL)
4875 fold_stmt (&gsi);
6da8be89 4876
726a989a
RB
4877 if (want_value)
4878 {
8f0fe813 4879 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
17ad5b5e 4880 return GS_OK;
6de9cd9a 4881 }
726a989a
RB
4882 else
4883 *expr_p = NULL;
6de9cd9a 4884
17ad5b5e 4885 return GS_ALL_DONE;
6de9cd9a
DN
4886}
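/* Illustrative sketch of the CALL_EXPR special case handled above
   (names are hypothetical): for the GENERIC assignment

       a = foo (x);

   no extra temporary is created; the call itself becomes the statement
   and 'a' is installed as its LHS, i.e. the emitted tuple is
   GIMPLE_CALL <foo, x> with lhs 'a' -- unless foo is noreturn, in
   which case the LHS is dropped.  */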
4887
ad19c4be
EB
4888/* Gimplify a comparison between two variable-sized objects. Do this
4889 with a call to BUILT_IN_MEMCMP. */
44de5aeb
RK
4890
4891static enum gimplify_status
4892gimplify_variable_sized_compare (tree *expr_p)
4893{
692ad9aa 4894 location_t loc = EXPR_LOCATION (*expr_p);
44de5aeb
RK
4895 tree op0 = TREE_OPERAND (*expr_p, 0);
4896 tree op1 = TREE_OPERAND (*expr_p, 1);
692ad9aa 4897 tree t, arg, dest, src, expr;
5039610b
SL
4898
4899 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4900 arg = unshare_expr (arg);
4901 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
db3927fb
AH
4902 src = build_fold_addr_expr_loc (loc, op1);
4903 dest = build_fold_addr_expr_loc (loc, op0);
e79983f4 4904 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
db3927fb 4905 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
692ad9aa
EB
4906
4907 expr
b4257cfc 4908 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
692ad9aa
EB
4909 SET_EXPR_LOCATION (expr, loc);
4910 *expr_p = expr;
44de5aeb
RK
4911
4912 return GS_OK;
4913}
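/* Illustrative sketch (hypothetical operands): for a comparison
   op0 == op1 between two variable-sized objects, the function above
   builds, modulo PLACEHOLDER_EXPR substitution in the size,

       __builtin_memcmp (&op0, &op1, TYPE_SIZE_UNIT (TREE_TYPE (op0))) == 0

   reusing the original comparison code (==, !=) against zero.  */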
4914
ad19c4be
EB
4915/* Gimplify a comparison between two aggregate objects of integral scalar
4916 mode as a comparison between the bitwise equivalent scalar values. */
61c25908
OH
4917
4918static enum gimplify_status
4919gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4920{
db3927fb 4921 location_t loc = EXPR_LOCATION (*expr_p);
61c25908
OH
4922 tree op0 = TREE_OPERAND (*expr_p, 0);
4923 tree op1 = TREE_OPERAND (*expr_p, 1);
4924
4925 tree type = TREE_TYPE (op0);
4926 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4927
db3927fb
AH
4928 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4929 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
61c25908
OH
4930
4931 *expr_p
db3927fb 4932 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
61c25908
OH
4933
4934 return GS_OK;
4935}
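/* Illustrative sketch: for two aggregates whose type has an integral
   scalar mode, e.g. an 8-byte struct on a 64-bit target, the operands
   are punned to the integer type of that mode and compared directly:

       VIEW_CONVERT_EXPR<uint64> (op0) == VIEW_CONVERT_EXPR<uint64> (op1)

   where 'uint64' stands for whatever lang_hooks.types.type_for_mode
   returns for the aggregate's mode.  */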
4936
ad19c4be
EB
4937/* Gimplify an expression sequence. This function gimplifies each
4938 expression and rewrites the original expression with the last
6de9cd9a
DN
4939 expression of the sequence in GIMPLE form.
4940
4941 PRE_P points to the list where the side effects for all the
4942 expressions in the sequence will be emitted.
d3147f64 4943
6de9cd9a 4944 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6de9cd9a
DN
4945
4946static enum gimplify_status
726a989a 4947gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a
DN
4948{
4949 tree t = *expr_p;
4950
4951 do
4952 {
4953 tree *sub_p = &TREE_OPERAND (t, 0);
4954
4955 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4956 gimplify_compound_expr (sub_p, pre_p, false);
4957 else
726a989a 4958 gimplify_stmt (sub_p, pre_p);
6de9cd9a
DN
4959
4960 t = TREE_OPERAND (t, 1);
4961 }
4962 while (TREE_CODE (t) == COMPOUND_EXPR);
4963
4964 *expr_p = t;
4965 if (want_value)
4966 return GS_OK;
4967 else
4968 {
726a989a 4969 gimplify_stmt (expr_p, pre_p);
6de9cd9a
DN
4970 return GS_ALL_DONE;
4971 }
4972}
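/* Illustrative sketch: gimplifying the C expression (f (), g (), x + 1)
   in value context emits f (); and g (); onto *PRE_P and leaves x + 1
   as the value of the whole COMPOUND_EXPR.  */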
4973
726a989a
RB
4974/* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4975 gimplify. After gimplification, EXPR_P will point to a new temporary
4976 that holds the original value of the SAVE_EXPR node.
6de9cd9a 4977
726a989a 4978 PRE_P points to the list where side effects that must happen before
ad19c4be 4979 *EXPR_P should be stored. */
6de9cd9a
DN
4980
4981static enum gimplify_status
726a989a 4982gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
4983{
4984 enum gimplify_status ret = GS_ALL_DONE;
4985 tree val;
4986
282899df 4987 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6de9cd9a
DN
4988 val = TREE_OPERAND (*expr_p, 0);
4989
7f5e6307
RH
4990 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4991 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
17ad5b5e 4992 {
7f5e6307
RH
4993 /* The operand may be a void-valued expression such as SAVE_EXPRs
4994 generated by the Java frontend for class initialization. It is
4995 being executed only for its side-effects. */
4996 if (TREE_TYPE (val) == void_type_node)
4997 {
4998 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4999 is_gimple_stmt, fb_none);
7f5e6307
RH
5000 val = NULL;
5001 }
5002 else
5003 val = get_initialized_tmp_var (val, pre_p, post_p);
5004
5005 TREE_OPERAND (*expr_p, 0) = val;
5006 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
17ad5b5e 5007 }
6de9cd9a 5008
7f5e6307
RH
5009 *expr_p = val;
5010
6de9cd9a
DN
5011 return ret;
5012}
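/* Illustrative sketch (D.1 is a made-up temporary): the first visit to
   SAVE_EXPR <n * 4> emits D.1 = n * 4;, stores D.1 back into the node
   and marks it resolved, so every later occurrence of the same
   SAVE_EXPR simply gimplifies to D.1 and the operand is evaluated
   exactly once.  */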
5013
ad19c4be 5014/* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6de9cd9a
DN
5015
5016 unary_expr
5017 : ...
5018 | '&' varname
5019 ...
5020
5021 PRE_P points to the list where side effects that must happen before
5022 *EXPR_P should be stored.
5023
5024 POST_P points to the list where side effects that must happen after
5025 *EXPR_P should be stored. */
5026
5027static enum gimplify_status
726a989a 5028gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5029{
5030 tree expr = *expr_p;
5031 tree op0 = TREE_OPERAND (expr, 0);
5032 enum gimplify_status ret;
db3927fb 5033 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
5034
5035 switch (TREE_CODE (op0))
5036 {
5037 case INDIRECT_REF:
67f23620 5038 do_indirect_ref:
6de9cd9a
DN
5039 /* Check if we are dealing with an expression of the form '&*ptr'.
5040 While the front end folds away '&*ptr' into 'ptr', these
5041 expressions may be generated internally by the compiler (e.g.,
5042 builtins like __builtin_va_end). */
67f23620
RH
5043 /* Caution: the silent array decomposition semantics we allow for
5044 ADDR_EXPR means we can't always discard the pair. */
c87ac7e8
AO
5045 /* Gimplification of the ADDR_EXPR operand may drop
5046 cv-qualification conversions, so make sure we add them if
5047 needed. */
67f23620
RH
5048 {
5049 tree op00 = TREE_OPERAND (op0, 0);
5050 tree t_expr = TREE_TYPE (expr);
5051 tree t_op00 = TREE_TYPE (op00);
5052
f4088621 5053 if (!useless_type_conversion_p (t_expr, t_op00))
db3927fb 5054 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
67f23620
RH
5055 *expr_p = op00;
5056 ret = GS_OK;
5057 }
6de9cd9a
DN
5058 break;
5059
44de5aeb
RK
5060 case VIEW_CONVERT_EXPR:
5061 /* Take the address of our operand and then convert it to the type of
af72267c
RK
5062 this ADDR_EXPR.
5063
5064 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5065 all clear. The impact of this transformation is even less clear. */
91804752
EB
5066
5067 /* If the operand is a useless conversion, look through it. Doing so
5068 guarantees that the ADDR_EXPR and its operand will remain of the
5069 same type. */
5070 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
317c0092 5071 op0 = TREE_OPERAND (op0, 0);
91804752 5072
db3927fb
AH
5073 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5074 build_fold_addr_expr_loc (loc,
5075 TREE_OPERAND (op0, 0)));
44de5aeb 5076 ret = GS_OK;
6de9cd9a
DN
5077 break;
5078
5079 default:
5080 /* We use fb_either here because the C frontend sometimes takes
5201931e
JM
5081 the address of a call that returns a struct; see
5082 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5083 the implied temporary explicit. */
936d04b6 5084
f76d6e6f 5085 /* Make the operand addressable. */
6de9cd9a 5086 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
e847cc68 5087 is_gimple_addressable, fb_either);
8b17cc05
RG
5088 if (ret == GS_ERROR)
5089 break;
67f23620 5090
f76d6e6f
EB
5091 /* Then mark it. Beware that it may not be possible to do so directly
5092 if a temporary has been created by the gimplification. */
5093 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
9e51aaf5 5094
8b17cc05 5095 op0 = TREE_OPERAND (expr, 0);
6de9cd9a 5096
8b17cc05
RG
5097 /* For various reasons, the gimplification of the expression
5098 may have made a new INDIRECT_REF. */
5099 if (TREE_CODE (op0) == INDIRECT_REF)
5100 goto do_indirect_ref;
5101
6b8b9e42
RG
5102 mark_addressable (TREE_OPERAND (expr, 0));
5103
5104 /* The FEs may end up building ADDR_EXPRs early on a decl with
5105 an incomplete type. Re-build ADDR_EXPRs in canonical form
5106 here. */
5107 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5108 *expr_p = build_fold_addr_expr (op0);
5109
8b17cc05 5110 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6b8b9e42
RG
5111 recompute_tree_invariant_for_addr_expr (*expr_p);
5112
5113 /* If we re-built the ADDR_EXPR add a conversion to the original type
5114 if required. */
5115 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5116 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
8b17cc05 5117
6de9cd9a
DN
5118 break;
5119 }
5120
6de9cd9a
DN
5121 return ret;
5122}
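/* Illustrative sketch: an internally built '&*p' collapses back to 'p'
   (with a conversion if the pointer types differ only trivially), while
   in the default case taking the address of a non-lvalue -- e.g. the
   result of a call returning a struct -- first forces the operand into
   an addressable temporary, roughly

       D.1 = f ();   ... &D.1 ...

   with D.1 a hypothetical temporary that is then marked addressable.  */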
5123
5124/* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5125 value; output operands should be a gimple lvalue. */
5126
5127static enum gimplify_status
726a989a 5128gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a 5129{
726a989a
RB
5130 tree expr;
5131 int noutputs;
5132 const char **oconstraints;
6de9cd9a
DN
5133 int i;
5134 tree link;
5135 const char *constraint;
5136 bool allows_mem, allows_reg, is_inout;
5137 enum gimplify_status ret, tret;
726a989a 5138 gimple stmt;
9771b263
DN
5139 vec<tree, va_gc> *inputs;
5140 vec<tree, va_gc> *outputs;
5141 vec<tree, va_gc> *clobbers;
5142 vec<tree, va_gc> *labels;
726a989a 5143 tree link_next;
b8698a0f 5144
726a989a
RB
5145 expr = *expr_p;
5146 noutputs = list_length (ASM_OUTPUTS (expr));
5147 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5148
9771b263
DN
5149 inputs = NULL;
5150 outputs = NULL;
5151 clobbers = NULL;
5152 labels = NULL;
6de9cd9a 5153
6de9cd9a 5154 ret = GS_ALL_DONE;
726a989a
RB
5155 link_next = NULL_TREE;
5156 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6de9cd9a 5157 {
2c68ba8e 5158 bool ok;
726a989a
RB
5159 size_t constraint_len;
5160
5161 link_next = TREE_CHAIN (link);
5162
5163 oconstraints[i]
5164 = constraint
6de9cd9a 5165 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6db081f1
AP
5166 constraint_len = strlen (constraint);
5167 if (constraint_len == 0)
5168 continue;
6de9cd9a 5169
2c68ba8e
LB
5170 ok = parse_output_constraint (&constraint, i, 0, 0,
5171 &allows_mem, &allows_reg, &is_inout);
5172 if (!ok)
5173 {
5174 ret = GS_ERROR;
5175 is_inout = false;
5176 }
6de9cd9a
DN
5177
5178 if (!allows_reg && allows_mem)
936d04b6 5179 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
5180
5181 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5182 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5183 fb_lvalue | fb_mayfail);
5184 if (tret == GS_ERROR)
5185 {
5186 error ("invalid lvalue in asm output %d", i);
5187 ret = tret;
5188 }
5189
9771b263 5190 vec_safe_push (outputs, link);
726a989a
RB
5191 TREE_CHAIN (link) = NULL_TREE;
5192
6de9cd9a
DN
5193 if (is_inout)
5194 {
5195 /* An input/output operand. To give the optimizers more
5196 flexibility, split it into separate input and output
5197 operands. */
5198 tree input;
5199 char buf[10];
6de9cd9a
DN
5200
5201 /* Turn the in/out constraint into an output constraint. */
5202 char *p = xstrdup (constraint);
5203 p[0] = '=';
5204 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6de9cd9a
DN
5205
5206 /* And add a matching input constraint. */
5207 if (allows_reg)
5208 {
5209 sprintf (buf, "%d", i);
372d72d9
JJ
5210
5211 /* If there are multiple alternatives in the constraint,
5212 handle each of them individually. Those that allow register
5213 will be replaced with operand number, the others will stay
5214 unchanged. */
5215 if (strchr (p, ',') != NULL)
5216 {
5217 size_t len = 0, buflen = strlen (buf);
5218 char *beg, *end, *str, *dst;
5219
5220 for (beg = p + 1;;)
5221 {
5222 end = strchr (beg, ',');
5223 if (end == NULL)
5224 end = strchr (beg, '\0');
5225 if ((size_t) (end - beg) < buflen)
5226 len += buflen + 1;
5227 else
5228 len += end - beg + 1;
5229 if (*end)
5230 beg = end + 1;
5231 else
5232 break;
5233 }
5234
858904db 5235 str = (char *) alloca (len);
372d72d9
JJ
5236 for (beg = p + 1, dst = str;;)
5237 {
5238 const char *tem;
5239 bool mem_p, reg_p, inout_p;
5240
5241 end = strchr (beg, ',');
5242 if (end)
5243 *end = '\0';
5244 beg[-1] = '=';
5245 tem = beg - 1;
5246 parse_output_constraint (&tem, i, 0, 0,
5247 &mem_p, &reg_p, &inout_p);
5248 if (dst != str)
5249 *dst++ = ',';
5250 if (reg_p)
5251 {
5252 memcpy (dst, buf, buflen);
5253 dst += buflen;
5254 }
5255 else
5256 {
5257 if (end)
5258 len = end - beg;
5259 else
5260 len = strlen (beg);
5261 memcpy (dst, beg, len);
5262 dst += len;
5263 }
5264 if (end)
5265 beg = end + 1;
5266 else
5267 break;
5268 }
5269 *dst = '\0';
5270 input = build_string (dst - str, str);
5271 }
5272 else
5273 input = build_string (strlen (buf), buf);
6de9cd9a
DN
5274 }
5275 else
5276 input = build_string (constraint_len - 1, constraint + 1);
372d72d9
JJ
5277
5278 free (p);
5279
6de9cd9a
DN
5280 input = build_tree_list (build_tree_list (NULL_TREE, input),
5281 unshare_expr (TREE_VALUE (link)));
5282 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5283 }
5284 }
5285
726a989a
RB
5286 link_next = NULL_TREE;
5287 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6de9cd9a 5288 {
726a989a
RB
5289 link_next = TREE_CHAIN (link);
5290 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6de9cd9a
DN
5291 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5292 oconstraints, &allows_mem, &allows_reg);
5293
f497c16c
JJ
5294 /* If we can't make copies, we can only accept memory. */
5295 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5296 {
5297 if (allows_mem)
5298 allows_reg = 0;
5299 else
5300 {
5301 error ("impossible constraint in %<asm%>");
5302 error ("non-memory input %d must stay in memory", i);
5303 return GS_ERROR;
5304 }
5305 }
5306
6de9cd9a
DN
5307 /* If the operand is a memory input, it should be an lvalue. */
5308 if (!allows_reg && allows_mem)
5309 {
502c5084
JJ
5310 tree inputv = TREE_VALUE (link);
5311 STRIP_NOPS (inputv);
5312 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5313 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5314 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5315 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5316 TREE_VALUE (link) = error_mark_node;
6de9cd9a
DN
5317 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5318 is_gimple_lvalue, fb_lvalue | fb_mayfail);
936d04b6 5319 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
5320 if (tret == GS_ERROR)
5321 {
6a3799eb
AH
5322 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5323 input_location = EXPR_LOCATION (TREE_VALUE (link));
6de9cd9a
DN
5324 error ("memory input %d is not directly addressable", i);
5325 ret = tret;
5326 }
5327 }
5328 else
5329 {
5330 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
e670d9e4 5331 is_gimple_asm_val, fb_rvalue);
6de9cd9a
DN
5332 if (tret == GS_ERROR)
5333 ret = tret;
5334 }
726a989a
RB
5335
5336 TREE_CHAIN (link) = NULL_TREE;
9771b263 5337 vec_safe_push (inputs, link);
6de9cd9a 5338 }
b8698a0f 5339
ca081cc8
EB
5340 link_next = NULL_TREE;
5341 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5342 {
5343 link_next = TREE_CHAIN (link);
5344 TREE_CHAIN (link) = NULL_TREE;
5345 vec_safe_push (clobbers, link);
5346 }
1c384bf1 5347
ca081cc8
EB
5348 link_next = NULL_TREE;
5349 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5350 {
5351 link_next = TREE_CHAIN (link);
5352 TREE_CHAIN (link) = NULL_TREE;
5353 vec_safe_push (labels, link);
5354 }
726a989a 5355
a406865a
RG
5356 /* Do not add ASMs with errors to the gimple IL stream. */
5357 if (ret != GS_ERROR)
5358 {
5359 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
1c384bf1 5360 inputs, outputs, clobbers, labels);
726a989a 5361
a406865a
RG
5362 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5363 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5364
5365 gimplify_seq_add_stmt (pre_p, stmt);
5366 }
6de9cd9a
DN
5367
5368 return ret;
5369}
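/* Illustrative sketch of the in/out operand splitting above, using an
   x86-flavoured asm purely as an example:

       asm ("incl %0" : "+r" (x));

   The "+r" output constraint is rewritten to "=r" and a matching input
   whose constraint is the operand number is appended, much as if the
   user had written

       asm ("incl %0" : "=r" (x) : "0" (x));  */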
5370
5371/* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
726a989a 5372 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6de9cd9a
DN
5373 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5374 return to this function.
5375
5376 FIXME should we complexify the prequeue handling instead? Or use flags
5377 for all the cleanups and let the optimizer tighten them up? The current
5378 code seems pretty fragile; it will break on a cleanup within any
5379 non-conditional nesting. But any such nesting would be broken, anyway;
5380 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5381 and continues out of it. We can do that at the RTL level, though, so
5382 having an optimizer to tighten up try/finally regions would be a Good
5383 Thing. */
5384
5385static enum gimplify_status
726a989a 5386gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 5387{
726a989a
RB
5388 gimple_stmt_iterator iter;
5389 gimple_seq body_sequence = NULL;
6de9cd9a 5390
325c3691 5391 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6de9cd9a
DN
5392
5393 /* We only care about the number of conditions between the innermost
df77f454
JM
5394 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5395 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6de9cd9a 5396 int old_conds = gimplify_ctxp->conditions;
726a989a 5397 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
32be32af 5398 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6de9cd9a 5399 gimplify_ctxp->conditions = 0;
726a989a 5400 gimplify_ctxp->conditional_cleanups = NULL;
32be32af 5401 gimplify_ctxp->in_cleanup_point_expr = true;
6de9cd9a 5402
726a989a 5403 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6de9cd9a
DN
5404
5405 gimplify_ctxp->conditions = old_conds;
df77f454 5406 gimplify_ctxp->conditional_cleanups = old_cleanups;
32be32af 5407 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6de9cd9a 5408
726a989a 5409 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6de9cd9a 5410 {
726a989a 5411 gimple wce = gsi_stmt (iter);
6de9cd9a 5412
726a989a 5413 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6de9cd9a 5414 {
726a989a 5415 if (gsi_one_before_end_p (iter))
6de9cd9a 5416 {
726a989a
RB
5417 /* Note that gsi_insert_seq_before and gsi_remove do not
5418 scan operands, unlike some other sequence mutators. */
ae0595b0
RG
5419 if (!gimple_wce_cleanup_eh_only (wce))
5420 gsi_insert_seq_before_without_update (&iter,
5421 gimple_wce_cleanup (wce),
5422 GSI_SAME_STMT);
726a989a 5423 gsi_remove (&iter, true);
6de9cd9a
DN
5424 break;
5425 }
5426 else
5427 {
82d6e6fc 5428 gimple gtry;
726a989a
RB
5429 gimple_seq seq;
5430 enum gimple_try_flags kind;
40aac948 5431
726a989a
RB
5432 if (gimple_wce_cleanup_eh_only (wce))
5433 kind = GIMPLE_TRY_CATCH;
40aac948 5434 else
726a989a
RB
5435 kind = GIMPLE_TRY_FINALLY;
5436 seq = gsi_split_seq_after (iter);
5437
82d6e6fc 5438 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
726a989a
RB
5439 /* Do not use gsi_replace here, as it may scan operands.
5440 We want to do a simple structural modification only. */
355a7673
MM
5441 gsi_set_stmt (&iter, gtry);
5442 iter = gsi_start (gtry->gimple_try.eval);
6de9cd9a
DN
5443 }
5444 }
5445 else
726a989a 5446 gsi_next (&iter);
6de9cd9a
DN
5447 }
5448
726a989a 5449 gimplify_seq_add_seq (pre_p, body_sequence);
6de9cd9a
DN
5450 if (temp)
5451 {
5452 *expr_p = temp;
6de9cd9a
DN
5453 return GS_OK;
5454 }
5455 else
5456 {
726a989a 5457 *expr_p = NULL;
6de9cd9a
DN
5458 return GS_ALL_DONE;
5459 }
5460}
5461
5462/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
726a989a
RB
5463 is the cleanup action required. EH_ONLY is true if the cleanup should
5464 only be executed if an exception is thrown, not on normal exit. */
6de9cd9a
DN
5465
5466static void
726a989a 5467gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
6de9cd9a 5468{
726a989a
RB
5469 gimple wce;
5470 gimple_seq cleanup_stmts = NULL;
6de9cd9a
DN
5471
5472 /* Errors can result in improperly nested cleanups. Which results in
726a989a 5473 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
1da2ed5f 5474 if (seen_error ())
6de9cd9a
DN
5475 return;
5476
5477 if (gimple_conditional_context ())
5478 {
5479 /* If we're in a conditional context, this is more complex. We only
5480 want to run the cleanup if we actually ran the initialization that
5481 necessitates it, but we want to run it after the end of the
5482 conditional context. So we wrap the try/finally around the
5483 condition and use a flag to determine whether or not to actually
5484 run the destructor. Thus
5485
5486 test ? f(A()) : 0
5487
5488 becomes (approximately)
5489
5490 flag = 0;
5491 try {
5492 if (test) { A::A(temp); flag = 1; val = f(temp); }
5493 else { val = 0; }
5494 } finally {
5495 if (flag) A::~A(temp);
5496 }
5497 val
5498 */
6de9cd9a 5499 tree flag = create_tmp_var (boolean_type_node, "cleanup");
726a989a
RB
5500 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5501 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5502
b4257cfc 5503 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
726a989a
RB
5504 gimplify_stmt (&cleanup, &cleanup_stmts);
5505 wce = gimple_build_wce (cleanup_stmts);
5506
5507 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5508 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5509 gimplify_seq_add_stmt (pre_p, ftrue);
6de9cd9a
DN
5510
5511 /* Because of this manipulation, and the EH edges that jump
5512 threading cannot redirect, the temporary (VAR) will appear
5513 to be used uninitialized. Don't warn. */
5514 TREE_NO_WARNING (var) = 1;
5515 }
5516 else
5517 {
726a989a
RB
5518 gimplify_stmt (&cleanup, &cleanup_stmts);
5519 wce = gimple_build_wce (cleanup_stmts);
5520 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5521 gimplify_seq_add_stmt (pre_p, wce);
6de9cd9a 5522 }
6de9cd9a
DN
5523}
5524
5525/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5526
5527static enum gimplify_status
726a989a 5528gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5529{
5530 tree targ = *expr_p;
5531 tree temp = TARGET_EXPR_SLOT (targ);
5532 tree init = TARGET_EXPR_INITIAL (targ);
5533 enum gimplify_status ret;
5534
5535 if (init)
5536 {
d0ad58f9
JM
5537 tree cleanup = NULL_TREE;
5538
3a5b9284 5539 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
786025ea
JJ
5540 to the temps list. Handle also variable length TARGET_EXPRs. */
5541 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5542 {
5543 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5544 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5545 gimplify_vla_decl (temp, pre_p);
5546 }
5547 else
5548 gimple_add_tmp_var (temp);
6de9cd9a 5549
3a5b9284
RH
5550 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5551 expression is supposed to initialize the slot. */
5552 if (VOID_TYPE_P (TREE_TYPE (init)))
5553 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5554 else
325c3691 5555 {
726a989a
RB
5556 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5557 init = init_expr;
5558 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5559 init = NULL;
5560 ggc_free (init_expr);
325c3691 5561 }
3a5b9284 5562 if (ret == GS_ERROR)
abc67de1
SM
5563 {
5564 /* PR c++/28266 Make sure this is expanded only once. */
5565 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5566 return GS_ERROR;
5567 }
726a989a
RB
5568 if (init)
5569 gimplify_and_add (init, pre_p);
6de9cd9a
DN
5570
5571 /* If needed, push the cleanup for the temp. */
5572 if (TARGET_EXPR_CLEANUP (targ))
d0ad58f9
JM
5573 {
5574 if (CLEANUP_EH_ONLY (targ))
5575 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5576 CLEANUP_EH_ONLY (targ), pre_p);
5577 else
5578 cleanup = TARGET_EXPR_CLEANUP (targ);
5579 }
5580
5581 /* Add a clobber for the temporary going out of scope, like
5582 gimplify_bind_expr. */
32be32af 5583 if (gimplify_ctxp->in_cleanup_point_expr
87e2a8fd
XDL
5584 && needs_to_live_in_memory (temp)
5585 && flag_stack_reuse == SR_ALL)
d0ad58f9 5586 {
9771b263
DN
5587 tree clobber = build_constructor (TREE_TYPE (temp),
5588 NULL);
d0ad58f9
JM
5589 TREE_THIS_VOLATILE (clobber) = true;
5590 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5591 if (cleanup)
5592 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5593 clobber);
5594 else
5595 cleanup = clobber;
5596 }
5597
5598 if (cleanup)
5599 gimple_push_cleanup (temp, cleanup, false, pre_p);
6de9cd9a
DN
5600
5601 /* Only expand this once. */
5602 TREE_OPERAND (targ, 3) = init;
5603 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5604 }
282899df 5605 else
6de9cd9a 5606 /* We should have expanded this before. */
282899df 5607 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6de9cd9a
DN
5608
5609 *expr_p = temp;
5610 return GS_OK;
5611}
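/* Illustrative sketch: for a TARGET_EXPR whose slot T must live in
   memory, under -fstack-reuse=all the code above also pushes a cleanup
   that clobbers the slot when it goes out of scope, conceptually

       T = {CLOBBER};

   appended after any user-visible cleanup (e.g. a C++ destructor), so
   later passes may reuse T's stack space.  */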
5612
5613/* Gimplification of expression trees. */
5614
726a989a
RB
5615/* Gimplify an expression which appears at statement context. The
5616 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5617 NULL, a new sequence is allocated.
6de9cd9a 5618
726a989a
RB
5619 Return true if we actually added a statement to the queue. */
5620
5621bool
5622gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6de9cd9a 5623{
726a989a 5624 gimple_seq_node last;
6de9cd9a 5625
726a989a
RB
5626 last = gimple_seq_last (*seq_p);
5627 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5628 return last != gimple_seq_last (*seq_p);
6de9cd9a
DN
5629}
5630
953ff289
DN
5631/* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
5632 to CTX. If entries already exist, force them to be some flavor of private.
5633 If there is no enclosing parallel, do nothing. */
5634
5635void
5636omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5637{
5638 splay_tree_node n;
5639
5640 if (decl == NULL || !DECL_P (decl))
5641 return;
5642
5643 do
5644 {
5645 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5646 if (n != NULL)
5647 {
5648 if (n->value & GOVD_SHARED)
5649 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
acf0174b
JJ
5650 else if (n->value & GOVD_MAP)
5651 n->value |= GOVD_MAP_TO_ONLY;
953ff289
DN
5652 else
5653 return;
5654 }
acf0174b
JJ
5655 else if (ctx->region_type == ORT_TARGET)
5656 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
74bf76ed 5657 else if (ctx->region_type != ORT_WORKSHARE
acf0174b
JJ
5658 && ctx->region_type != ORT_SIMD
5659 && ctx->region_type != ORT_TARGET_DATA)
953ff289
DN
5660 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5661
5662 ctx = ctx->outer_context;
5663 }
5664 while (ctx);
5665}
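/* Illustrative sketch: for a C99 VLA used inside '#pragma omp parallel',
   the gimplified size temporaries (DECL_SIZE_UNIT and friends, say a
   hypothetical D.1 = n * 4) are what get registered here, so enclosing
   parallels receive them as firstprivate rather than shared.  */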
5666
5667/* Similarly for each of the type sizes of TYPE. */
5668
5669static void
5670omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5671{
5672 if (type == NULL || type == error_mark_node)
5673 return;
5674 type = TYPE_MAIN_VARIANT (type);
5675
5676 if (pointer_set_insert (ctx->privatized_types, type))
5677 return;
5678
5679 switch (TREE_CODE (type))
5680 {
5681 case INTEGER_TYPE:
5682 case ENUMERAL_TYPE:
5683 case BOOLEAN_TYPE:
953ff289 5684 case REAL_TYPE:
325217ed 5685 case FIXED_POINT_TYPE:
953ff289
DN
5686 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5687 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5688 break;
5689
5690 case ARRAY_TYPE:
5691 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5692 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5693 break;
5694
5695 case RECORD_TYPE:
5696 case UNION_TYPE:
5697 case QUAL_UNION_TYPE:
5698 {
5699 tree field;
910ad8de 5700 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
953ff289
DN
5701 if (TREE_CODE (field) == FIELD_DECL)
5702 {
5703 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5704 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5705 }
5706 }
5707 break;
5708
5709 case POINTER_TYPE:
5710 case REFERENCE_TYPE:
5711 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5712 break;
5713
5714 default:
5715 break;
5716 }
5717
5718 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5719 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5720 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5721}
5722
5723/* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5724
5725static void
5726omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5727{
5728 splay_tree_node n;
5729 unsigned int nflags;
5730 tree t;
5731
b504a918 5732 if (error_operand_p (decl))
953ff289
DN
5733 return;
5734
5735 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5736 there are constructors involved somewhere. */
5737 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5738 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5739 flags |= GOVD_SEEN;
5740
5741 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
acf0174b 5742 if (n != NULL && n->value != GOVD_ALIGNED)
953ff289
DN
5743 {
5744 /* We shouldn't be re-adding the decl with the same data
5745 sharing class. */
5746 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5747 /* The only combination of data sharing classes we should see is
5748 FIRSTPRIVATE and LASTPRIVATE. */
5749 nflags = n->value | flags;
5750 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
74bf76ed
JJ
5751 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
5752 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
953ff289
DN
5753 n->value = nflags;
5754 return;
5755 }
5756
5757 /* When adding a variable-sized variable, we have to handle all sorts
b8698a0f 5758 of additional bits of data: the pointer replacement variable, and
953ff289 5759 the parameters of the type. */
4c923c28 5760 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
953ff289
DN
5761 {
5762 /* Add the pointer replacement variable as PRIVATE if the variable
5763 replacement is private, else FIRSTPRIVATE since we'll need the
5764 address of the original variable either for SHARED, or for the
5765 copy into or out of the context. */
5766 if (!(flags & GOVD_LOCAL))
5767 {
acf0174b
JJ
5768 nflags = flags & GOVD_MAP
5769 ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
5770 : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
953ff289
DN
5771 nflags |= flags & GOVD_SEEN;
5772 t = DECL_VALUE_EXPR (decl);
5773 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5774 t = TREE_OPERAND (t, 0);
5775 gcc_assert (DECL_P (t));
5776 omp_add_variable (ctx, t, nflags);
5777 }
5778
5779 /* Add all of the variable and type parameters (which should have
5780 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5781 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5782 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5783 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5784
5785 /* The variable-sized variable itself is never SHARED, only some form
5786 of PRIVATE. The sharing would take place via the pointer variable
5787 which we remapped above. */
5788 if (flags & GOVD_SHARED)
5789 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5790 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5791
b8698a0f 5792 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
953ff289
DN
5793 alloca statement we generate for the variable, so make sure it
5794 is available. This isn't automatically needed for the SHARED
4288fea2
JJ
5795 case, since we won't be allocating local storage then.
5796 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5797 in this case omp_notice_variable will be called later
5798 on when it is gimplified. */
acf0174b 5799 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
423ed416 5800 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
953ff289
DN
5801 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5802 }
acf0174b
JJ
5803 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5804 && lang_hooks.decls.omp_privatize_by_reference (decl))
953ff289 5805 {
953ff289
DN
5806 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5807
5808 /* Similar to the direct variable sized case above, we'll need the
5809 size of references being privatized. */
5810 if ((flags & GOVD_SHARED) == 0)
5811 {
5812 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4c923c28 5813 if (TREE_CODE (t) != INTEGER_CST)
953ff289
DN
5814 omp_notice_variable (ctx, t, true);
5815 }
5816 }
5817
74bf76ed
JJ
5818 if (n != NULL)
5819 n->value |= flags;
5820 else
5821 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
953ff289
DN
5822}
5823
f22f4340
JJ
5824/* Notice a threadprivate variable DECL used in OpenMP context CTX.
5825 This just prints out diagnostics about threadprivate variable uses
5826 in untied tasks and target regions. If DECL2 is non-NULL, prevent this warning
5827 on that variable. */
5828
5829static bool
5830omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5831 tree decl2)
5832{
5833 splay_tree_node n;
acf0174b
JJ
5834 struct gimplify_omp_ctx *octx;
5835
5836 for (octx = ctx; octx; octx = octx->outer_context)
5837 if (octx->region_type == ORT_TARGET)
5838 {
5839 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5840 if (n == NULL)
5841 {
5842 error ("threadprivate variable %qE used in target region",
5843 DECL_NAME (decl));
5844 error_at (octx->location, "enclosing target region");
5845 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5846 }
5847 if (decl2)
5848 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5849 }
f22f4340
JJ
5850
5851 if (ctx->region_type != ORT_UNTIED_TASK)
5852 return false;
5853 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5854 if (n == NULL)
5855 {
ad19c4be
EB
5856 error ("threadprivate variable %qE used in untied task",
5857 DECL_NAME (decl));
f22f4340
JJ
5858 error_at (ctx->location, "enclosing task");
5859 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5860 }
5861 if (decl2)
5862 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5863 return false;
5864}
5865
953ff289
DN
5866/* Record the fact that DECL was used within the OpenMP context CTX.
5867 IN_CODE is true when real code uses DECL, and false when we should
5868 merely emit default(none) errors. Return true if DECL is going to
5869 be remapped and thus DECL shouldn't be gimplified into its
5870 DECL_VALUE_EXPR (if any). */
5871
5872static bool
5873omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5874{
5875 splay_tree_node n;
5876 unsigned flags = in_code ? GOVD_SEEN : 0;
5877 bool ret = false, shared;
5878
b504a918 5879 if (error_operand_p (decl))
953ff289
DN
5880 return false;
5881
5882 /* Threadprivate variables are predetermined. */
5883 if (is_global_var (decl))
5884 {
5885 if (DECL_THREAD_LOCAL_P (decl))
f22f4340 5886 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
953ff289
DN
5887
5888 if (DECL_HAS_VALUE_EXPR_P (decl))
5889 {
5890 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5891
5892 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
f22f4340 5893 return omp_notice_threadprivate_variable (ctx, decl, value);
953ff289
DN
5894 }
5895 }
5896
5897 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
acf0174b
JJ
5898 if (ctx->region_type == ORT_TARGET)
5899 {
5900 if (n == NULL)
5901 {
5902 if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
5903 {
5904 error ("%qD referenced in target region does not have "
5905 "a mappable type", decl);
5906 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
5907 }
5908 else
5909 omp_add_variable (ctx, decl, GOVD_MAP | flags);
5910 }
5911 else
5912 n->value |= flags;
5913 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
5914 goto do_outer;
5915 }
5916
953ff289
DN
5917 if (n == NULL)
5918 {
5919 enum omp_clause_default_kind default_kind, kind;
a68ab351 5920 struct gimplify_omp_ctx *octx;
953ff289 5921
74bf76ed 5922 if (ctx->region_type == ORT_WORKSHARE
acf0174b
JJ
5923 || ctx->region_type == ORT_SIMD
5924 || ctx->region_type == ORT_TARGET_DATA)
953ff289
DN
5925 goto do_outer;
5926
5927 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5928 remapped firstprivate instead of shared. To some extent this is
5929 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5930 default_kind = ctx->default_kind;
5931 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5932 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5933 default_kind = kind;
5934
5935 switch (default_kind)
5936 {
5937 case OMP_CLAUSE_DEFAULT_NONE:
f22f4340 5938 if ((ctx->region_type & ORT_TASK) != 0)
acf0174b
JJ
5939 {
5940 error ("%qE not specified in enclosing task",
5941 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5942 error_at (ctx->location, "enclosing task");
5943 }
5944 else if (ctx->region_type == ORT_TEAMS)
5945 {
5946 error ("%qE not specified in enclosing teams construct",
5947 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5948 error_at (ctx->location, "enclosing teams construct");
5949 }
f22f4340 5950 else
acf0174b
JJ
5951 {
5952 error ("%qE not specified in enclosing parallel",
5953 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5954 error_at (ctx->location, "enclosing parallel");
5955 }
953ff289
DN
5956 /* FALLTHRU */
5957 case OMP_CLAUSE_DEFAULT_SHARED:
5958 flags |= GOVD_SHARED;
5959 break;
5960 case OMP_CLAUSE_DEFAULT_PRIVATE:
5961 flags |= GOVD_PRIVATE;
5962 break;
a68ab351
JJ
5963 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5964 flags |= GOVD_FIRSTPRIVATE;
5965 break;
5966 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5967 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
f22f4340 5968 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
a68ab351
JJ
5969 if (ctx->outer_context)
5970 omp_notice_variable (ctx->outer_context, decl, in_code);
5971 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5972 {
5973 splay_tree_node n2;
5974
acf0174b
JJ
5975 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
5976 continue;
a68ab351
JJ
5977 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5978 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5979 {
5980 flags |= GOVD_FIRSTPRIVATE;
5981 break;
5982 }
acf0174b 5983 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
a68ab351
JJ
5984 break;
5985 }
5986 if (flags & GOVD_FIRSTPRIVATE)
5987 break;
5988 if (octx == NULL
5989 && (TREE_CODE (decl) == PARM_DECL
5990 || (!is_global_var (decl)
5991 && DECL_CONTEXT (decl) == current_function_decl)))
5992 {
5993 flags |= GOVD_FIRSTPRIVATE;
5994 break;
5995 }
5996 flags |= GOVD_SHARED;
5997 break;
953ff289
DN
5998 default:
5999 gcc_unreachable ();
6000 }
6001
a68ab351
JJ
6002 if ((flags & GOVD_PRIVATE)
6003 && lang_hooks.decls.omp_private_outer_ref (decl))
6004 flags |= GOVD_PRIVATE_OUTER_REF;
6005
953ff289
DN
6006 omp_add_variable (ctx, decl, flags);
6007
6008 shared = (flags & GOVD_SHARED) != 0;
6009 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6010 goto do_outer;
6011 }
6012
3ad6b266
JJ
6013 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
6014 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
6015 && DECL_SIZE (decl)
6016 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6017 {
6018 splay_tree_node n2;
6019 tree t = DECL_VALUE_EXPR (decl);
6020 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6021 t = TREE_OPERAND (t, 0);
6022 gcc_assert (DECL_P (t));
6023 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6024 n2->value |= GOVD_SEEN;
6025 }
6026
953ff289
DN
6027 shared = ((flags | n->value) & GOVD_SHARED) != 0;
6028 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6029
6030 /* If nothing changed, there's nothing left to do. */
6031 if ((n->value & flags) == flags)
6032 return ret;
6033 flags |= n->value;
6034 n->value = flags;
6035
6036 do_outer:
6037 /* If the variable is private in the current context, then we don't
6038 need to propagate anything to an outer context. */
a68ab351 6039 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
953ff289
DN
6040 return ret;
6041 if (ctx->outer_context
6042 && omp_notice_variable (ctx->outer_context, decl, in_code))
6043 return true;
6044 return ret;
6045}
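/* Illustrative sketch of the default(none) path above:

       int x = 0;
       #pragma omp parallel default(none)
       x++;

   yields "'x' not specified in enclosing parallel" followed by the
   "enclosing parallel" note, after which x still falls through to
   GOVD_SHARED so compilation can continue.  */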
6046
6047/* Verify that DECL is private within CTX. If there's specific information
6048 to the contrary in the innermost scope, generate an error. */
6049
6050static bool
74bf76ed 6051omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, bool simd)
953ff289
DN
6052{
6053 splay_tree_node n;
6054
6055 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6056 if (n != NULL)
6057 {
6058 if (n->value & GOVD_SHARED)
6059 {
6060 if (ctx == gimplify_omp_ctxp)
f6a5ffbf 6061 {
74bf76ed
JJ
6062 if (simd)
6063 error ("iteration variable %qE is predetermined linear",
6064 DECL_NAME (decl));
6065 else
6066 error ("iteration variable %qE should be private",
6067 DECL_NAME (decl));
f6a5ffbf
JJ
6068 n->value = GOVD_PRIVATE;
6069 return true;
6070 }
6071 else
6072 return false;
953ff289 6073 }
761041be
JJ
6074 else if ((n->value & GOVD_EXPLICIT) != 0
6075 && (ctx == gimplify_omp_ctxp
a68ab351 6076 || (ctx->region_type == ORT_COMBINED_PARALLEL
761041be
JJ
6077 && gimplify_omp_ctxp->outer_context == ctx)))
6078 {
6079 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4f1e4960
JM
6080 error ("iteration variable %qE should not be firstprivate",
6081 DECL_NAME (decl));
761041be 6082 else if ((n->value & GOVD_REDUCTION) != 0)
4f1e4960
JM
6083 error ("iteration variable %qE should not be reduction",
6084 DECL_NAME (decl));
74bf76ed
JJ
6085 else if (simd && (n->value & GOVD_LASTPRIVATE) != 0)
6086 error ("iteration variable %qE should not be lastprivate",
6087 DECL_NAME (decl));
6088 else if (simd && (n->value & GOVD_PRIVATE) != 0)
6089 error ("iteration variable %qE should not be private",
6090 DECL_NAME (decl));
6091 else if (simd && (n->value & GOVD_LINEAR) != 0)
6092 error ("iteration variable %qE is predetermined linear",
6093 DECL_NAME (decl));
761041be 6094 }
ca2b1311
JJ
6095 return (ctx == gimplify_omp_ctxp
6096 || (ctx->region_type == ORT_COMBINED_PARALLEL
6097 && gimplify_omp_ctxp->outer_context == ctx));
953ff289
DN
6098 }
6099
74bf76ed
JJ
6100 if (ctx->region_type != ORT_WORKSHARE
6101 && ctx->region_type != ORT_SIMD)
953ff289 6102 return false;
f6a5ffbf 6103 else if (ctx->outer_context)
74bf76ed 6104 return omp_is_private (ctx->outer_context, decl, simd);
ca2b1311 6105 return false;
953ff289
DN
6106}
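/* Illustrative sketch: these checks guard the loop iteration variable
   of an OpenMP for/simd; listing it in a conflicting clause of the
   same construct produces errors such as "iteration variable 'i'
   should not be firstprivate" or, for simd, "... is predetermined
   linear", while a shared entry in the innermost context is demoted
   to GOVD_PRIVATE after the diagnostic.  */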
6107
07b7aade
JJ
6108/* Return true if DECL is private within a parallel region
6109 that binds to the current construct's context or in parallel
6110 region's REDUCTION clause. */
6111
6112static bool
6113omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
6114{
6115 splay_tree_node n;
6116
6117 do
6118 {
6119 ctx = ctx->outer_context;
6120 if (ctx == NULL)
6121 return !(is_global_var (decl)
6122 /* References might be private, but might be shared too. */
6123 || lang_hooks.decls.omp_privatize_by_reference (decl));
6124
acf0174b
JJ
6125 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
6126 continue;
6127
07b7aade
JJ
6128 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6129 if (n != NULL)
6130 return (n->value & GOVD_SHARED) == 0;
6131 }
74bf76ed
JJ
6132 while (ctx->region_type == ORT_WORKSHARE
6133 || ctx->region_type == ORT_SIMD);
07b7aade
JJ
6134 return false;
6135}
6136
953ff289
DN
6137/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
6138 context and into the previous omp contexts. */
6139
6140static void
726a989a 6141gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
a68ab351 6142 enum omp_region_type region_type)
953ff289
DN
6143{
6144 struct gimplify_omp_ctx *ctx, *outer_ctx;
d406b663 6145 struct gimplify_ctx gctx;
953ff289
DN
6146 tree c;
6147
a68ab351 6148 ctx = new_omp_context (region_type);
953ff289
DN
6149 outer_ctx = ctx->outer_context;
6150
6151 while ((c = *list_p) != NULL)
6152 {
953ff289
DN
6153 bool remove = false;
6154 bool notice_outer = true;
07b7aade 6155 const char *check_non_private = NULL;
953ff289
DN
6156 unsigned int flags;
6157 tree decl;
6158
aaf46ef9 6159 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
6160 {
6161 case OMP_CLAUSE_PRIVATE:
6162 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
a68ab351
JJ
6163 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6164 {
6165 flags |= GOVD_PRIVATE_OUTER_REF;
6166 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6167 }
6168 else
6169 notice_outer = false;
953ff289
DN
6170 goto do_add;
6171 case OMP_CLAUSE_SHARED:
6172 flags = GOVD_SHARED | GOVD_EXPLICIT;
6173 goto do_add;
6174 case OMP_CLAUSE_FIRSTPRIVATE:
6175 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
07b7aade 6176 check_non_private = "firstprivate";
953ff289
DN
6177 goto do_add;
6178 case OMP_CLAUSE_LASTPRIVATE:
6179 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 6180 check_non_private = "lastprivate";
953ff289
DN
6181 goto do_add;
6182 case OMP_CLAUSE_REDUCTION:
6183 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 6184 check_non_private = "reduction";
953ff289 6185 goto do_add;
acf0174b
JJ
6186 case OMP_CLAUSE_LINEAR:
6187 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6188 is_gimple_val, fb_rvalue) == GS_ERROR)
6189 {
6190 remove = true;
6191 break;
6192 }
6193 flags = GOVD_LINEAR | GOVD_EXPLICIT;
6194 goto do_add;
6195
6196 case OMP_CLAUSE_MAP:
6197 if (OMP_CLAUSE_SIZE (c)
6198 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6199 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6200 {
6201 remove = true;
6202 break;
6203 }
6204 decl = OMP_CLAUSE_DECL (c);
6205 if (!DECL_P (decl))
6206 {
6207 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6208 NULL, is_gimple_lvalue, fb_lvalue)
6209 == GS_ERROR)
6210 {
6211 remove = true;
6212 break;
6213 }
6214 break;
6215 }
6216 flags = GOVD_MAP | GOVD_EXPLICIT;
6217 goto do_add;
6218
6219 case OMP_CLAUSE_DEPEND:
6220 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
6221 {
6222 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
6223 NULL, is_gimple_val, fb_rvalue);
6224 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
6225 }
6226 if (error_operand_p (OMP_CLAUSE_DECL (c)))
6227 {
6228 remove = true;
6229 break;
6230 }
6231 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
6232 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
6233 is_gimple_val, fb_rvalue) == GS_ERROR)
6234 {
6235 remove = true;
6236 break;
6237 }
6238 break;
6239
6240 case OMP_CLAUSE_TO:
6241 case OMP_CLAUSE_FROM:
6242 if (OMP_CLAUSE_SIZE (c)
6243 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6244 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6245 {
6246 remove = true;
6247 break;
6248 }
6249 decl = OMP_CLAUSE_DECL (c);
6250 if (error_operand_p (decl))
6251 {
6252 remove = true;
6253 break;
6254 }
6255 if (!DECL_P (decl))
6256 {
6257 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6258 NULL, is_gimple_lvalue, fb_lvalue)
6259 == GS_ERROR)
6260 {
6261 remove = true;
6262 break;
6263 }
6264 break;
6265 }
6266 goto do_notice;
953ff289
DN
6267
6268 do_add:
6269 decl = OMP_CLAUSE_DECL (c);
b504a918 6270 if (error_operand_p (decl))
953ff289
DN
6271 {
6272 remove = true;
6273 break;
6274 }
6275 omp_add_variable (ctx, decl, flags);
693d710f 6276 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
953ff289
DN
6277 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6278 {
6279 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
693d710f 6280 GOVD_LOCAL | GOVD_SEEN);
953ff289 6281 gimplify_omp_ctxp = ctx;
d406b663 6282 push_gimplify_context (&gctx);
726a989a 6283
355a7673
MM
6284 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6285 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
726a989a
RB
6286
6287 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6288 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6289 pop_gimplify_context
6290 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
d406b663 6291 push_gimplify_context (&gctx);
726a989a
RB
6292 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6293 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
b8698a0f 6294 pop_gimplify_context
726a989a
RB
6295 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6296 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6297 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6298
953ff289
DN
6299 gimplify_omp_ctxp = outer_ctx;
6300 }
a68ab351
JJ
6301 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6302 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6303 {
6304 gimplify_omp_ctxp = ctx;
d406b663 6305 push_gimplify_context (&gctx);
a68ab351
JJ
6306 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6307 {
6308 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6309 NULL, NULL);
6310 TREE_SIDE_EFFECTS (bind) = 1;
6311 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6312 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6313 }
726a989a
RB
6314 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6315 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6316 pop_gimplify_context
6317 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6318 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6319
a68ab351
JJ
6320 gimplify_omp_ctxp = outer_ctx;
6321 }
953ff289
DN
6322 if (notice_outer)
6323 goto do_notice;
6324 break;
6325
6326 case OMP_CLAUSE_COPYIN:
6327 case OMP_CLAUSE_COPYPRIVATE:
6328 decl = OMP_CLAUSE_DECL (c);
b504a918 6329 if (error_operand_p (decl))
953ff289
DN
6330 {
6331 remove = true;
6332 break;
6333 }
6334 do_notice:
6335 if (outer_ctx)
6336 omp_notice_variable (outer_ctx, decl, true);
07b7aade 6337 if (check_non_private
a68ab351 6338 && region_type == ORT_WORKSHARE
07b7aade
JJ
6339 && omp_check_private (ctx, decl))
6340 {
4f1e4960
JM
6341 error ("%s variable %qE is private in outer context",
6342 check_non_private, DECL_NAME (decl));
07b7aade
JJ
6343 remove = true;
6344 }
953ff289
DN
6345 break;
6346
20906c66 6347 case OMP_CLAUSE_FINAL:
953ff289 6348 case OMP_CLAUSE_IF:
d568d1a8
RS
6349 OMP_CLAUSE_OPERAND (c, 0)
6350 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6351 /* Fall through. */
6352
6353 case OMP_CLAUSE_SCHEDULE:
953ff289 6354 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
6355 case OMP_CLAUSE_NUM_TEAMS:
6356 case OMP_CLAUSE_THREAD_LIMIT:
6357 case OMP_CLAUSE_DIST_SCHEDULE:
6358 case OMP_CLAUSE_DEVICE:
726a989a
RB
6359 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6360 is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b 6361 remove = true;
953ff289
DN
6362 break;
6363
6364 case OMP_CLAUSE_NOWAIT:
6365 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
6366 case OMP_CLAUSE_UNTIED:
6367 case OMP_CLAUSE_COLLAPSE:
20906c66 6368 case OMP_CLAUSE_MERGEABLE:
acf0174b 6369 case OMP_CLAUSE_PROC_BIND:
74bf76ed 6370 case OMP_CLAUSE_SAFELEN:
953ff289
DN
6371 break;
6372
acf0174b
JJ
6373 case OMP_CLAUSE_ALIGNED:
6374 decl = OMP_CLAUSE_DECL (c);
6375 if (error_operand_p (decl))
6376 {
6377 remove = true;
6378 break;
6379 }
6380 if (!is_global_var (decl)
6381 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6382 omp_add_variable (ctx, decl, GOVD_ALIGNED);
6383 break;
6384
953ff289
DN
6385 case OMP_CLAUSE_DEFAULT:
6386 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6387 break;
6388
6389 default:
6390 gcc_unreachable ();
6391 }
6392
6393 if (remove)
6394 *list_p = OMP_CLAUSE_CHAIN (c);
6395 else
6396 list_p = &OMP_CLAUSE_CHAIN (c);
6397 }
6398
6399 gimplify_omp_ctxp = ctx;
6400}
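/* Illustrative example of the do_notice / check_non_private path above
   (a sketch only; variable names and the exact clause are hypothetical):
   given

       #pragma omp parallel private (s)
       #pragma omp for reduction (+:s)

   the reduction variable S is private in the enclosing parallel, so
   omp_check_private succeeds and the clause is rejected with
   "reduction variable 's' is private in outer context".  */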
6401
6402/* Callback for gimplify_adjust_omp_clauses: add an implicit data-sharing
6403 or map clause for each variable seen in the context with no explicit clause. */
6404
6405static int
6406gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6407{
6408 tree *list_p = (tree *) data;
6409 tree decl = (tree) n->key;
6410 unsigned flags = n->value;
aaf46ef9 6411 enum omp_clause_code code;
953ff289
DN
6412 tree clause;
6413 bool private_debug;
6414
6415 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6416 return 0;
6417 if ((flags & GOVD_SEEN) == 0)
6418 return 0;
6419 if (flags & GOVD_DEBUG_PRIVATE)
6420 {
6421 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6422 private_debug = true;
6423 }
acf0174b
JJ
6424 else if (flags & GOVD_MAP)
6425 private_debug = false;
953ff289
DN
6426 else
6427 private_debug
6428 = lang_hooks.decls.omp_private_debug_clause (decl,
6429 !!(flags & GOVD_SHARED));
6430 if (private_debug)
6431 code = OMP_CLAUSE_PRIVATE;
acf0174b
JJ
6432 else if (flags & GOVD_MAP)
6433 code = OMP_CLAUSE_MAP;
953ff289
DN
6434 else if (flags & GOVD_SHARED)
6435 {
6436 if (is_global_var (decl))
64964499
JJ
6437 {
6438 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6439 while (ctx != NULL)
6440 {
6441 splay_tree_node on
6442 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6443 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
74bf76ed
JJ
6444 | GOVD_PRIVATE | GOVD_REDUCTION
6445 | GOVD_LINEAR)) != 0)
64964499
JJ
6446 break;
6447 ctx = ctx->outer_context;
6448 }
6449 if (ctx == NULL)
6450 return 0;
6451 }
953ff289
DN
6452 code = OMP_CLAUSE_SHARED;
6453 }
6454 else if (flags & GOVD_PRIVATE)
6455 code = OMP_CLAUSE_PRIVATE;
6456 else if (flags & GOVD_FIRSTPRIVATE)
6457 code = OMP_CLAUSE_FIRSTPRIVATE;
74bf76ed
JJ
6458 else if (flags & GOVD_LASTPRIVATE)
6459 code = OMP_CLAUSE_LASTPRIVATE;
acf0174b
JJ
6460 else if (flags & GOVD_ALIGNED)
6461 return 0;
953ff289
DN
6462 else
6463 gcc_unreachable ();
6464
c2255bc4 6465 clause = build_omp_clause (input_location, code);
aaf46ef9 6466 OMP_CLAUSE_DECL (clause) = decl;
953ff289
DN
6467 OMP_CLAUSE_CHAIN (clause) = *list_p;
6468 if (private_debug)
6469 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
a68ab351
JJ
6470 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6471 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
acf0174b
JJ
6472 else if (code == OMP_CLAUSE_MAP)
6473 {
6474 OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
6475 ? OMP_CLAUSE_MAP_TO
6476 : OMP_CLAUSE_MAP_TOFROM;
6477 if (DECL_SIZE (decl)
6478 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6479 {
6480 tree decl2 = DECL_VALUE_EXPR (decl);
6481 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6482 decl2 = TREE_OPERAND (decl2, 0);
6483 gcc_assert (DECL_P (decl2));
6484 tree mem = build_simple_mem_ref (decl2);
6485 OMP_CLAUSE_DECL (clause) = mem;
6486 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6487 if (gimplify_omp_ctxp->outer_context)
6488 {
6489 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6490 omp_notice_variable (ctx, decl2, true);
6491 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
6492 }
6493 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
6494 OMP_CLAUSE_MAP);
6495 OMP_CLAUSE_DECL (nc) = decl;
6496 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6497 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6498 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
6499 OMP_CLAUSE_CHAIN (clause) = nc;
6500 }
6501 }
953ff289 6502 *list_p = clause;
a68ab351 6503 lang_hooks.decls.omp_finish_clause (clause);
953ff289
DN
6504
6505 return 0;
6506}
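/* Illustrative example (a sketch, not taken from the sources): for

       int x = 0;
       #pragma omp parallel
         x++;

   omp_notice_variable records X as GOVD_SHARED | GOVD_SEEN, and the
   callback above then prepends an implicit shared(x) clause, as if the
   user had written "#pragma omp parallel shared(x)".  */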
6507
6508static void
6509gimplify_adjust_omp_clauses (tree *list_p)
6510{
6511 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6512 tree c, decl;
6513
6514 while ((c = *list_p) != NULL)
6515 {
6516 splay_tree_node n;
6517 bool remove = false;
6518
aaf46ef9 6519 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
6520 {
6521 case OMP_CLAUSE_PRIVATE:
6522 case OMP_CLAUSE_SHARED:
6523 case OMP_CLAUSE_FIRSTPRIVATE:
74bf76ed 6524 case OMP_CLAUSE_LINEAR:
953ff289
DN
6525 decl = OMP_CLAUSE_DECL (c);
6526 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6527 remove = !(n->value & GOVD_SEEN);
6528 if (! remove)
6529 {
aaf46ef9 6530 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
953ff289
DN
6531 if ((n->value & GOVD_DEBUG_PRIVATE)
6532 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6533 {
6534 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6535 || ((n->value & GOVD_DATA_SHARE_CLASS)
6536 == GOVD_PRIVATE));
aaf46ef9 6537 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
953ff289
DN
6538 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6539 }
74bf76ed
JJ
6540 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6541 && ctx->outer_context
6542 && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6543 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6544 && !is_global_var (decl))
6545 {
6546 if (ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
6547 {
6548 n = splay_tree_lookup (ctx->outer_context->variables,
6549 (splay_tree_key) decl);
6550 if (n == NULL
6551 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
6552 {
6553 int flags = OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6554 ? GOVD_LASTPRIVATE : GOVD_SHARED;
6555 if (n == NULL)
6556 omp_add_variable (ctx->outer_context, decl,
6557 flags | GOVD_SEEN);
6558 else
6559 n->value |= flags | GOVD_SEEN;
6560 }
6561 }
6562 else
6563 omp_notice_variable (ctx->outer_context, decl, true);
6564 }
953ff289
DN
6565 }
6566 break;
6567
6568 case OMP_CLAUSE_LASTPRIVATE:
6569 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6570 accurately reflect the presence of a FIRSTPRIVATE clause. */
6571 decl = OMP_CLAUSE_DECL (c);
6572 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6573 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6574 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6575 break;
b8698a0f 6576
acf0174b
JJ
6577 case OMP_CLAUSE_ALIGNED:
6578 decl = OMP_CLAUSE_DECL (c);
6579 if (!is_global_var (decl))
6580 {
6581 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6582 remove = n == NULL || !(n->value & GOVD_SEEN);
6583 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6584 {
6585 struct gimplify_omp_ctx *octx;
6586 if (n != NULL
6587 && (n->value & (GOVD_DATA_SHARE_CLASS
6588 & ~GOVD_FIRSTPRIVATE)))
6589 remove = true;
6590 else
6591 for (octx = ctx->outer_context; octx;
6592 octx = octx->outer_context)
6593 {
6594 n = splay_tree_lookup (octx->variables,
6595 (splay_tree_key) decl);
6596 if (n == NULL)
6597 continue;
6598 if (n->value & GOVD_LOCAL)
6599 break;
6600 /* We have to avoid assigning a shared variable
6601 to itself when trying to add
6602 __builtin_assume_aligned. */
6603 if (n->value & GOVD_SHARED)
6604 {
6605 remove = true;
6606 break;
6607 }
6608 }
6609 }
6610 }
6611 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
6612 {
6613 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6614 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6615 remove = true;
6616 }
6617 break;
6618
6619 case OMP_CLAUSE_MAP:
6620 decl = OMP_CLAUSE_DECL (c);
6621 if (!DECL_P (decl))
6622 break;
6623 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6624 if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
6625 remove = true;
6626 else if (DECL_SIZE (decl)
6627 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
6628 && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
6629 {
6630 tree decl2 = DECL_VALUE_EXPR (decl);
6631 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6632 decl2 = TREE_OPERAND (decl2, 0);
6633 gcc_assert (DECL_P (decl2));
6634 tree mem = build_simple_mem_ref (decl2);
6635 OMP_CLAUSE_DECL (c) = mem;
6636 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6637 if (ctx->outer_context)
6638 {
6639 omp_notice_variable (ctx->outer_context, decl2, true);
6640 omp_notice_variable (ctx->outer_context,
6641 OMP_CLAUSE_SIZE (c), true);
6642 }
6643 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6644 OMP_CLAUSE_MAP);
6645 OMP_CLAUSE_DECL (nc) = decl;
6646 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6647 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6648 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
6649 OMP_CLAUSE_CHAIN (c) = nc;
6650 c = nc;
6651 }
6652 break;
6653
6654 case OMP_CLAUSE_TO:
6655 case OMP_CLAUSE_FROM:
6656 decl = OMP_CLAUSE_DECL (c);
6657 if (!DECL_P (decl))
6658 break;
6659 if (DECL_SIZE (decl)
6660 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6661 {
6662 tree decl2 = DECL_VALUE_EXPR (decl);
6663 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6664 decl2 = TREE_OPERAND (decl2, 0);
6665 gcc_assert (DECL_P (decl2));
6666 tree mem = build_simple_mem_ref (decl2);
6667 OMP_CLAUSE_DECL (c) = mem;
6668 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6669 if (ctx->outer_context)
6670 {
6671 omp_notice_variable (ctx->outer_context, decl2, true);
6672 omp_notice_variable (ctx->outer_context,
6673 OMP_CLAUSE_SIZE (c), true);
6674 }
6675 }
6676 break;
6677
953ff289
DN
6678 case OMP_CLAUSE_REDUCTION:
6679 case OMP_CLAUSE_COPYIN:
6680 case OMP_CLAUSE_COPYPRIVATE:
6681 case OMP_CLAUSE_IF:
6682 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
6683 case OMP_CLAUSE_NUM_TEAMS:
6684 case OMP_CLAUSE_THREAD_LIMIT:
6685 case OMP_CLAUSE_DIST_SCHEDULE:
6686 case OMP_CLAUSE_DEVICE:
953ff289
DN
6687 case OMP_CLAUSE_SCHEDULE:
6688 case OMP_CLAUSE_NOWAIT:
6689 case OMP_CLAUSE_ORDERED:
6690 case OMP_CLAUSE_DEFAULT:
a68ab351
JJ
6691 case OMP_CLAUSE_UNTIED:
6692 case OMP_CLAUSE_COLLAPSE:
20906c66
JJ
6693 case OMP_CLAUSE_FINAL:
6694 case OMP_CLAUSE_MERGEABLE:
acf0174b 6695 case OMP_CLAUSE_PROC_BIND:
74bf76ed 6696 case OMP_CLAUSE_SAFELEN:
acf0174b 6697 case OMP_CLAUSE_DEPEND:
953ff289
DN
6698 break;
6699
6700 default:
6701 gcc_unreachable ();
6702 }
6703
6704 if (remove)
6705 *list_p = OMP_CLAUSE_CHAIN (c);
6706 else
6707 list_p = &OMP_CLAUSE_CHAIN (c);
6708 }
6709
6710 /* Add in any implicit data sharing. */
6711 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
b8698a0f 6712
953ff289
DN
6713 gimplify_omp_ctxp = ctx->outer_context;
6714 delete_omp_context (ctx);
6715}
6716
6717/* Gimplify the contents of an OMP_PARALLEL statement. This involves
6718 gimplification of the body, as well as scanning the body for used
6719 variables. We need to do this scan now, because variable-sized
6720 decls will be decomposed during gimplification. */
6721
726a989a
RB
6722static void
6723gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
6724{
6725 tree expr = *expr_p;
726a989a
RB
6726 gimple g;
6727 gimple_seq body = NULL;
d406b663 6728 struct gimplify_ctx gctx;
953ff289 6729
a68ab351
JJ
6730 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6731 OMP_PARALLEL_COMBINED (expr)
6732 ? ORT_COMBINED_PARALLEL
6733 : ORT_PARALLEL);
953ff289 6734
d406b663 6735 push_gimplify_context (&gctx);
953ff289 6736
726a989a
RB
6737 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6738 if (gimple_code (g) == GIMPLE_BIND)
6739 pop_gimplify_context (g);
50674e96 6740 else
726a989a 6741 pop_gimplify_context (NULL);
953ff289
DN
6742
6743 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6744
726a989a
RB
6745 g = gimple_build_omp_parallel (body,
6746 OMP_PARALLEL_CLAUSES (expr),
6747 NULL_TREE, NULL_TREE);
6748 if (OMP_PARALLEL_COMBINED (expr))
6749 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6750 gimplify_seq_add_stmt (pre_p, g);
6751 *expr_p = NULL_TREE;
953ff289
DN
6752}
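/* Worked sketch (illustrative): an OMP_PARALLEL tree for

       #pragma omp parallel num_threads (4)
         body;

   has its clause list scanned and adjusted, its body gimplified in a
   fresh gimplify context, and is finally replaced by a single
   GIMPLE_OMP_PARALLEL statement appended to PRE_P.  */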
6753
a68ab351
JJ
6754/* Gimplify the contents of an OMP_TASK statement. This involves
6755 gimplification of the body, as well as scanning the body for used
6756 variables. We need to do this scan now, because variable-sized
6757 decls will be decomposed during gimplification. */
953ff289 6758
726a989a
RB
6759static void
6760gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
953ff289 6761{
a68ab351 6762 tree expr = *expr_p;
726a989a
RB
6763 gimple g;
6764 gimple_seq body = NULL;
d406b663 6765 struct gimplify_ctx gctx;
953ff289 6766
f22f4340
JJ
6767 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6768 find_omp_clause (OMP_TASK_CLAUSES (expr),
6769 OMP_CLAUSE_UNTIED)
6770 ? ORT_UNTIED_TASK : ORT_TASK);
953ff289 6771
d406b663 6772 push_gimplify_context (&gctx);
953ff289 6773
726a989a
RB
6774 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6775 if (gimple_code (g) == GIMPLE_BIND)
6776 pop_gimplify_context (g);
953ff289 6777 else
726a989a 6778 pop_gimplify_context (NULL);
953ff289 6779
a68ab351 6780 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
917948d3 6781
726a989a
RB
6782 g = gimple_build_omp_task (body,
6783 OMP_TASK_CLAUSES (expr),
6784 NULL_TREE, NULL_TREE,
6785 NULL_TREE, NULL_TREE, NULL_TREE);
6786 gimplify_seq_add_stmt (pre_p, g);
6787 *expr_p = NULL_TREE;
a68ab351
JJ
6788}
6789
acf0174b
JJ
6790/* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6791 with non-NULL OMP_FOR_INIT. */
6792
6793static tree
6794find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6795{
6796 *walk_subtrees = 0;
6797 switch (TREE_CODE (*tp))
6798 {
6799 case OMP_FOR:
6800 *walk_subtrees = 1;
6801 /* FALLTHRU */
6802 case OMP_SIMD:
6803 if (OMP_FOR_INIT (*tp) != NULL_TREE)
6804 return *tp;
6805 break;
6806 case BIND_EXPR:
6807 case STATEMENT_LIST:
6808 case OMP_PARALLEL:
6809 *walk_subtrees = 1;
6810 break;
6811 default:
6812 break;
6813 }
6814 return NULL_TREE;
6815}
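/* Illustrative note: for a combined construct such as
   "#pragma omp distribute parallel for", the outer loop directive is
   created with a NULL OMP_FOR_INIT; this walker then locates the
   innermost OMP_FOR / OMP_SIMD that actually carries the loop, which
   gimplify_omp_for uses below in place of the outer statement.  */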
6816
a68ab351
JJ
6817/* Gimplify the gross structure of an OMP_FOR statement. */
6818
6819static enum gimplify_status
726a989a 6820gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
a68ab351 6821{
acf0174b 6822 tree for_stmt, orig_for_stmt, decl, var, t;
32e8bb8e
ILT
6823 enum gimplify_status ret = GS_ALL_DONE;
6824 enum gimplify_status tret;
726a989a
RB
6825 gimple gfor;
6826 gimple_seq for_body, for_pre_body;
a68ab351 6827 int i;
74bf76ed
JJ
6828 bool simd;
6829 bitmap has_decl_expr = NULL;
a68ab351 6830
acf0174b 6831 orig_for_stmt = for_stmt = *expr_p;
a68ab351 6832
acf0174b 6833 simd = TREE_CODE (for_stmt) == OMP_SIMD;
a68ab351 6834 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
74bf76ed 6835 simd ? ORT_SIMD : ORT_WORKSHARE);
917948d3 6836
726a989a
RB
6837 /* Handle OMP_FOR_INIT. */
6838 for_pre_body = NULL;
74bf76ed
JJ
6839 if (simd && OMP_FOR_PRE_BODY (for_stmt))
6840 {
6841 has_decl_expr = BITMAP_ALLOC (NULL);
6842 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
6843 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
acf0174b 6844 == VAR_DECL)
74bf76ed
JJ
6845 {
6846 t = OMP_FOR_PRE_BODY (for_stmt);
6847 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6848 }
6849 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
6850 {
6851 tree_stmt_iterator si;
6852 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
6853 tsi_next (&si))
6854 {
6855 t = tsi_stmt (si);
6856 if (TREE_CODE (t) == DECL_EXPR
6857 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
6858 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6859 }
6860 }
6861 }
726a989a
RB
6862 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6863 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
a68ab351 6864
acf0174b
JJ
6865 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
6866 {
6867 for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
6868 NULL, NULL);
6869 gcc_assert (for_stmt != NULL_TREE);
6870 gimplify_omp_ctxp->combined_loop = true;
6871 }
6872
355a7673 6873 for_body = NULL;
a68ab351
JJ
6874 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6875 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6876 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6877 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6878 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6879 {
6880 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
726a989a
RB
6881 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6882 decl = TREE_OPERAND (t, 0);
a68ab351
JJ
6883 gcc_assert (DECL_P (decl));
6884 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6885 || POINTER_TYPE_P (TREE_TYPE (decl)));
6886
6887 /* Make sure the iteration variable is private. */
74bf76ed 6888 tree c = NULL_TREE;
acf0174b
JJ
6889 if (orig_for_stmt != for_stmt)
 6890	    /* Do this only on the innermost construct for combined ones. */;
6891 else if (simd)
74bf76ed
JJ
6892 {
6893 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
6894 (splay_tree_key)decl);
6895 omp_is_private (gimplify_omp_ctxp, decl, simd);
6896 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6897 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6898 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6899 {
6900 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6901 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
6902 if (has_decl_expr
6903 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
6904 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6905 OMP_CLAUSE_DECL (c) = decl;
6906 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6907 OMP_FOR_CLAUSES (for_stmt) = c;
6908 omp_add_variable (gimplify_omp_ctxp, decl,
6909 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6910 }
6911 else
6912 {
6913 bool lastprivate
6914 = (!has_decl_expr
6915 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
6916 c = build_omp_clause (input_location,
6917 lastprivate ? OMP_CLAUSE_LASTPRIVATE
6918 : OMP_CLAUSE_PRIVATE);
6919 OMP_CLAUSE_DECL (c) = decl;
6920 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6921 omp_add_variable (gimplify_omp_ctxp, decl,
6922 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
6923 | GOVD_SEEN);
6924 c = NULL_TREE;
6925 }
6926 }
6927 else if (omp_is_private (gimplify_omp_ctxp, decl, simd))
a68ab351
JJ
6928 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6929 else
6930 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6931
6932 /* If DECL is not a gimple register, create a temporary variable to act
6933 as an iteration counter. This is valid, since DECL cannot be
6934 modified in the body of the loop. */
acf0174b
JJ
6935 if (orig_for_stmt != for_stmt)
6936 var = decl;
6937 else if (!is_gimple_reg (decl))
a68ab351
JJ
6938 {
6939 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
726a989a 6940 TREE_OPERAND (t, 0) = var;
b8698a0f 6941
726a989a 6942 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
953ff289 6943
a68ab351
JJ
6944 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6945 }
6946 else
6947 var = decl;
07beea0d 6948
32e8bb8e 6949 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6950 is_gimple_val, fb_rvalue);
32e8bb8e 6951 ret = MIN (ret, tret);
726a989a
RB
6952 if (ret == GS_ERROR)
6953 return ret;
953ff289 6954
726a989a 6955 /* Handle OMP_FOR_COND. */
a68ab351
JJ
6956 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6957 gcc_assert (COMPARISON_CLASS_P (t));
726a989a 6958 gcc_assert (TREE_OPERAND (t, 0) == decl);
b56b9fe3 6959
32e8bb8e 6960 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6961 is_gimple_val, fb_rvalue);
32e8bb8e 6962 ret = MIN (ret, tret);
917948d3 6963
726a989a 6964 /* Handle OMP_FOR_INCR. */
a68ab351 6965 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
953ff289
DN
6966 switch (TREE_CODE (t))
6967 {
a68ab351
JJ
6968 case PREINCREMENT_EXPR:
6969 case POSTINCREMENT_EXPR:
acf0174b
JJ
6970 if (orig_for_stmt != for_stmt)
6971 break;
a68ab351 6972 t = build_int_cst (TREE_TYPE (decl), 1);
74bf76ed
JJ
6973 if (c)
6974 OMP_CLAUSE_LINEAR_STEP (c) = t;
a68ab351 6975 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
726a989a 6976 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
a68ab351
JJ
6977 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6978 break;
6979
6980 case PREDECREMENT_EXPR:
6981 case POSTDECREMENT_EXPR:
acf0174b
JJ
6982 if (orig_for_stmt != for_stmt)
6983 break;
a68ab351 6984 t = build_int_cst (TREE_TYPE (decl), -1);
74bf76ed
JJ
6985 if (c)
6986 OMP_CLAUSE_LINEAR_STEP (c) = t;
a68ab351 6987 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
726a989a 6988 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
a68ab351
JJ
6989 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6990 break;
6991
726a989a
RB
6992 case MODIFY_EXPR:
6993 gcc_assert (TREE_OPERAND (t, 0) == decl);
6994 TREE_OPERAND (t, 0) = var;
a68ab351 6995
726a989a 6996 t = TREE_OPERAND (t, 1);
a68ab351 6997 switch (TREE_CODE (t))
953ff289 6998 {
a68ab351
JJ
6999 case PLUS_EXPR:
7000 if (TREE_OPERAND (t, 1) == decl)
7001 {
7002 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
7003 TREE_OPERAND (t, 0) = var;
7004 break;
7005 }
7006
7007 /* Fallthru. */
7008 case MINUS_EXPR:
7009 case POINTER_PLUS_EXPR:
7010 gcc_assert (TREE_OPERAND (t, 0) == decl);
917948d3 7011 TREE_OPERAND (t, 0) = var;
953ff289 7012 break;
a68ab351
JJ
7013 default:
7014 gcc_unreachable ();
953ff289 7015 }
917948d3 7016
32e8bb8e 7017 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 7018 is_gimple_val, fb_rvalue);
32e8bb8e 7019 ret = MIN (ret, tret);
74bf76ed
JJ
7020 if (c)
7021 {
7022 OMP_CLAUSE_LINEAR_STEP (c) = TREE_OPERAND (t, 1);
7023 if (TREE_CODE (t) == MINUS_EXPR)
7024 {
7025 t = TREE_OPERAND (t, 1);
7026 OMP_CLAUSE_LINEAR_STEP (c)
7027 = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
7028 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
7029 &for_pre_body, NULL,
7030 is_gimple_val, fb_rvalue);
7031 ret = MIN (ret, tret);
7032 }
7033 }
953ff289 7034 break;
a68ab351 7035
953ff289
DN
7036 default:
7037 gcc_unreachable ();
7038 }
7039
acf0174b
JJ
7040 if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
7041 && orig_for_stmt == for_stmt)
a68ab351 7042 {
a68ab351 7043 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
726a989a
RB
7044 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7045 && OMP_CLAUSE_DECL (c) == decl
7046 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
7047 {
7048 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7049 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7050 gcc_assert (TREE_OPERAND (t, 0) == var);
7051 t = TREE_OPERAND (t, 1);
7052 gcc_assert (TREE_CODE (t) == PLUS_EXPR
7053 || TREE_CODE (t) == MINUS_EXPR
7054 || TREE_CODE (t) == POINTER_PLUS_EXPR);
7055 gcc_assert (TREE_OPERAND (t, 0) == var);
7056 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
7057 TREE_OPERAND (t, 1));
7058 gimplify_assign (decl, t,
7059 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
a68ab351
JJ
7060 }
7061 }
953ff289
DN
7062 }
7063
74bf76ed
JJ
7064 BITMAP_FREE (has_decl_expr);
7065
acf0174b 7066 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
726a989a 7067
acf0174b
JJ
7068 if (orig_for_stmt != for_stmt)
7069 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7070 {
7071 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7072 decl = TREE_OPERAND (t, 0);
7073 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
7074 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
7075 TREE_OPERAND (t, 0) = var;
7076 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7077 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
7078 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
7079 }
7080
7081 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt));
953ff289 7082
74bf76ed 7083 int kind;
acf0174b 7084 switch (TREE_CODE (orig_for_stmt))
74bf76ed
JJ
7085 {
7086 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
7087 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
acf0174b 7088 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
74bf76ed
JJ
7089 default:
7090 gcc_unreachable ();
7091 }
acf0174b 7092 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
726a989a
RB
7093 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
7094 for_pre_body);
acf0174b
JJ
7095 if (orig_for_stmt != for_stmt)
7096 gimple_omp_for_set_combined_p (gfor, true);
7097 if (gimplify_omp_ctxp
7098 && (gimplify_omp_ctxp->combined_loop
7099 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
7100 && gimplify_omp_ctxp->outer_context
7101 && gimplify_omp_ctxp->outer_context->combined_loop)))
7102 {
7103 gimple_omp_for_set_combined_into_p (gfor, true);
7104 if (gimplify_omp_ctxp->combined_loop)
7105 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
7106 else
7107 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
7108 }
726a989a
RB
7109
7110 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7111 {
7112 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7113 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
7114 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
7115 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7116 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
7117 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
7118 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7119 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
7120 }
7121
7122 gimplify_seq_add_stmt (pre_p, gfor);
74bf76ed
JJ
7123 if (ret != GS_ALL_DONE)
7124 return GS_ERROR;
7125 *expr_p = NULL_TREE;
7126 return GS_ALL_DONE;
953ff289
DN
7127}
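/* Worked sketch (illustrative): for

       #pragma omp simd
       for (i = 0; i < n; i++)
         ...

   the iteration variable I receives an implicit linear(i:1) clause with
   no copy-in, and the increment is normalized to the MODIFY_EXPR form
   "i = i + 1" before the GIMPLE_OMP_FOR (kind GF_OMP_FOR_KIND_SIMD) is
   built from the collected init/cond/incr expressions.  */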
7128
acf0174b
JJ
7129/* Gimplify the gross structure of other OpenMP constructs.
7130 In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
7131 and OMP_TEAMS. */
953ff289 7132
726a989a
RB
7133static void
7134gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
953ff289 7135{
726a989a
RB
7136 tree expr = *expr_p;
7137 gimple stmt;
7138 gimple_seq body = NULL;
acf0174b 7139 enum omp_region_type ort = ORT_WORKSHARE;
953ff289 7140
acf0174b
JJ
7141 switch (TREE_CODE (expr))
7142 {
7143 case OMP_SECTIONS:
7144 case OMP_SINGLE:
7145 break;
7146 case OMP_TARGET:
7147 ort = ORT_TARGET;
7148 break;
7149 case OMP_TARGET_DATA:
7150 ort = ORT_TARGET_DATA;
7151 break;
7152 case OMP_TEAMS:
7153 ort = ORT_TEAMS;
7154 break;
7155 default:
7156 gcc_unreachable ();
7157 }
7158 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
7159 if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
7160 {
7161 struct gimplify_ctx gctx;
7162 push_gimplify_context (&gctx);
7163 gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
7164 if (gimple_code (g) == GIMPLE_BIND)
7165 pop_gimplify_context (g);
7166 else
7167 pop_gimplify_context (NULL);
7168 if (ort == ORT_TARGET_DATA)
7169 {
7170 gimple_seq cleanup = NULL;
7171 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA);
7172 g = gimple_build_call (fn, 0);
7173 gimple_seq_add_stmt (&cleanup, g);
7174 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7175 body = NULL;
7176 gimple_seq_add_stmt (&body, g);
7177 }
7178 }
7179 else
7180 gimplify_and_add (OMP_BODY (expr), &body);
726a989a 7181 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
953ff289 7182
acf0174b
JJ
7183 switch (TREE_CODE (expr))
7184 {
7185 case OMP_SECTIONS:
7186 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
7187 break;
7188 case OMP_SINGLE:
7189 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
7190 break;
7191 case OMP_TARGET:
7192 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
7193 OMP_CLAUSES (expr));
7194 break;
7195 case OMP_TARGET_DATA:
7196 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
7197 OMP_CLAUSES (expr));
7198 break;
7199 case OMP_TEAMS:
7200 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
7201 break;
7202 default:
7203 gcc_unreachable ();
7204 }
7205
7206 gimplify_seq_add_stmt (pre_p, stmt);
7207 *expr_p = NULL_TREE;
7208}
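/* Illustrative note: for "#pragma omp target data", the gimplified body
   is wrapped in a GIMPLE_TRY_FINALLY whose cleanup is a call to the
   BUILT_IN_GOMP_TARGET_END_DATA builtin, so the device data environment
   is released even if the body exits abnormally.  */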
7209
 7210/* Gimplify the gross structure of the OpenMP target update construct. */
7211
7212static void
7213gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
7214{
7215 tree expr = *expr_p;
7216 gimple stmt;
7217
7218 gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
7219 ORT_WORKSHARE);
7220 gimplify_adjust_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr));
7221 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
7222 OMP_TARGET_UPDATE_CLAUSES (expr));
726a989a
RB
7223
7224 gimplify_seq_add_stmt (pre_p, stmt);
acf0174b 7225 *expr_p = NULL_TREE;
953ff289
DN
7226}
7227
7228/* A subroutine of gimplify_omp_atomic. The front end is supposed to have
b8698a0f 7229 stabilized the lhs of the atomic operation as *ADDR. Return true if
953ff289
DN
7230 EXPR is this stabilized form. */
7231
7232static bool
a509ebb5 7233goa_lhs_expr_p (tree expr, tree addr)
953ff289
DN
7234{
7235 /* Also include casts to other type variants. The C front end is fond
b8698a0f 7236 of adding these for e.g. volatile variables. This is like
953ff289 7237 STRIP_TYPE_NOPS but includes the main variant lookup. */
9600efe1 7238 STRIP_USELESS_TYPE_CONVERSION (expr);
953ff289 7239
78e47463
JJ
7240 if (TREE_CODE (expr) == INDIRECT_REF)
7241 {
7242 expr = TREE_OPERAND (expr, 0);
7243 while (expr != addr
1043771b 7244 && (CONVERT_EXPR_P (expr)
78e47463
JJ
7245 || TREE_CODE (expr) == NON_LVALUE_EXPR)
7246 && TREE_CODE (expr) == TREE_CODE (addr)
9600efe1 7247 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
78e47463
JJ
7248 {
7249 expr = TREE_OPERAND (expr, 0);
7250 addr = TREE_OPERAND (addr, 0);
7251 }
251923f5
JJ
7252 if (expr == addr)
7253 return true;
71458b8a
JJ
7254 return (TREE_CODE (addr) == ADDR_EXPR
7255 && TREE_CODE (expr) == ADDR_EXPR
251923f5 7256 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
78e47463 7257 }
953ff289
DN
7258 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
7259 return true;
7260 return false;
7261}
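/* Illustrative example: for "#pragma omp atomic" applied to "x += y",
   ADDR is &x, and both plain occurrences of X and casts of it to other
   variants of its type satisfy goa_lhs_expr_p.  */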
7262
ad19c4be
EB
7263/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
7264 expression does not involve the lhs, evaluate it into a temporary.
7265 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
7266 or -1 if an error was encountered. */
953ff289
DN
7267
7268static int
726a989a
RB
7269goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
7270 tree lhs_var)
953ff289
DN
7271{
7272 tree expr = *expr_p;
7273 int saw_lhs;
7274
7275 if (goa_lhs_expr_p (expr, lhs_addr))
7276 {
7277 *expr_p = lhs_var;
7278 return 1;
7279 }
7280 if (is_gimple_val (expr))
7281 return 0;
b8698a0f 7282
953ff289
DN
7283 saw_lhs = 0;
7284 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
7285 {
7286 case tcc_binary:
067dd3c9 7287 case tcc_comparison:
726a989a
RB
7288 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
7289 lhs_var);
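	  /* FALLTHRU: operand 0 of the binary/comparison is handled below.  */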
953ff289 7290 case tcc_unary:
726a989a
RB
7291 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
7292 lhs_var);
953ff289 7293 break;
067dd3c9
JJ
7294 case tcc_expression:
7295 switch (TREE_CODE (expr))
7296 {
7297 case TRUTH_ANDIF_EXPR:
7298 case TRUTH_ORIF_EXPR:
f2b11865
JJ
7299 case TRUTH_AND_EXPR:
7300 case TRUTH_OR_EXPR:
7301 case TRUTH_XOR_EXPR:
067dd3c9
JJ
7302 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
7303 lhs_addr, lhs_var);
f2b11865 7304 case TRUTH_NOT_EXPR:
067dd3c9
JJ
7305 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
7306 lhs_addr, lhs_var);
7307 break;
4063e61b
JM
7308 case COMPOUND_EXPR:
7309 /* Break out any preevaluations from cp_build_modify_expr. */
7310 for (; TREE_CODE (expr) == COMPOUND_EXPR;
7311 expr = TREE_OPERAND (expr, 1))
7312 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
7313 *expr_p = expr;
7314 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
067dd3c9
JJ
7315 default:
7316 break;
7317 }
7318 break;
953ff289
DN
7319 default:
7320 break;
7321 }
7322
7323 if (saw_lhs == 0)
7324 {
7325 enum gimplify_status gs;
7326 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
7327 if (gs != GS_ALL_DONE)
7328 saw_lhs = -1;
7329 }
7330
7331 return saw_lhs;
7332}
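/* Illustrative example: for "x = x + foo ()" under "#pragma omp atomic",
   the call foo () does not mention the lhs, so it is evaluated into a
   temporary in PRE_P ahead of the atomic load/store pair, while each
   occurrence of X is replaced by LHS_VAR, the value loaded by
   GIMPLE_OMP_ATOMIC_LOAD.  */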
7333
953ff289
DN
7334/* Gimplify an OMP_ATOMIC statement. */
7335
7336static enum gimplify_status
726a989a 7337gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
7338{
7339 tree addr = TREE_OPERAND (*expr_p, 0);
20906c66
JJ
7340 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
7341 ? NULL : TREE_OPERAND (*expr_p, 1);
953ff289 7342 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
726a989a 7343 tree tmp_load;
20906c66 7344 gimple loadstmt, storestmt;
953ff289 7345
20906c66
JJ
7346 tmp_load = create_tmp_reg (type, NULL);
7347 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
7348 return GS_ERROR;
7349
7350 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
7351 != GS_ALL_DONE)
7352 return GS_ERROR;
953ff289 7353
20906c66
JJ
7354 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
7355 gimplify_seq_add_stmt (pre_p, loadstmt);
7356 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
7357 != GS_ALL_DONE)
7358 return GS_ERROR;
953ff289 7359
20906c66
JJ
7360 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
7361 rhs = tmp_load;
7362 storestmt = gimple_build_omp_atomic_store (rhs);
7363 gimplify_seq_add_stmt (pre_p, storestmt);
acf0174b
JJ
7364 if (OMP_ATOMIC_SEQ_CST (*expr_p))
7365 {
7366 gimple_omp_atomic_set_seq_cst (loadstmt);
7367 gimple_omp_atomic_set_seq_cst (storestmt);
7368 }
20906c66
JJ
7369 switch (TREE_CODE (*expr_p))
7370 {
7371 case OMP_ATOMIC_READ:
7372 case OMP_ATOMIC_CAPTURE_OLD:
7373 *expr_p = tmp_load;
7374 gimple_omp_atomic_set_need_value (loadstmt);
7375 break;
7376 case OMP_ATOMIC_CAPTURE_NEW:
7377 *expr_p = rhs;
7378 gimple_omp_atomic_set_need_value (storestmt);
7379 break;
7380 default:
7381 *expr_p = NULL;
7382 break;
7383 }
a509ebb5 7384
acf0174b 7385 return GS_ALL_DONE;
953ff289 7386}
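/* Sketch (illustrative): "#pragma omp atomic capture" on "v = x++;"
   arrives here as OMP_ATOMIC_CAPTURE_OLD and is gimplified roughly as

       tmp = GIMPLE_OMP_ATOMIC_LOAD (&x);
       GIMPLE_OMP_ATOMIC_STORE (tmp + 1);

   with the expression's value being TMP, the captured old value, which
   the enclosing assignment then stores into V.  */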
6de9cd9a 7387
0a35513e
AH
7388/* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
7389 body, and adding some EH bits. */
7390
7391static enum gimplify_status
7392gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
7393{
7394 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
7395 gimple g;
7396 gimple_seq body = NULL;
7397 struct gimplify_ctx gctx;
7398 int subcode = 0;
7399
7400 /* Wrap the transaction body in a BIND_EXPR so we have a context
 7401	     in which to put decls for OpenMP. */
7402 if (TREE_CODE (tbody) != BIND_EXPR)
7403 {
7404 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
7405 TREE_SIDE_EFFECTS (bind) = 1;
7406 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7407 TRANSACTION_EXPR_BODY (expr) = bind;
7408 }
7409
7410 push_gimplify_context (&gctx);
7411 temp = voidify_wrapper_expr (*expr_p, NULL);
7412
7413 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7414 pop_gimplify_context (g);
7415
7416 g = gimple_build_transaction (body, NULL);
7417 if (TRANSACTION_EXPR_OUTER (expr))
7418 subcode = GTMA_IS_OUTER;
7419 else if (TRANSACTION_EXPR_RELAXED (expr))
7420 subcode = GTMA_IS_RELAXED;
7421 gimple_transaction_set_subcode (g, subcode);
7422
7423 gimplify_seq_add_stmt (pre_p, g);
7424
7425 if (temp)
7426 {
7427 *expr_p = temp;
7428 return GS_OK;
7429 }
7430
7431 *expr_p = NULL_TREE;
7432 return GS_ALL_DONE;
7433}
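/* Illustrative note: "__transaction_atomic { body }" becomes a single
   GIMPLE_TRANSACTION statement; outer and relaxed transactions differ
   only in the GTMA_IS_OUTER / GTMA_IS_RELAXED subcode set above.  */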
7434
ad19c4be 7435/* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
726a989a
RB
7436 expression produces a value to be used as an operand inside a GIMPLE
7437 statement, the value will be stored back in *EXPR_P. This value will
7438 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7439 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7440 emitted in PRE_P and POST_P.
7441
7442 Additionally, this process may overwrite parts of the input
7443 expression during gimplification. Ideally, it should be
7444 possible to do non-destructive gimplification.
7445
7446 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7447 the expression needs to evaluate to a value to be used as
7448 an operand in a GIMPLE statement, this value will be stored in
7449 *EXPR_P on exit. This happens when the caller specifies one
7450 of fb_lvalue or fb_rvalue fallback flags.
7451
7452 PRE_P will contain the sequence of GIMPLE statements corresponding
7453 to the evaluation of EXPR and all the side-effects that must
7454 be executed before the main expression. On exit, the last
7455 statement of PRE_P is the core statement being gimplified. For
7456 instance, when gimplifying 'if (++a)' the last statement in
7457 PRE_P will be 'if (t.1)' where t.1 is the result of
7458 pre-incrementing 'a'.
7459
7460 POST_P will contain the sequence of GIMPLE statements corresponding
7461 to the evaluation of all the side-effects that must be executed
7462 after the main expression. If this is NULL, the post
7463 side-effects are stored at the end of PRE_P.
7464
7465 The reason why the output is split in two is to handle post
7466 side-effects explicitly. In some cases, an expression may have
7467 inner and outer post side-effects which need to be emitted in
7468 an order different from the one given by the recursive
7469 traversal. For instance, for the expression (*p--)++ the post
7470 side-effects of '--' must actually occur *after* the post
7471 side-effects of '++'. However, gimplification will first visit
7472 the inner expression, so if a separate POST sequence was not
7473 used, the resulting sequence would be:
7474
7475 1 t.1 = *p
7476 2 p = p - 1
7477 3 t.2 = t.1 + 1
7478 4 *p = t.2
7479
7480 However, the post-decrement operation in line #2 must not be
7481 evaluated until after the store to *p at line #4, so the
7482 correct sequence should be:
7483
7484 1 t.1 = *p
7485 2 t.2 = t.1 + 1
7486 3 *p = t.2
7487 4 p = p - 1
7488
7489 So, by specifying a separate post queue, it is possible
7490 to emit the post side-effects in the correct order.
7491 If POST_P is NULL, an internal queue will be used. Before
7492 returning to the caller, the sequence POST_P is appended to
7493 the main output sequence PRE_P.
7494
7495 GIMPLE_TEST_F points to a function that takes a tree T and
7496 returns nonzero if T is in the GIMPLE form requested by the
12947319 7497 caller. The GIMPLE predicates are in gimple.c.
726a989a
RB
7498
7499 FALLBACK tells the function what sort of a temporary we want if
7500 gimplification cannot produce an expression that complies with
7501 GIMPLE_TEST_F.
7502
7503 fb_none means that no temporary should be generated
7504 fb_rvalue means that an rvalue is OK to generate
7505 fb_lvalue means that an lvalue is OK to generate
7506 fb_either means that either is OK, but an lvalue is preferable.
7507 fb_mayfail means that gimplification may fail (in which case
7508 GS_ERROR will be returned)
7509
7510 The return value is either GS_ERROR or GS_ALL_DONE, since this
7511 function iterates until EXPR is completely gimplified or an error
7512 occurs. */
6de9cd9a
DN
7513
7514enum gimplify_status
726a989a
RB
7515gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7516 bool (*gimple_test_f) (tree), fallback_t fallback)
6de9cd9a
DN
7517{
7518 tree tmp;
726a989a
RB
7519 gimple_seq internal_pre = NULL;
7520 gimple_seq internal_post = NULL;
6de9cd9a 7521 tree save_expr;
726a989a 7522 bool is_statement;
6de9cd9a
DN
7523 location_t saved_location;
7524 enum gimplify_status ret;
726a989a 7525 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6de9cd9a
DN
7526
7527 save_expr = *expr_p;
7528 if (save_expr == NULL_TREE)
7529 return GS_ALL_DONE;
7530
726a989a
RB
7531 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7532 is_statement = gimple_test_f == is_gimple_stmt;
7533 if (is_statement)
7534 gcc_assert (pre_p);
7535
7536 /* Consistency checks. */
7537 if (gimple_test_f == is_gimple_reg)
7538 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7539 else if (gimple_test_f == is_gimple_val
726a989a
RB
7540 || gimple_test_f == is_gimple_call_addr
7541 || gimple_test_f == is_gimple_condexpr
7542 || gimple_test_f == is_gimple_mem_rhs
ba4d8f9d 7543 || gimple_test_f == is_gimple_mem_rhs_or_call
726a989a 7544 || gimple_test_f == is_gimple_reg_rhs
ba4d8f9d 7545 || gimple_test_f == is_gimple_reg_rhs_or_call
70f34814
RG
7546 || gimple_test_f == is_gimple_asm_val
7547 || gimple_test_f == is_gimple_mem_ref_addr)
726a989a
RB
7548 gcc_assert (fallback & fb_rvalue);
7549 else if (gimple_test_f == is_gimple_min_lval
7550 || gimple_test_f == is_gimple_lvalue)
7551 gcc_assert (fallback & fb_lvalue);
7552 else if (gimple_test_f == is_gimple_addressable)
7553 gcc_assert (fallback & fb_either);
7554 else if (gimple_test_f == is_gimple_stmt)
7555 gcc_assert (fallback == fb_none);
7556 else
7557 {
7558 /* We should have recognized the GIMPLE_TEST_F predicate to
7559 know what kind of fallback to use in case a temporary is
7560 needed to hold the value or address of *EXPR_P. */
7561 gcc_unreachable ();
7562 }
7563
6de9cd9a
DN
7564 /* We used to check the predicate here and return immediately if it
7565 succeeds. This is wrong; the design is for gimplification to be
7566 idempotent, and for the predicates to only test for valid forms, not
7567 whether they are fully simplified. */
6de9cd9a
DN
7568 if (pre_p == NULL)
7569 pre_p = &internal_pre;
726a989a 7570
6de9cd9a
DN
7571 if (post_p == NULL)
7572 post_p = &internal_post;
7573
726a989a
RB
7574 /* Remember the last statements added to PRE_P and POST_P. Every
7575 new statement added by the gimplification helpers needs to be
7576 annotated with location information. To centralize the
7577 responsibility, we remember the last statement that had been
7578 added to both queues before gimplifying *EXPR_P. If
7579 gimplification produces new statements in PRE_P and POST_P, those
7580 statements will be annotated with the same location information
7581 as *EXPR_P. */
7582 pre_last_gsi = gsi_last (*pre_p);
7583 post_last_gsi = gsi_last (*post_p);
7584
6de9cd9a 7585 saved_location = input_location;
a281759f
PB
7586 if (save_expr != error_mark_node
7587 && EXPR_HAS_LOCATION (*expr_p))
7588 input_location = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
7589
7590 /* Loop over the specific gimplifiers until the toplevel node
7591 remains the same. */
7592 do
7593 {
73d6ddef
RK
7594 /* Strip away as many useless type conversions as possible
7595 at the toplevel. */
7596 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6de9cd9a
DN
7597
7598 /* Remember the expr. */
7599 save_expr = *expr_p;
7600
7601 /* Die, die, die, my darling. */
7602 if (save_expr == error_mark_node
726a989a 7603 || (TREE_TYPE (save_expr)
65355d53 7604 && TREE_TYPE (save_expr) == error_mark_node))
6de9cd9a
DN
7605 {
7606 ret = GS_ERROR;
7607 break;
7608 }
7609
7610 /* Do any language-specific gimplification. */
32e8bb8e
ILT
7611 ret = ((enum gimplify_status)
7612 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6de9cd9a
DN
7613 if (ret == GS_OK)
7614 {
7615 if (*expr_p == NULL_TREE)
7616 break;
7617 if (*expr_p != save_expr)
7618 continue;
7619 }
7620 else if (ret != GS_UNHANDLED)
7621 break;
7622
941f78d1
JM
7623 /* Make sure that all the cases set 'ret' appropriately. */
7624 ret = GS_UNHANDLED;
6de9cd9a
DN
7625 switch (TREE_CODE (*expr_p))
7626 {
7627 /* First deal with the special cases. */
7628
7629 case POSTINCREMENT_EXPR:
7630 case POSTDECREMENT_EXPR:
7631 case PREINCREMENT_EXPR:
7632 case PREDECREMENT_EXPR:
7633 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
cc3c4f62
RB
7634 fallback != fb_none,
7635 TREE_TYPE (*expr_p));
6de9cd9a
DN
7636 break;
7637
7638 case ARRAY_REF:
44de5aeb
RK
7639 case ARRAY_RANGE_REF:
7640 case REALPART_EXPR:
7641 case IMAGPART_EXPR:
6de9cd9a 7642 case COMPONENT_REF:
9e51aaf5 7643 case VIEW_CONVERT_EXPR:
6de9cd9a 7644 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
90051e16 7645 fallback ? fallback : fb_rvalue);
6de9cd9a
DN
7646 break;
7647
7648 case COND_EXPR:
dae7ec87 7649 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
726a989a 7650
0223e4f5
JM
7651 /* C99 code may assign to an array in a structure value of a
7652 conditional expression, and this has undefined behavior
7653 only on execution, so create a temporary if an lvalue is
7654 required. */
7655 if (fallback == fb_lvalue)
7656 {
7657 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7658 mark_addressable (*expr_p);
941f78d1 7659 ret = GS_OK;
0223e4f5 7660 }
6de9cd9a
DN
7661 break;
7662
7663 case CALL_EXPR:
90051e16 7664 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
726a989a 7665
0223e4f5
JM
7666 /* C99 code may assign to an array in a structure returned
7667 from a function, and this has undefined behavior only on
7668 execution, so create a temporary if an lvalue is
7669 required. */
7670 if (fallback == fb_lvalue)
7671 {
7672 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7673 mark_addressable (*expr_p);
941f78d1 7674 ret = GS_OK;
0223e4f5 7675 }
6de9cd9a
DN
7676 break;
7677
7678 case TREE_LIST:
282899df 7679 gcc_unreachable ();
6de9cd9a
DN
7680
7681 case COMPOUND_EXPR:
7682 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7683 break;
7684
2ec5deb5 7685 case COMPOUND_LITERAL_EXPR:
4c53d183
MM
7686 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7687 gimple_test_f, fallback);
2ec5deb5
PB
7688 break;
7689
6de9cd9a
DN
7690 case MODIFY_EXPR:
7691 case INIT_EXPR:
ebad5233
JM
7692 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7693 fallback != fb_none);
6de9cd9a
DN
7694 break;
7695
7696 case TRUTH_ANDIF_EXPR:
7697 case TRUTH_ORIF_EXPR:
1d15f620
KT
7698 {
7699 /* Preserve the original type of the expression and the
7700 source location of the outer expression. */
7701 tree org_type = TREE_TYPE (*expr_p);
7702 *expr_p = gimple_boolify (*expr_p);
4b4455e5 7703 *expr_p = build3_loc (input_location, COND_EXPR,
1d15f620
KT
7704 org_type, *expr_p,
7705 fold_convert_loc
4b4455e5 7706 (input_location,
1d15f620
KT
7707 org_type, boolean_true_node),
7708 fold_convert_loc
4b4455e5 7709 (input_location,
1d15f620
KT
7710 org_type, boolean_false_node));
7711 ret = GS_OK;
7712 break;
7713 }
6de9cd9a
DN
7714
7715 case TRUTH_NOT_EXPR:
3c6cbf7a 7716 {
53020648
RG
7717 tree type = TREE_TYPE (*expr_p);
7718 /* The parsers are careful to generate TRUTH_NOT_EXPR
7719 only with operands that are always zero or one.
7720 We do not fold here but handle the only interesting case
7721 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
3c6cbf7a 7722 *expr_p = gimple_boolify (*expr_p);
53020648
RG
7723 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7724 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7725 TREE_TYPE (*expr_p),
7726 TREE_OPERAND (*expr_p, 0));
7727 else
7728 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7729 TREE_TYPE (*expr_p),
7730 TREE_OPERAND (*expr_p, 0),
7731 build_int_cst (TREE_TYPE (*expr_p), 1));
7732 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7733 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7734 ret = GS_OK;
bd5d002e 7735 break;
3c6cbf7a 7736 }
67339062 7737
6de9cd9a
DN
7738 case ADDR_EXPR:
7739 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7740 break;
7741
8170608b
TB
7742 case ANNOTATE_EXPR:
7743 {
7744 tree cond = TREE_OPERAND (*expr_p, 0);
7745 tree id = TREE_OPERAND (*expr_p, 1);
7746 tree tmp = create_tmp_var_raw (TREE_TYPE(cond), NULL);
7747 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
7748 gimple call = gimple_build_call_internal (IFN_ANNOTATE, 2,
7749 cond, id);
7750 gimple_call_set_lhs (call, tmp);
7751 gimplify_seq_add_stmt (pre_p, call);
7752 *expr_p = tmp;
7753 ret = GS_ALL_DONE;
7754 break;
7755 }
7756
6de9cd9a 7757 case VA_ARG_EXPR:
cd3ce9b4 7758 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6de9cd9a
DN
7759 break;
7760
1043771b 7761 CASE_CONVERT:
6de9cd9a
DN
7762 if (IS_EMPTY_STMT (*expr_p))
7763 {
7764 ret = GS_ALL_DONE;
7765 break;
7766 }
7767
7768 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7769 || fallback == fb_none)
7770 {
7771 /* Just strip a conversion to void (or in void context) and
7772 try again. */
7773 *expr_p = TREE_OPERAND (*expr_p, 0);
941f78d1 7774 ret = GS_OK;
6de9cd9a
DN
7775 break;
7776 }
7777
7778 ret = gimplify_conversion (expr_p);
7779 if (ret == GS_ERROR)
7780 break;
7781 if (*expr_p != save_expr)
7782 break;
7783 /* FALLTHRU */
7784
7785 case FIX_TRUNC_EXPR:
6de9cd9a
DN
7786 /* unary_expr: ... | '(' cast ')' val | ... */
7787 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7788 is_gimple_val, fb_rvalue);
7789 recalculate_side_effects (*expr_p);
7790 break;
7791
6a720599 7792 case INDIRECT_REF:
70f34814
RG
7793 {
7794 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
3748f5c9 7795 bool notrap = TREE_THIS_NOTRAP (*expr_p);
70f34814
RG
7796 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7797
7798 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7799 if (*expr_p != save_expr)
7800 {
7801 ret = GS_OK;
7802 break;
7803 }
7804
7805 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7806 is_gimple_reg, fb_rvalue);
dca26746
RG
7807 if (ret == GS_ERROR)
7808 break;
70f34814 7809
dca26746 7810 recalculate_side_effects (*expr_p);
70f34814
RG
7811 *expr_p = fold_build2_loc (input_location, MEM_REF,
7812 TREE_TYPE (*expr_p),
7813 TREE_OPERAND (*expr_p, 0),
7814 build_int_cst (saved_ptr_type, 0));
7815 TREE_THIS_VOLATILE (*expr_p) = volatilep;
3748f5c9 7816 TREE_THIS_NOTRAP (*expr_p) = notrap;
70f34814
RG
7817 ret = GS_OK;
7818 break;
7819 }
7820
 7821	    /* We arrive here through the various re-gimplification paths. */
7822 case MEM_REF:
7823 /* First try re-folding the whole thing. */
7824 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7825 TREE_OPERAND (*expr_p, 0),
7826 TREE_OPERAND (*expr_p, 1));
7827 if (tmp)
941f78d1 7828 {
70f34814
RG
7829 *expr_p = tmp;
7830 recalculate_side_effects (*expr_p);
941f78d1
JM
7831 ret = GS_OK;
7832 break;
7833 }
01718e96
RG
7834 /* Avoid re-gimplifying the address operand if it is already
7835 in suitable form. Re-gimplifying would mark the address
7836 operand addressable. Always gimplify when not in SSA form
7837 as we still may have to gimplify decls with value-exprs. */
7838 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7839 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7840 {
7841 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7842 is_gimple_mem_ref_addr, fb_rvalue);
7843 if (ret == GS_ERROR)
7844 break;
7845 }
6de9cd9a 7846 recalculate_side_effects (*expr_p);
70f34814 7847 ret = GS_ALL_DONE;
6de9cd9a
DN
7848 break;
7849
01718e96 7850 /* Constants need not be gimplified. */
6de9cd9a
DN
7851 case INTEGER_CST:
7852 case REAL_CST:
325217ed 7853 case FIXED_CST:
6de9cd9a
DN
7854 case STRING_CST:
7855 case COMPLEX_CST:
7856 case VECTOR_CST:
7857 ret = GS_ALL_DONE;
7858 break;
7859
7860 case CONST_DECL:
0534fa56 7861 /* If we require an lvalue, such as for ADDR_EXPR, retain the
2a7e31df 7862 CONST_DECL node. Otherwise the decl is replaceable by its
0534fa56
RH
7863 value. */
7864 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7865 if (fallback & fb_lvalue)
7866 ret = GS_ALL_DONE;
7867 else
941f78d1
JM
7868 {
7869 *expr_p = DECL_INITIAL (*expr_p);
7870 ret = GS_OK;
7871 }
6de9cd9a
DN
7872 break;
7873
350fae66 7874 case DECL_EXPR:
726a989a 7875 ret = gimplify_decl_expr (expr_p, pre_p);
350fae66
RK
7876 break;
7877
6de9cd9a 7878 case BIND_EXPR:
c6c7698d 7879 ret = gimplify_bind_expr (expr_p, pre_p);
6de9cd9a
DN
7880 break;
7881
7882 case LOOP_EXPR:
7883 ret = gimplify_loop_expr (expr_p, pre_p);
7884 break;
7885
7886 case SWITCH_EXPR:
7887 ret = gimplify_switch_expr (expr_p, pre_p);
7888 break;
7889
6de9cd9a
DN
7890 case EXIT_EXPR:
7891 ret = gimplify_exit_expr (expr_p);
7892 break;
7893
7894 case GOTO_EXPR:
7895 /* If the target is not LABEL, then it is a computed jump
7896 and the target needs to be gimplified. */
7897 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
8c50b495
JJ
7898 {
7899 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7900 NULL, is_gimple_val, fb_rvalue);
7901 if (ret == GS_ERROR)
7902 break;
7903 }
726a989a
RB
7904 gimplify_seq_add_stmt (pre_p,
7905 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
941f78d1 7906 ret = GS_ALL_DONE;
6de9cd9a
DN
7907 break;
7908
2e28e797 7909 case PREDICT_EXPR:
726a989a
RB
7910 gimplify_seq_add_stmt (pre_p,
7911 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7912 PREDICT_EXPR_OUTCOME (*expr_p)));
7913 ret = GS_ALL_DONE;
7914 break;
2e28e797 7915
6de9cd9a
DN
7916 case LABEL_EXPR:
7917 ret = GS_ALL_DONE;
282899df
NS
7918 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7919 == current_function_decl);
726a989a
RB
7920 gimplify_seq_add_stmt (pre_p,
7921 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6de9cd9a
DN
7922 break;
7923
7924 case CASE_LABEL_EXPR:
726a989a 7925 ret = gimplify_case_label_expr (expr_p, pre_p);
6de9cd9a
DN
7926 break;
7927
7928 case RETURN_EXPR:
7929 ret = gimplify_return_expr (*expr_p, pre_p);
7930 break;
7931
7932 case CONSTRUCTOR:
48eb4e53
RK
7933 /* Don't reduce this in place; let gimplify_init_constructor work its
 7934	       magic. But if we're just elaborating this for side effects, just
7935 gimplify any element that has side-effects. */
7936 if (fallback == fb_none)
7937 {
4038c495 7938 unsigned HOST_WIDE_INT ix;
ac47786e 7939 tree val;
08330ec2 7940 tree temp = NULL_TREE;
ac47786e
NF
7941 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7942 if (TREE_SIDE_EFFECTS (val))
7943 append_to_statement_list (val, &temp);
48eb4e53 7944
08330ec2 7945 *expr_p = temp;
941f78d1 7946 ret = temp ? GS_OK : GS_ALL_DONE;
48eb4e53 7947 }
ca0b7d18
AP
7948 /* C99 code may assign to an array in a constructed
7949 structure or union, and this has undefined behavior only
7950 on execution, so create a temporary if an lvalue is
7951 required. */
7952 else if (fallback == fb_lvalue)
7953 {
7954 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7955 mark_addressable (*expr_p);
941f78d1 7956 ret = GS_OK;
ca0b7d18 7957 }
08330ec2
AP
7958 else
7959 ret = GS_ALL_DONE;
6de9cd9a
DN
7960 break;
7961
7962 /* The following are special cases that are not handled by the
7963 original GIMPLE grammar. */
7964
7965 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7966 eliminated. */
7967 case SAVE_EXPR:
7968 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7969 break;
7970
7971 case BIT_FIELD_REF:
ea814c66
EB
7972 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7973 post_p, is_gimple_lvalue, fb_either);
7974 recalculate_side_effects (*expr_p);
6de9cd9a
DN
7975 break;
7976
150e3929
RG
7977 case TARGET_MEM_REF:
7978 {
7979 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7980
23a534a1 7981 if (TMR_BASE (*expr_p))
150e3929 7982 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
4d948885 7983 post_p, is_gimple_mem_ref_addr, fb_either);
150e3929
RG
7984 if (TMR_INDEX (*expr_p))
7985 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7986 post_p, is_gimple_val, fb_rvalue);
4d948885
RG
7987 if (TMR_INDEX2 (*expr_p))
7988 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7989 post_p, is_gimple_val, fb_rvalue);
150e3929
RG
7990 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7991 ret = MIN (r0, r1);
7992 }
7993 break;
7994
6de9cd9a
DN
7995 case NON_LVALUE_EXPR:
7996 /* This should have been stripped above. */
282899df 7997 gcc_unreachable ();
6de9cd9a
DN
7998
7999 case ASM_EXPR:
8000 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
8001 break;
8002
8003 case TRY_FINALLY_EXPR:
8004 case TRY_CATCH_EXPR:
726a989a
RB
8005 {
8006 gimple_seq eval, cleanup;
8007 gimple try_;
8008
820055a0
DC
 8009	    /* Calls to destructors are generated automatically in FINALLY/CATCH
 8010	       blocks. They should have UNKNOWN_LOCATION as their location. However,
 8011	       gimplify_call_expr will reset such call stmts to input_location
 8012	       if it finds that a stmt's location is unknown. To prevent that for
 8013	       destructors, we set input_location to UNKNOWN_LOCATION here.
 8014	       Note that this only affects the destructor calls in the FINALLY/CATCH
 8015	       block; input_location is automatically restored to its original value
 8016	       by the end of gimplify_expr. */
8017 input_location = UNKNOWN_LOCATION;
726a989a
RB
8018 eval = cleanup = NULL;
8019 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
8020 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
cc8b343d
JJ
8021 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
8022 if (gimple_seq_empty_p (cleanup))
8023 {
8024 gimple_seq_add_seq (pre_p, eval);
8025 ret = GS_ALL_DONE;
8026 break;
8027 }
726a989a
RB
8028 try_ = gimple_build_try (eval, cleanup,
8029 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
8030 ? GIMPLE_TRY_FINALLY
8031 : GIMPLE_TRY_CATCH);
e368f44f
DC
8032 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
8033 gimple_set_location (try_, saved_location);
8034 else
8035 gimple_set_location (try_, EXPR_LOCATION (save_expr));
726a989a
RB
8036 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
8037 gimple_try_set_catch_is_cleanup (try_,
8038 TRY_CATCH_IS_CLEANUP (*expr_p));
8039 gimplify_seq_add_stmt (pre_p, try_);
8040 ret = GS_ALL_DONE;
8041 break;
8042 }
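
      /* For illustration: for C++ code such as

           { S s; f (); }   /* S has a non-trivial destructor.  */

         the genericized body contains a TRY_FINALLY_EXPR whose first operand
         holds the call to f and whose second operand holds the destructor
         call.  The code above gimplifies both operands and wraps them in a
         GIMPLE_TRY of kind GIMPLE_TRY_FINALLY, so the cleanup runs on every
         exit path from the block.  */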

    case CLEANUP_POINT_EXPR:
      ret = gimplify_cleanup_point_expr (expr_p, pre_p);
      break;

    case TARGET_EXPR:
      ret = gimplify_target_expr (expr_p, pre_p, post_p);
      break;

    case CATCH_EXPR:
      {
        gimple c;
        gimple_seq handler = NULL;
        gimplify_and_add (CATCH_BODY (*expr_p), &handler);
        c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
        gimplify_seq_add_stmt (pre_p, c);
        ret = GS_ALL_DONE;
        break;
      }

    case EH_FILTER_EXPR:
      {
        gimple ehf;
        gimple_seq failure = NULL;

        gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
        ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
        gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
        gimplify_seq_add_stmt (pre_p, ehf);
        ret = GS_ALL_DONE;
        break;
      }

    case OBJ_TYPE_REF:
      {
        enum gimplify_status r0, r1;
        r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
                            post_p, is_gimple_val, fb_rvalue);
        r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
                            post_p, is_gimple_val, fb_rvalue);
        TREE_SIDE_EFFECTS (*expr_p) = 0;
        ret = MIN (r0, r1);
      }
      break;

    case LABEL_DECL:
      /* We get here when taking the address of a label.  We mark
         the label as "forced", meaning it can never be removed and
         it is a potential target for any computed goto.  */
      FORCED_LABEL (*expr_p) = 1;
      ret = GS_ALL_DONE;
      break;

    case STATEMENT_LIST:
      ret = gimplify_statement_list (expr_p, pre_p);
      break;

    case WITH_SIZE_EXPR:
      {
        gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
                       post_p == &internal_post ? NULL : post_p,
                       gimple_test_f, fallback);
        gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
                       is_gimple_val, fb_rvalue);
        ret = GS_ALL_DONE;
      }
      break;

    case VAR_DECL:
    case PARM_DECL:
      ret = gimplify_var_or_parm_decl (expr_p);
      break;

    case RESULT_DECL:
      /* When within an OpenMP context, notice uses of variables.  */
      if (gimplify_omp_ctxp)
        omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
      ret = GS_ALL_DONE;
      break;

    case SSA_NAME:
      /* Allow callbacks into the gimplifier during optimization.  */
      ret = GS_ALL_DONE;
      break;

    case OMP_PARALLEL:
      gimplify_omp_parallel (expr_p, pre_p);
      ret = GS_ALL_DONE;
      break;

    case OMP_TASK:
      gimplify_omp_task (expr_p, pre_p);
      ret = GS_ALL_DONE;
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      ret = gimplify_omp_for (expr_p, pre_p);
      break;

    case OMP_SECTIONS:
    case OMP_SINGLE:
    case OMP_TARGET:
    case OMP_TARGET_DATA:
    case OMP_TEAMS:
      gimplify_omp_workshare (expr_p, pre_p);
      ret = GS_ALL_DONE;
      break;

    case OMP_TARGET_UPDATE:
      gimplify_omp_target_update (expr_p, pre_p);
      ret = GS_ALL_DONE;
      break;

    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_TASKGROUP:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      {
        gimple_seq body = NULL;
        gimple g;

        gimplify_and_add (OMP_BODY (*expr_p), &body);
        switch (TREE_CODE (*expr_p))
          {
          case OMP_SECTION:
            g = gimple_build_omp_section (body);
            break;
          case OMP_MASTER:
            g = gimple_build_omp_master (body);
            break;
          case OMP_TASKGROUP:
            {
              gimple_seq cleanup = NULL;
              tree fn
                = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
              g = gimple_build_call (fn, 0);
              gimple_seq_add_stmt (&cleanup, g);
              g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
              body = NULL;
              gimple_seq_add_stmt (&body, g);
              g = gimple_build_omp_taskgroup (body);
            }
            break;
          case OMP_ORDERED:
            g = gimple_build_omp_ordered (body);
            break;
          case OMP_CRITICAL:
            g = gimple_build_omp_critical (body,
                                           OMP_CRITICAL_NAME (*expr_p));
            break;
          default:
            gcc_unreachable ();
          }
        gimplify_seq_add_stmt (pre_p, g);
        ret = GS_ALL_DONE;
        break;
      }

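      /* For illustration: the OMP_TASKGROUP handling above effectively turns
         a taskgroup body BODY into

           try { BODY } finally { GOMP_taskgroup_end (); }

         wrapped in a GIMPLE_OMP_TASKGROUP, so the runtime's end-of-taskgroup
         hook is reached even if the body exits abnormally.  */
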
    case OMP_ATOMIC:
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
    case OMP_ATOMIC_CAPTURE_NEW:
      ret = gimplify_omp_atomic (expr_p, pre_p);
      break;

    case TRANSACTION_EXPR:
      ret = gimplify_transaction (expr_p, pre_p);
      break;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      {
        tree orig_type = TREE_TYPE (*expr_p);
        tree new_type, xop0, xop1;
        *expr_p = gimple_boolify (*expr_p);
        new_type = TREE_TYPE (*expr_p);
        if (!useless_type_conversion_p (orig_type, new_type))
          {
            *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
            ret = GS_OK;
            break;
          }

        /* Boolified binary truth expressions are semantically equivalent
           to bitwise binary expressions.  Canonicalize them to the
           bitwise variant.  */
        switch (TREE_CODE (*expr_p))
          {
          case TRUTH_AND_EXPR:
            TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
            break;
          case TRUTH_OR_EXPR:
            TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
            break;
          case TRUTH_XOR_EXPR:
            TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
            break;
          default:
            break;
          }
        /* Now make sure that the operands have types compatible with the
           expression's new_type.  */
        xop0 = TREE_OPERAND (*expr_p, 0);
        xop1 = TREE_OPERAND (*expr_p, 1);
        if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
          TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
                                                        new_type,
                                                        xop0);
        if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
          TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
                                                        new_type,
                                                        xop1);
        /* Continue classified as tcc_binary.  */
        goto expr_2;
      }
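
      /* For illustration: a TRUTH_AND_EXPR (the non-short-circuit form, e.g.
         what folding may produce for 'a && b' when both operands are free of
         side effects) is first boolified; if the surrounding context expected
         the original type (say int), a conversion back to that type is
         wrapped around the result, otherwise the node is simply rewritten as
         BIT_AND_EXPR and gimplified like any other binary expression.  */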

    case FMA_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      /* Classified as tcc_expression.  */
      goto expr_3;

    case POINTER_PLUS_EXPR:
      {
        enum gimplify_status r0, r1;
        r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
                            post_p, is_gimple_val, fb_rvalue);
        r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
                            post_p, is_gimple_val, fb_rvalue);
        recalculate_side_effects (*expr_p);
        ret = MIN (r0, r1);
        /* Convert &X + CST to invariant &MEM[&X, CST].  Do this
           after gimplifying the operands - it is similar to folding all
           gimplified stmts on creation so that they come out canonicalized,
           which is what we should eventually do anyway.  */
        if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
            && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
          {
            *expr_p = build_fold_addr_expr_with_type_loc
               (input_location,
                fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
                             TREE_OPERAND (*expr_p, 0),
                             fold_convert (ptr_type_node,
                                           TREE_OPERAND (*expr_p, 1))),
                TREE_TYPE (*expr_p));
            ret = MIN (ret, GS_OK);
          }
        break;
      }

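      /* For illustration: given 'static int a[4];', a POINTER_PLUS_EXPR such
         as '&a + 4' (with the offset in bytes) is rewritten here into a
         single invariant ADDR_EXPR of a MEM_REF, printed in GIMPLE dumps
         roughly as &MEM[(void *)&a + 4B], instead of surviving as a separate
         pointer-addition statement.  */
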
    default:
      switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
        {
        case tcc_comparison:
          /* Handle comparison of objects of non-scalar-mode aggregate types
             with a call to memcmp.  It would be nice to only have to do
             this for variable-sized objects, but then we'd have to allow
             the same nest of reference nodes we allow for MODIFY_EXPR and
             that's too complex.

             Compare scalar mode aggregates as scalar mode values.  Using
             memcmp for them would be very inefficient at best, and is
             plain wrong if bitfields are involved.  */
          {
            tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));

            /* Vector comparisons need no boolification.  */
            if (TREE_CODE (type) == VECTOR_TYPE)
              goto expr_2;
            else if (!AGGREGATE_TYPE_P (type))
              {
                tree org_type = TREE_TYPE (*expr_p);
                *expr_p = gimple_boolify (*expr_p);
                if (!useless_type_conversion_p (org_type,
                                                TREE_TYPE (*expr_p)))
                  {
                    *expr_p = fold_convert_loc (input_location,
                                                org_type, *expr_p);
                    ret = GS_OK;
                  }
                else
                  goto expr_2;
              }
            else if (TYPE_MODE (type) != BLKmode)
              ret = gimplify_scalar_mode_aggregate_compare (expr_p);
            else
              ret = gimplify_variable_sized_compare (expr_p);

            break;
          }

        /* If *EXPR_P does not need to be special-cased, handle it
           according to its class.  */
        case tcc_unary:
          ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
                               post_p, is_gimple_val, fb_rvalue);
          break;

        case tcc_binary:
        expr_2:
          {
            enum gimplify_status r0, r1;

            r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
                                post_p, is_gimple_val, fb_rvalue);
            r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
                                post_p, is_gimple_val, fb_rvalue);

            ret = MIN (r0, r1);
            break;
          }

        expr_3:
          {
            enum gimplify_status r0, r1, r2;

            r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
                                post_p, is_gimple_val, fb_rvalue);
            r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
                                post_p, is_gimple_val, fb_rvalue);
            r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
                                post_p, is_gimple_val, fb_rvalue);

            ret = MIN (MIN (r0, r1), r2);
            break;
          }

        case tcc_declaration:
        case tcc_constant:
          ret = GS_ALL_DONE;
          goto dont_recalculate;

        default:
          gcc_unreachable ();
        }

      recalculate_side_effects (*expr_p);

    dont_recalculate:
      break;
    }

      gcc_assert (*expr_p || ret != GS_OK);
    }
  while (ret == GS_OK);

  /* If we encountered an error_mark somewhere nested inside, either
     stub out the statement or propagate the error back out.  */
  if (ret == GS_ERROR)
    {
      if (is_statement)
        *expr_p = NULL;
      goto out;
    }

  /* This was only valid as a return value from the langhook, which
     we handled.  Make sure it doesn't escape from any other context.  */
  gcc_assert (ret != GS_UNHANDLED);

  if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
    {
      /* We aren't looking for a value, and we don't have a valid
         statement.  If it doesn't have side-effects, throw it away.  */
      if (!TREE_SIDE_EFFECTS (*expr_p))
        *expr_p = NULL;
      else if (!TREE_THIS_VOLATILE (*expr_p))
        {
          /* This is probably a _REF that contains something nested that
             has side effects.  Recurse through the operands to find it.  */
          enum tree_code code = TREE_CODE (*expr_p);

          switch (code)
            {
            case COMPONENT_REF:
            case REALPART_EXPR:
            case IMAGPART_EXPR:
            case VIEW_CONVERT_EXPR:
              gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                             gimple_test_f, fallback);
              break;

            case ARRAY_REF:
            case ARRAY_RANGE_REF:
              gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                             gimple_test_f, fallback);
              gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
                             gimple_test_f, fallback);
              break;

            default:
              /* Anything else with side-effects must be converted to
                 a valid statement before we get here.  */
              gcc_unreachable ();
            }

          *expr_p = NULL;
        }
      else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
               && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
        {
          /* Historically, the compiler has treated a bare reference
             to a non-BLKmode volatile lvalue as forcing a load.  */
          tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));

          /* Normally, we do not want to create a temporary for a
             TREE_ADDRESSABLE type because such a type should not be
             copied by bitwise-assignment.  However, we make an
             exception here, as all we are doing here is ensuring that
             we read the bytes that make up the type.  We use
             create_tmp_var_raw because create_tmp_var will abort when
             given a TREE_ADDRESSABLE type.  */
          tree tmp = create_tmp_var_raw (type, "vol");
          gimple_add_tmp_var (tmp);
          gimplify_assign (tmp, *expr_p, pre_p);
          *expr_p = NULL;
        }
      else
        /* We can't do anything useful with a volatile reference to
           an incomplete type, so just throw it away.  Likewise for
           a BLKmode type, since any implicit inner load should
           already have been turned into an explicit one by the
           gimplification process.  */
        *expr_p = NULL;
    }

  /* If we are gimplifying at the statement level, we're done.  Tack
     everything together and return.  */
  if (fallback == fb_none || is_statement)
    {
      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
         it out for GC to reclaim it.  */
      *expr_p = NULL_TREE;

      if (!gimple_seq_empty_p (internal_pre)
          || !gimple_seq_empty_p (internal_post))
        {
          gimplify_seq_add_seq (&internal_pre, internal_post);
          gimplify_seq_add_seq (pre_p, internal_pre);
        }

      /* The result of gimplifying *EXPR_P is going to be the last few
         statements in *PRE_P and *POST_P.  Add location information
         to all the statements that were added by the gimplification
         helpers.  */
      if (!gimple_seq_empty_p (*pre_p))
        annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);

      if (!gimple_seq_empty_p (*post_p))
        annotate_all_with_location_after (*post_p, post_last_gsi,
                                          input_location);

      goto out;
    }

#ifdef ENABLE_GIMPLE_CHECKING
  if (*expr_p)
    {
      enum tree_code code = TREE_CODE (*expr_p);
      /* These expressions should already be in gimple IR form.  */
      gcc_assert (code != MODIFY_EXPR
                  && code != ASM_EXPR
                  && code != BIND_EXPR
                  && code != CATCH_EXPR
                  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
                  && code != EH_FILTER_EXPR
                  && code != GOTO_EXPR
                  && code != LABEL_EXPR
                  && code != LOOP_EXPR
                  && code != SWITCH_EXPR
                  && code != TRY_FINALLY_EXPR
                  && code != OMP_CRITICAL
                  && code != OMP_FOR
                  && code != OMP_MASTER
                  && code != OMP_TASKGROUP
                  && code != OMP_ORDERED
                  && code != OMP_PARALLEL
                  && code != OMP_SECTIONS
                  && code != OMP_SECTION
                  && code != OMP_SINGLE);
    }
#endif

  /* Otherwise we're gimplifying a subexpression, so the resulting
     value is interesting.  If it's a valid operand that matches
     GIMPLE_TEST_F, we're done.  Unless we are handling some
     post-effects internally; if that's the case, we need to copy into
     a temporary before adding the post-effects to POST_P.  */
  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
    goto out;

  /* Otherwise, we need to create a new temporary for the gimplified
     expression.  */

  /* We can't return an lvalue if we have an internal postqueue.  The
     object the lvalue refers to would (probably) be modified by the
     postqueue; we need to copy the value out first, which means an
     rvalue.  */
  if ((fallback & fb_lvalue)
      && gimple_seq_empty_p (internal_post)
      && is_gimple_addressable (*expr_p))
    {
      /* An lvalue will do.  Take the address of the expression, store it
         in a temporary, and replace the expression with an INDIRECT_REF of
         that temporary.  */
      tmp = build_fold_addr_expr_loc (input_location, *expr_p);
      gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
      *expr_p = build_simple_mem_ref (tmp);
    }
  else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
    {
      /* An rvalue will do.  Assign the gimplified expression into a
         new temporary TMP and replace the original expression with
         TMP.  First, make sure that the expression has a type so that
         it can be assigned into a temporary.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
      *expr_p = get_formal_tmp_var (*expr_p, pre_p);
    }
  else
    {
#ifdef ENABLE_GIMPLE_CHECKING
      if (!(fallback & fb_mayfail))
        {
          fprintf (stderr, "gimplification failed:\n");
          print_generic_expr (stderr, *expr_p, 0);
          debug_tree (*expr_p);
          internal_error ("gimplification failed");
        }
#endif
      gcc_assert (fallback & fb_mayfail);

      /* If this is an asm statement, and the user asked for the
         impossible, don't die.  Fail and let gimplify_asm_expr
         issue an error.  */
      ret = GS_ERROR;
      goto out;
    }

  /* Make sure the temporary matches our predicate.  */
  gcc_assert ((*gimple_test_f) (*expr_p));

  if (!gimple_seq_empty_p (internal_post))
    {
      annotate_all_with_location (internal_post, input_location);
      gimplify_seq_add_seq (pre_p, internal_post);
    }

 out:
  input_location = saved_location;
  return ret;
}

/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
        {
          TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
          TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
        }
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed: for -O0 they should be variables
         with assigned stack slots, for -O1+ -g they should be tracked
         by VTA.  */
      if (!(TYPE_NAME (type)
            && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
            && DECL_IGNORED_P (TYPE_NAME (type)))
          && TYPE_DOMAIN (type)
          && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
        {
          t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
            DECL_IGNORED_P (t) = 0;
          t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
          if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
            DECL_IGNORED_P (t) = 0;
        }
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
        if (TREE_CODE (field) == FIELD_DECL)
          {
            gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
            gimplify_one_sizepos (&DECL_SIZE (field), list_p);
            gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
            gimplify_type_sizes (TREE_TYPE (field), list_p);
          }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
        /* We used to recurse on the pointed-to type here, which turned out to
           be incorrect because its definition might refer to variables not
           yet initialized at this point if a forward declaration is involved.

           It was actually useful for anonymous pointed-to types to ensure
           that the sizes evaluation dominates every possible later use of the
           values.  Restricting to such types here would be safe since there
           is no possible forward declaration around, but would introduce an
           undesirable middle-end semantic to anonymity.  We then defer to
           front-ends the responsibility of ensuring that the sizes are
           evaluated both early and late enough, e.g. by attaching artificial
           type declarations to the tree.  */
      break;

    default:
      break;
    }

  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}

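/* For illustration: for a C99 VLA declaration such as 'char buf[n + 1];'
   the array bound and the TYPE_SIZE/TYPE_SIZE_UNIT expressions derived from
   it are gimplified here, so every later use of the type's size sees a
   simple gimple value (a constant, a VAR_DECL or an SSA name), and the
   statements computing those values are appended to LIST_P.  */
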
/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (is_gimple_sizepos (expr))
    return;

  *expr_p = unshare_expr (expr);

  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
}

/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gimple
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_bind;
  struct gimplify_ctx gctx;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (&gctx);

  if (flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
        gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  cgn = cgraph_get_node (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = pointer_set_create ();

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_bind = gimple_seq_first_stmt (seq);
  if (!outer_bind)
    {
      outer_bind = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_bind);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_bind) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    ;
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
           parm; parm = DECL_CHAIN (parm))
        if (DECL_HAS_VALUE_EXPR_P (parm))
          {
            DECL_HAS_VALUE_EXPR_P (parm) = 0;
            DECL_IGNORED_P (parm) = 0;
          }
    }

  if (nonlocal_vlas)
    {
      pointer_set_destroy (nonlocal_vlas);
      nonlocal_vlas = NULL;
    }

  if (flag_openmp && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_CHECKING
  if (!seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}

typedef char *char_p; /* For DEF_VEC_P.  */

/* Return whether we should exclude FNDECL from instrumentation.  */

static bool
flag_instrument_functions_exclude_p (tree fndecl)
{
  vec<char_p> *v;

  v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = lang_hooks.decl_printable_name (fndecl, 0);
      FOR_EACH_VEC_ELT (*v, i, s)
        if (strstr (name, s) != NULL)
          return true;
    }

  v = (vec<char_p> *) flag_instrument_functions_exclude_files;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = DECL_SOURCE_FILE (fndecl);
      FOR_EACH_VEC_ELT (*v, i, s)
        if (strstr (name, s) != NULL)
          return true;
    }

  return false;
}

/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gimple bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
           || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (parm)
          && !needs_to_live_in_memory (parm))
        DECL_GIMPLE_REG_P (parm) = 1;
    }

  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gimple new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gimple call;

      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
                                build_fold_addr_expr (current_function_decl),
                                tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
                                build_fold_addr_expr (current_function_decl),
                                tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
         the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  pop_cfun ();
}
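
/* For illustration: with -finstrument-functions, the body BODY built above
   ends up wrapped roughly as

     return_addr = __builtin_return_address (0);
     __cyg_profile_func_enter (this_fn, return_addr);
     try { BODY }
     finally
       {
         return_addr = __builtin_return_address (0);
         __cyg_profile_func_exit (this_fn, return_addr);
       }

   where this_fn is the address of the current function; this is exactly what
   the GIMPLE_TRY constructed in the code above expresses.  */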

/* Some transformations like inlining may invalidate the GIMPLE form
   for operands.  This function traverses all the operands in STMT and
   gimplifies anything that is not a valid gimple operand.  Any new
   GIMPLE statements are inserted before *GSI_P.  */

void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
  size_t i, num_ops;
  tree lhs;
  gimple_seq pre = NULL;
  gimple post_stmt = NULL;
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
                     is_gimple_val, fb_rvalue);
      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
                     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_SWITCH:
      gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
                     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
                     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_ASM:
      {
        size_t i, noutputs = gimple_asm_noutputs (stmt);
        const char *constraint, **oconstraints;
        bool allows_mem, allows_reg, is_inout;

        oconstraints
          = (const char **) alloca ((noutputs) * sizeof (const char *));
        for (i = 0; i < noutputs; i++)
          {
            tree op = gimple_asm_output_op (stmt, i);
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
            oconstraints[i] = constraint;
            parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                                     &allows_reg, &is_inout);
            gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                           is_inout ? is_gimple_min_lval : is_gimple_lvalue,
                           fb_lvalue | fb_mayfail);
          }
        for (i = 0; i < gimple_asm_ninputs (stmt); i++)
          {
            tree op = gimple_asm_input_op (stmt, i);
            constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
              allows_reg = 0;
            if (!allows_reg && allows_mem)
              gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                             is_gimple_lvalue, fb_lvalue | fb_mayfail);
            else
              gimplify_expr (&TREE_VALUE (op), &pre, NULL,
                             is_gimple_asm_val, fb_rvalue);
          }
      }
      break;
    default:
      /* NOTE: We start gimplifying operands from last to first to
         make sure that side-effects on the RHS of calls, assignments
         and ASMs are executed before the LHS.  The ordering is not
         important for other statements.  */
      num_ops = gimple_num_ops (stmt);
      for (i = num_ops; i > 0; i--)
        {
          tree op = gimple_op (stmt, i - 1);
          if (op == NULL_TREE)
            continue;
          if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
            gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
          else if (i == 2
                   && is_gimple_assign (stmt)
                   && num_ops == 2
                   && get_gimple_rhs_class (gimple_expr_code (stmt))
                      == GIMPLE_SINGLE_RHS)
            gimplify_expr (&op, &pre, NULL,
                           rhs_predicate_for (gimple_assign_lhs (stmt)),
                           fb_rvalue);
          else if (i == 2 && is_gimple_call (stmt))
            {
              if (TREE_CODE (op) == FUNCTION_DECL)
                continue;
              gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
            }
          else
            gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
          gimple_set_op (stmt, i - 1, op);
        }

      lhs = gimple_get_lhs (stmt);
      /* If the LHS changed in a way that requires a simple RHS,
         create a temporary.  */
      if (lhs && !is_gimple_reg (lhs))
        {
          bool need_temp = false;

          if (is_gimple_assign (stmt)
              && num_ops == 2
              && get_gimple_rhs_class (gimple_expr_code (stmt))
                 == GIMPLE_SINGLE_RHS)
            gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
                           rhs_predicate_for (gimple_assign_lhs (stmt)),
                           fb_rvalue);
          else if (is_gimple_reg (lhs))
            {
              if (is_gimple_reg_type (TREE_TYPE (lhs)))
                {
                  if (is_gimple_call (stmt))
                    {
                      i = gimple_call_flags (stmt);
                      if ((i & ECF_LOOPING_CONST_OR_PURE)
                          || !(i & (ECF_CONST | ECF_PURE)))
                        need_temp = true;
                    }
                  if (stmt_can_throw_internal (stmt))
                    need_temp = true;
                }
            }
          else
            {
              if (is_gimple_reg_type (TREE_TYPE (lhs)))
                need_temp = true;
              else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
                {
                  if (is_gimple_call (stmt))
                    {
                      tree fndecl = gimple_call_fndecl (stmt);

                      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
                          && !(fndecl && DECL_RESULT (fndecl)
                               && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
                        need_temp = true;
                    }
                  else
                    need_temp = true;
                }
            }
          if (need_temp)
            {
              tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
              if (gimple_in_ssa_p (cfun))
                temp = make_ssa_name (temp, NULL);
              gimple_set_lhs (stmt, temp);
              post_stmt = gimple_build_assign (lhs, temp);
              if (TREE_CODE (lhs) == SSA_NAME)
                SSA_NAME_DEF_STMT (lhs) = post_stmt;
            }
        }
      break;
    }

  if (!gimple_seq_empty_p (pre))
    gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
  if (post_stmt)
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

  pop_gimplify_context (NULL);
}

/* Expand EXPR to a list of gimple statements STMTS.  GIMPLE_TEST_F specifies
   the predicate that will hold for the result.  If VAR is not NULL, make the
   base variable of the final destination be VAR if suitable.  */

tree
force_gimple_operand_1 (tree expr, gimple_seq *stmts,
                        gimple_predicate gimple_test_f, tree var)
{
  enum gimplify_status ret;
  struct gimplify_ctx gctx;
  location_t saved_location;

  *stmts = NULL;

  /* gimple_test_f might be more strict than is_gimple_val; make
     sure we pass both.  Just checking gimple_test_f doesn't work
     because most gimple predicates do not work recursively.  */
  if (is_gimple_val (expr)
      && (*gimple_test_f) (expr))
    return expr;

  push_gimplify_context (&gctx);
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
  gimplify_ctxp->allow_rhs_cond_expr = true;
  saved_location = input_location;
  input_location = UNKNOWN_LOCATION;

  if (var)
    {
      if (gimplify_ctxp->into_ssa
          && is_gimple_reg (var))
        var = make_ssa_name (var, NULL);
      expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
    }

  if (TREE_CODE (expr) != MODIFY_EXPR
      && TREE_TYPE (expr) == void_type_node)
    {
      gimplify_and_add (expr, stmts);
      expr = NULL_TREE;
    }
  else
    {
      ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
      gcc_assert (ret != GS_ERROR);
    }

  input_location = saved_location;
  pop_gimplify_context (NULL);

  return expr;
}

/* Expand EXPR to a list of gimple statements STMTS.  If SIMPLE is true,
   force the result to be either ssa_name or an invariant, otherwise
   just force it to be a rhs expression.  If VAR is not NULL, make the
   base variable of the final destination be VAR if suitable.  */

tree
force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
{
  return force_gimple_operand_1 (expr, stmts,
                                 simple ? is_gimple_val : is_gimple_reg_rhs,
                                 var);
}

/* Invoke force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F
   and VAR.  If some statements are produced, they are emitted at GSI.
   If BEFORE is true, the statements are appended before GSI, otherwise
   they are appended after it.  M specifies the way GSI moves after
   insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values).  */

tree
force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr,
                            gimple_predicate gimple_test_f,
                            tree var, bool before,
                            enum gsi_iterator_update m)
{
  gimple_seq stmts;

  expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var);

  if (!gimple_seq_empty_p (stmts))
    {
      if (before)
        gsi_insert_seq_before (gsi, stmts, m);
      else
        gsi_insert_seq_after (gsi, stmts, m);
    }

  return expr;
}

/* Invoke force_gimple_operand_1 for EXPR with parameter VAR.
   If SIMPLE is true, force the result to be either ssa_name or an invariant,
   otherwise just force it to be a rhs expression.  If some statements are
   produced, they are emitted at GSI.  If BEFORE is true, the statements are
   appended before GSI, otherwise they are appended after it.  M specifies
   the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING
   are the usual values).  */

tree
force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
                          bool simple_p, tree var, bool before,
                          enum gsi_iterator_update m)
{
  return force_gimple_operand_gsi_1 (gsi, expr,
                                     simple_p
                                     ? is_gimple_val : is_gimple_reg_rhs,
                                     var, before, m);
}

9257
4a7cb16f
AM
9258/* Return a dummy expression of type TYPE in order to keep going after an
9259 error. */
b184c8f1 9260
4a7cb16f
AM
9261static tree
9262dummy_object (tree type)
b184c8f1 9263{
4a7cb16f
AM
9264 tree t = build_int_cst (build_pointer_type (type), 0);
9265 return build2 (MEM_REF, type, t, t);
b184c8f1
AM
9266}
9267
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
         violation, so we cannot make this an error.  If this call is never
         executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
                           "%qT is promoted to %qT when passed through %<...%>",
                           type, promoted_type);
      if (!gave_help && warned)
        {
          gave_help = true;
          inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
                  promoted_type, type);
        }

      /* We can, however, treat "undefined" any way we please.
         Call abort to encourage the user to fix the program.  */
      if (warned)
        inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
         expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
                               builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
         mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
         from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
        {
          /* For this case, the backends will be expecting a pointer to
             TREE_TYPE (abi), but it's possible we've
             actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
             So fix it.  */
          if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
            {
              tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
              valist = fold_convert_loc (loc, p1,
                                         build_fold_addr_expr_loc (loc, valist));
            }

          gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
        }
      else
        gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
        /* FIXME: Once most targets are converted we should merely
           assert this is non-null.  */
        return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
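
/* For illustration: 'char c = va_arg (ap, char);' triggers the promotion
   diagnostic above, because a char argument is promoted to int when passed
   through '...'; the portable form is 'char c = (char) va_arg (ap, int);'.  */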

#include "gt-gimplify.h"