/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-pretty-print.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "cgraph.h"
#include "timevar.h"
#include "hashtab.h"
#include "flags.h"
#include "function.h"
#include "output.h"
#include "ggc.h"
#include "diagnostic-core.h"
#include "target.h"
#include "pointer-set.h"
#include "splay-tree.h"
#include "vec.h"
#include "gimple.h"
#include "tree-pass.h"

#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name.  */
#include "expr.h"		/* FIXME: for can_move_by_pieces
				   and STACK_CHECK_MAX_VAR_SIZE.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_DEBUG_PRIVATE = 256,
  GOVD_PRIVATE_OUTER_REF = 512,
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
};
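
/* These values are bit flags and are combined by OR'ing them together.
   For example, a variable named in a firstprivate clause and then
   referenced inside the region typically ends up recorded with
   GOVD_FIRSTPRIVATE | GOVD_EXPLICIT | GOVD_SEEN in the context's
   splay tree of variables.  */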

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  struct pointer_set_t *privatized_types;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;


/* Formal (expression) temporary table handling: multiple occurrences of
   the same scalar expression are evaluated into the same temporary.  */

typedef struct gimple_temp_hash_elt
{
  tree val;   /* Key */
  tree temp;  /* Value */
} elt_t;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);

/* Mark X addressable.  Unlike the langhook we expect X to be in gimple
   form and we don't do any syntax checking.  */

void
mark_addressable (tree x)
{
  while (handled_component_p (x))
    x = TREE_OPERAND (x, 0);
  if (TREE_CODE (x) == MEM_REF
      && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
    x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
  if (TREE_CODE (x) != VAR_DECL
      && TREE_CODE (x) != PARM_DECL
      && TREE_CODE (x) != RESULT_DECL)
    return;
  TREE_ADDRESSABLE (x) = 1;
}
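
/* For example, given a reference such as a.b.c[i], the loop above strips
   the handled components down to the base object A, and it is that
   VAR_DECL (or PARM_DECL/RESULT_DECL) which gets TREE_ADDRESSABLE set.  */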

/* Return a hash value for a formal temporary table entry.  */

static hashval_t
gimple_tree_hash (const void *p)
{
  tree t = ((const elt_t *) p)->val;
  return iterative_hash_expr (t, 0);
}

/* Compare two formal temporary table entries.  */

static int
gimple_tree_eq (const void *p1, const void *p2)
{
  tree t1 = ((const elt_t *) p1)->val;
  tree t2 = ((const elt_t *) p2)->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return 0;

  if (!operand_equal_p (t1, t2, 0))
    return 0;

#ifdef ENABLE_CHECKING
  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
#endif

  return 1;
}

/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_stmt, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

void
gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  si = gsi_last (*seq_p);
  gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
}

/* Shorter alias name for the above function for use in gimplify.c
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (struct gimplify_ctx *c)
{
  memset (c, '\0', sizeof (*c));
  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c && (c->bind_expr_stack == NULL
		    || VEC_empty (gimple, c->bind_expr_stack)));
  VEC_free (gimple, heap, c->bind_expr_stack);
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  if (c->temp_htab)
    htab_delete (c->temp_htab);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gimple gimple_bind)
{
  if (gimplify_ctxp->bind_expr_stack == NULL)
    gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
  VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
}

/* Return the first element of the stack of bindings.  */

gimple
gimple_current_bind_expr (void)
{
  return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
}

/* Return the stack of bindings created during gimplification.  */

VEC(gimple, heap) *
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = pointer_set_create ();
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  pointer_set_destroy (c->privatized_types);
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending of a period followed by
   up to five characters.  (Java uses ".class".)  */

static inline void
remove_suffix (char *name, int len)
{
  int i;

  for (i = 2; i < 8 && len > i; i++)
    {
      if (name[len - i] == '.')
	{
	  name[len - i] = '\0';
	  break;
	}
    }
}
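
/* For instance, "foo.class" (the Java case mentioned above) and "bar.c"
   are both truncated at the period, yielding "foo" and "bar".  */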

/* Create a new temporary name with PREFIX.  Return an identifier.  */

static GTY(()) unsigned int tmp_var_id_num;

tree
create_tmp_var_name (const char *prefix)
{
  char *tmp_name;

  if (prefix)
    {
      char *preftmp = ASTRDUP (prefix);

      remove_suffix (preftmp, strlen (preftmp));
      clean_symbol_name (preftmp);

      prefix = preftmp;
    }

  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
  return get_identifier (tmp_name);
}

/* Create a new temporary variable declaration of type TYPE.
   Do NOT push it into the current binding.  */

tree
create_tmp_var_raw (tree type, const char *prefix)
{
  tree tmp_var;

  tmp_var = build_decl (input_location,
			VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
			type);

  /* The variable was declared by the compiler.  */
  DECL_ARTIFICIAL (tmp_var) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (tmp_var) = 1;

  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  DECL_EXTERNAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  TREE_USED (tmp_var) = 1;

  return tmp_var;
}

/* Create a new temporary variable declaration of type TYPE.  DO push the
   variable into the current binding.  Further, assume that this is called
   only from gimplification or optimization, at which point the creation of
   certain types is a bug.  */

tree
create_tmp_var (tree type, const char *prefix)
{
  tree tmp_var;

  /* We don't allow types that are addressable (meaning we can't make copies),
     or incomplete.  We also used to reject all variable-sized objects here,
     but now support those for which a constant upper bound can be obtained.
     The processing for variable sizes is performed in gimple_add_tmp_var,
     the point at which it really matters and which is possibly reached via
     paths not going through this function, e.g. after direct calls to
     create_tmp_var_raw.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  tmp_var = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var (tmp_var);
  return tmp_var;
}

/* Create a new temporary variable declaration of type TYPE by calling
   create_tmp_var and if TYPE is a vector or a complex number, mark the new
   temporary as a gimple register.  */

tree
create_tmp_reg (tree type, const char *prefix)
{
  tree tmp;

  tmp = create_tmp_var (type, prefix);
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;

  return tmp;
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      void **slot;

      elt.val = val;
      if (gimplify_ctxp->temp_htab == NULL)
	gimplify_ctxp->temp_htab
	  = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
      slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *) &elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = (void *) elt_p;
	}
      else
	{
	  elt_p = (elt_t *) *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
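
/* For example, when optimizing, two formal uses of the same expression
   A + B within one gimplification context hash to the same table slot,
   so both evaluations reuse a single temporary.  */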

/* Returns true iff T is a valid RHS for an assignment to a renamed
   user -- or front-end generated artificial -- variable.  */

static bool
is_gimple_reg_rhs (tree t)
{
  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t) || is_gimple_lvalue (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  t = lookup_tmp_var (val, is_formal);

  if (is_formal
      && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (t) = 1;

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  /* If we're gimplifying into ssa, gimplify_modify_expr will have
     given our temporary an SSA name.  Find and return it.  */
  if (gimplify_ctxp->into_ssa)
    {
      gimple last = gimple_seq_last_stmt (*pre_p);
      t = gimple_get_lhs (last);
    }

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
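
/* Roughly speaking, for a source expression such as x + 1 these helpers
   append something like

     D.1234 = x + 1;

   to *PRE_P and hand back D.1234 (or its SSA name when gimplifying
   directly into SSA form); D.1234 stands for the anonymous temporary
   created above.  */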

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple scope, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gcc_assert (gimple_code (scope) == GIMPLE_BIND);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx && ctx->region_type == ORT_WORKSHARE)
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}

/* Determine whether to assign a location to the statement GS.  */

static bool
should_carry_location_p (gimple gs)
{
  /* Don't emit a line note for a label.  We particularly don't want to
     emit one for the break label, since it doesn't actually correspond
     to the beginning of the loop/switch.  */
  if (gimple_code (gs) == GIMPLE_LABEL)
    return false;

  return true;
}

/* Return true if a location should not be emitted for this statement
   by annotate_one_with_location.  */

static inline bool
gimple_do_not_emit_location_p (gimple g)
{
  return gimple_plf (g, GF_PLF_1);
}

/* Mark statement G so a location will not be emitted by
   annotate_one_with_location.  */

static inline void
gimple_set_do_not_emit_location (gimple g)
{
  /* The PLF flags are initialized to 0 when a new tuple is created,
     so no need to initialize it anywhere.  */
  gimple_set_plf (g, GF_PLF_1, true);
}

/* Set the location for gimple statement GS to LOCATION.  */

static void
annotate_one_with_location (gimple gs, location_t location)
{
  if (!gimple_has_location (gs)
      && !gimple_do_not_emit_location_p (gs)
      && should_carry_location_p (gs))
    gimple_set_location (gs, location);
}

/* Set LOCATION for all the statements after iterator GSI in sequence
   SEQ.  If GSI is pointing to the end of the sequence, start with the
   first statement in SEQ.  */

static void
annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
				  location_t location)
{
  if (gsi_end_p (gsi))
    gsi = gsi_start (seq);
  else
    gsi_next (&gsi);

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    annotate_one_with_location (gsi_stmt (gsi), location);
}

/* Set the location for all the statements in a sequence STMT_P to LOCATION.  */

void
annotate_all_with_location (gimple_seq stmt_p, location_t location)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (stmt_p))
    return;

  for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
    {
      gimple gs = gsi_stmt (i);
      annotate_one_with_location (gs, location);
    }
}

/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !pointer_set_insert ((struct pointer_set_t *) data, t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  struct pointer_set_t *visited
    = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  if (visited)
    pointer_set_destroy (visited);

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->symbol.decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->symbol.decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
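
/* As an illustration, voidifying MODIFY_EXPR <x, BIND_EXPR <{ ...; VAL }>>
   with that MODIFY_EXPR passed as TEMP pushes the assignment down onto the
   final value VAL inside the wrapper, gives every intervening wrapper void
   type, and returns TEMP; when no TEMP is supplied, a "retval" temporary
   is created and returned instead.  */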

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gimple *save, gimple *restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
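
/* In a GIMPLE dump the resulting pair looks roughly like

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);

   with the restore typically emitted on the cleanup path of a
   GIMPLE_TRY_FINALLY (see gimplify_bind_expr below).  */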

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gimple gimple_bind;
  gimple_seq body, cleanup;
  gimple stack_save;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (gimple_bind);

  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (gimple_bind, body);

  cleanup = NULL;
  stack_save = NULL;
  if (gimplify_ctxp->save_stack)
    {
      gimple stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  Note that mudflap depends on the
	 format of the emitted code: see mx_register_decls().  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t))
	{
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  TREE_THIS_VOLATILE (clobber) = 1;
	  gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
	}
    }

  if (cleanup)
    {
      gimple gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (gimple_bind, new_body);
    }

  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, gimple_bind);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
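
/* For example, a block-local aggregate that has to live in memory gets an
   assignment of an empty constructor marked volatile (shown as
   "agg ={v} {CLOBBER};" in dumps) on the try/finally cleanup path built
   above, marking the end of its lifetime for later optimizers.  */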

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  gimple ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      gimple ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl), NULL);

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
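
/* So, in a function returning int in a register, "return a + b;" is
   rewritten into roughly

     D.1234 = a + b;
     return D.1234;

   where D.1234 stands for the shared return temporary cached in
   gimplify_ctxp->return_temp.  */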

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  Note that mudflap depends on the format
     of the emitted code: see mx_register_decls().  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}
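
/* For instance, for "char buf[n];" this emits roughly

     buf.1 = __builtin_alloca_with_align (D.1234, 8);

   where D.1234 holds the gimplified size and buf.1 is the pointer
   temporary; later uses of BUF go through its DECL_VALUE_EXPR, i.e.
   *buf.1, and the enclosing bind gets a stack save/restore pair.  */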

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
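
/* For an automatic declaration with an initializer, e.g. "int x = f ();",
   DECL_INITIAL is cleared and an explicit initialization "x = f ();" is
   gimplified into *SEQ_P; static initializers are left in place and only
   scanned for label addresses.  */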

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
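
/* The lowered form is simply

     <D.start>:
     ... loop body ...
     goto <D.start>;
     <D.exit>:		(only if an EXIT_EXPR was seen)

   i.e. an endless loop expressed with a label and a backwards goto;
   <D.start> and <D.exit> stand for the artificial labels created here.  */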
1509
ad19c4be 1510/* Gimplify a statement list onto a sequence. These may be created either
726a989a
RB
1511 by an enlightened front-end, or by shortcut_cond_expr. */
1512
1513static enum gimplify_status
1514gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1515{
1516 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1517
1518 tree_stmt_iterator i = tsi_start (*expr_p);
1519
1520 while (!tsi_end_p (i))
6de9cd9a 1521 {
726a989a
RB
1522 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1523 tsi_delink (&i);
6de9cd9a 1524 }
6de9cd9a 1525
726a989a
RB
1526 if (temp)
1527 {
1528 *expr_p = temp;
1529 return GS_OK;
1530 }
6de9cd9a
DN
1531
1532 return GS_ALL_DONE;
1533}
68e72840 1534\f
f667741c
SB
1535/* Compare two case labels. Because the front end should already have
1536 made sure that case ranges do not overlap, it is enough to only compare
1537 the CASE_LOW values of each case label. */
1538
1539static int
1540compare_case_labels (const void *p1, const void *p2)
1541{
741ac903
KG
1542 const_tree const case1 = *(const_tree const*)p1;
1543 const_tree const case2 = *(const_tree const*)p2;
f667741c 1544
726a989a
RB
1545 /* The 'default' case label always goes first. */
1546 if (!CASE_LOW (case1))
1547 return -1;
1548 else if (!CASE_LOW (case2))
1549 return 1;
1550 else
1551 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
f667741c
SB
1552}
1553
165b54c3 1554/* Sort the case labels in LABEL_VEC in place in ascending order. */
0f1f6967
SB
1555
1556void
726a989a 1557sort_case_labels (VEC(tree,heap)* label_vec)
0f1f6967 1558{
5095da95 1559 VEC_qsort (tree, label_vec, compare_case_labels);
0f1f6967 1560}
68e72840
SB
1561\f
1562/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
1563
1564 LABELS is a vector that contains all case labels to look at.
1565
1566 INDEX_TYPE is the type of the switch index expression. Case labels
1567 in LABELS are discarded if their values are not in the value range
1568 covered by INDEX_TYPE. The remaining case label values are folded
1569 to INDEX_TYPE.
1570
1571 If a default case exists in LABELS, it is removed from LABELS and
1572 returned in DEFAULT_CASEP. If no default case exists, but the
1573 case labels already cover the whole range of INDEX_TYPE, a default
1574 case is returned pointing to one of the existing case labels.
1575 Otherwise DEFAULT_CASEP is set to NULL_TREE.
1576
1577 DEFAULT_CASEP may be NULL, in which case the above comment doesn't
1578 apply and no action is taken regardless of whether a default case is
1579 found or not. */
1580
1581void
1582preprocess_case_label_vec_for_gimple (VEC(tree,heap) *labels,
1583 tree index_type,
1584 tree *default_casep)
1585{
1586 tree min_value, max_value;
1587 tree default_case = NULL_TREE;
1588 size_t i, len;
1589
1590 i = 0;
1591 min_value = TYPE_MIN_VALUE (index_type);
1592 max_value = TYPE_MAX_VALUE (index_type);
1593 while (i < VEC_length (tree, labels))
1594 {
1595 tree elt = VEC_index (tree, labels, i);
1596 tree low = CASE_LOW (elt);
1597 tree high = CASE_HIGH (elt);
1598 bool remove_element = FALSE;
1599
1600 if (low)
1601 {
1602 gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
1603 gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);
1604
1605 /* This is a non-default case label, i.e. it has a value.
1606
1607 See if the case label is reachable within the range of
1608 the index type. Remove out-of-range case values. Turn
1609 case ranges into a canonical form (high > low strictly)
1610 and convert the case label values to the index type.
1611
1612 NB: The type of gimple_switch_index() may be the promoted
1613 type, but the case labels retain the original type. */
1614
1615 if (high)
1616 {
1617 /* This is a case range. Discard empty ranges.
1618 If the bounds or the range are equal, turn this
1619 into a simple (one-value) case. */
1620 int cmp = tree_int_cst_compare (high, low);
1621 if (cmp < 0)
1622 remove_element = TRUE;
1623 else if (cmp == 0)
1624 high = NULL_TREE;
1625 }
1626
1627 if (! high)
1628 {
1629 /* If the simple case value is unreachable, ignore it. */
1630 if ((TREE_CODE (min_value) == INTEGER_CST
1631 && tree_int_cst_compare (low, min_value) < 0)
1632 || (TREE_CODE (max_value) == INTEGER_CST
1633 && tree_int_cst_compare (low, max_value) > 0))
1634 remove_element = TRUE;
1635 else
1636 low = fold_convert (index_type, low);
1637 }
1638 else
1639 {
1640 /* If the entire case range is unreachable, ignore it. */
1641 if ((TREE_CODE (min_value) == INTEGER_CST
1642 && tree_int_cst_compare (high, min_value) < 0)
1643 || (TREE_CODE (max_value) == INTEGER_CST
1644 && tree_int_cst_compare (low, max_value) > 0))
1645 remove_element = TRUE;
1646 else
1647 {
1648 /* If the lower bound is less than the index type's
1649 minimum value, truncate the range bounds. */
1650 if (TREE_CODE (min_value) == INTEGER_CST
1651 && tree_int_cst_compare (low, min_value) < 0)
1652 low = min_value;
1653 low = fold_convert (index_type, low);
1654
1655 /* If the upper bound is greater than the index type's
1656 maximum value, truncate the range bounds. */
1657 if (TREE_CODE (max_value) == INTEGER_CST
1658 && tree_int_cst_compare (high, max_value) > 0)
1659 high = max_value;
1660 high = fold_convert (index_type, high);
1661 }
1662 }
1663
1664 CASE_LOW (elt) = low;
1665 CASE_HIGH (elt) = high;
1666 }
1667 else
1668 {
1669 gcc_assert (!default_case);
1670 default_case = elt;
1671 /* The default case must be passed separately to the
1672 gimple_build_switch routines. But if DEFAULT_CASEP
1673 is NULL, we do not remove the default case (it would
1674 be completely lost). */
1675 if (default_casep)
1676 remove_element = TRUE;
1677 }
1678
1679 if (remove_element)
1680 VEC_ordered_remove (tree, labels, i);
1681 else
1682 i++;
1683 }
1684 len = i;
1685
1686 if (!VEC_empty (tree, labels))
1687 sort_case_labels (labels);
1688
1689 if (default_casep && !default_case)
1690 {
1691 /* If the switch has no default label, add one, so that we jump
1692 around the switch body. If the labels already cover the whole
1693 range of the switch index_type, add the default label pointing
1694 to one of the existing labels. */
1695 if (len
1696 && TYPE_MIN_VALUE (index_type)
1697 && TYPE_MAX_VALUE (index_type)
1698 && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
1699 TYPE_MIN_VALUE (index_type)))
1700 {
1701 tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
1702 if (!high)
1703 high = CASE_LOW (VEC_index (tree, labels, len - 1));
1704 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
1705 {
1706 for (i = 1; i < len; i++)
1707 {
1708 high = CASE_LOW (VEC_index (tree, labels, i));
1709 low = CASE_HIGH (VEC_index (tree, labels, i - 1));
1710 if (!low)
1711 low = CASE_LOW (VEC_index (tree, labels, i - 1));
1712 if ((TREE_INT_CST_LOW (low) + 1
1713 != TREE_INT_CST_LOW (high))
1714 || (TREE_INT_CST_HIGH (low)
1715 + (TREE_INT_CST_LOW (high) == 0)
1716 != TREE_INT_CST_HIGH (high)))
1717 break;
1718 }
1719 if (i == len)
1720 {
1721 tree label = CASE_LABEL (VEC_index (tree, labels, 0));
1722 default_case = build_case_label (NULL_TREE, NULL_TREE,
1723 label);
1724 }
1725 }
1726 }
1727 }
1729 if (default_casep)
1730 *default_casep = default_case;
1731}
\f
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      VEC (tree,heap) *labels;
      VEC (tree,heap) *saved_labels;
      tree default_case = NULL_TREE;
      gimple gimple_switch;

      /* If someone can be bothered to fill in the labels, they can
         be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
         labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
                                            &default_case);

      if (!default_case)
        {
          gimple new_default;

          default_case
            = build_case_label (NULL_TREE, NULL_TREE,
                                create_artificial_label (UNKNOWN_LOCATION));
          new_default = gimple_build_label (CASE_LABEL (default_case));
          gimplify_seq_add_stmt (&switch_body_seq, new_default);
        }

      gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
                                               default_case, labels);
      gimplify_seq_add_stmt (pre_p, gimple_switch);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      VEC_free (tree, heap, labels);
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}

/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  gimple gimple_label;

  /* Invalid OpenMP programs can play Duff's Device type games with
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels)
      break;

  gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
  VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
  gimplify_seq_add_stmt (pre_p, gimple_label);

  return GS_ALL_DONE;
}

/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
   if necessary.  */

tree
build_and_jump (tree *label_p)
{
  if (label_p == NULL)
    /* If there's nowhere to jump, just fall through.  */
    return NULL_TREE;

  if (*label_p == NULL_TREE)
    {
      tree label = create_artificial_label (UNKNOWN_LOCATION);
      *label_p = label;
    }

  return build1 (GOTO_EXPR, void_type_node, *label_p);
}

/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
   This also involves building a label to jump to and communicating it to
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */

static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
  tree cond = TREE_OPERAND (*expr_p, 0);
  tree expr;

  expr = build_and_jump (&gimplify_ctxp->exit_label);
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  *expr_p = expr;

  return GS_OK;
}
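
/* For illustration only (a rough sketch, not actual gimplifier output):
   an EXIT_EXPR <cond> becomes, after the rewrite above, approximately

     if (cond) goto <exit_label>;

   where <exit_label> is whatever label build_and_jump recorded in
   gimplify_ctxp->exit_label for gimplify_loop_expr to emit after the
   loop body.  */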

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    FORCED_LABEL (*tp) = 1;

  return NULL_TREE;
}
1868
26d44ae2
RH
1869/* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1870 different from its canonical type, wrap the whole thing inside a
1871 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1872 type.
6de9cd9a 1873
26d44ae2
RH
1874 The canonical type of a COMPONENT_REF is the type of the field being
1875 referenced--unless the field is a bit-field which can be read directly
1876 in a smaller mode, in which case the canonical type is the
1877 sign-appropriate type corresponding to that mode. */
6de9cd9a 1878
26d44ae2
RH
1879static void
1880canonicalize_component_ref (tree *expr_p)
6de9cd9a 1881{
26d44ae2
RH
1882 tree expr = *expr_p;
1883 tree type;
6de9cd9a 1884
282899df 1885 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
6de9cd9a 1886
26d44ae2
RH
1887 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1888 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1889 else
1890 type = TREE_TYPE (TREE_OPERAND (expr, 1));
6de9cd9a 1891
b26c6d55
RG
1892 /* One could argue that all the stuff below is not necessary for
1893 the non-bitfield case and declare it a FE error if type
1894 adjustment would be needed. */
26d44ae2 1895 if (TREE_TYPE (expr) != type)
6de9cd9a 1896 {
b26c6d55 1897#ifdef ENABLE_TYPES_CHECKING
26d44ae2 1898 tree old_type = TREE_TYPE (expr);
b26c6d55
RG
1899#endif
1900 int type_quals;
1901
1902 /* We need to preserve qualifiers and propagate them from
1903 operand 0. */
1904 type_quals = TYPE_QUALS (type)
1905 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1906 if (TYPE_QUALS (type) != type_quals)
1907 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
6de9cd9a 1908
26d44ae2
RH
1909 /* Set the type of the COMPONENT_REF to the underlying type. */
1910 TREE_TYPE (expr) = type;
6de9cd9a 1911
b26c6d55
RG
1912#ifdef ENABLE_TYPES_CHECKING
1913 /* It is now a FE error, if the conversion from the canonical
1914 type to the original expression type is not useless. */
1915 gcc_assert (useless_type_conversion_p (old_type, type));
1916#endif
26d44ae2
RH
1917 }
1918}
6de9cd9a 1919
26d44ae2 1920/* If a NOP conversion is changing a pointer to array of foo to a pointer
d3147f64 1921 to foo, embed that change in the ADDR_EXPR by converting
26d44ae2
RH
1922 T array[U];
1923 (T *)&array
1924 ==>
1925 &array[L]
1926 where L is the lower bound. For simplicity, only do this for constant
04d86531
RG
1927 lower bound.
1928 The constraint is that the type of &array[L] is trivially convertible
1929 to T *. */
6de9cd9a 1930
26d44ae2
RH
1931static void
1932canonicalize_addr_expr (tree *expr_p)
1933{
1934 tree expr = *expr_p;
26d44ae2 1935 tree addr_expr = TREE_OPERAND (expr, 0);
04d86531 1936 tree datype, ddatype, pddatype;
6de9cd9a 1937
04d86531
RG
1938 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1939 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1940 || TREE_CODE (addr_expr) != ADDR_EXPR)
26d44ae2 1941 return;
6de9cd9a 1942
26d44ae2 1943 /* The addr_expr type should be a pointer to an array. */
04d86531 1944 datype = TREE_TYPE (TREE_TYPE (addr_expr));
26d44ae2
RH
1945 if (TREE_CODE (datype) != ARRAY_TYPE)
1946 return;
6de9cd9a 1947
04d86531
RG
1948 /* The pointer to element type shall be trivially convertible to
1949 the expression pointer type. */
26d44ae2 1950 ddatype = TREE_TYPE (datype);
04d86531 1951 pddatype = build_pointer_type (ddatype);
e5fdcd8c
RG
1952 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1953 pddatype))
26d44ae2 1954 return;
6de9cd9a 1955
26d44ae2 1956 /* The lower bound and element sizes must be constant. */
04d86531
RG
1957 if (!TYPE_SIZE_UNIT (ddatype)
1958 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
26d44ae2
RH
1959 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1960 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1961 return;
6de9cd9a 1962
26d44ae2 1963 /* All checks succeeded. Build a new node to merge the cast. */
04d86531 1964 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
26d44ae2 1965 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
5852948c 1966 NULL_TREE, NULL_TREE);
04d86531 1967 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
e5fdcd8c
RG
1968
1969 /* We can have stripped a required restrict qualifier above. */
1970 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1971 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
26d44ae2 1972}
6de9cd9a 1973
26d44ae2
RH
1974/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1975 underneath as appropriate. */
6de9cd9a 1976
26d44ae2
RH
1977static enum gimplify_status
1978gimplify_conversion (tree *expr_p)
d3147f64 1979{
db3927fb 1980 location_t loc = EXPR_LOCATION (*expr_p);
1043771b 1981 gcc_assert (CONVERT_EXPR_P (*expr_p));
c2255bc4 1982
0710ccff
NS
1983 /* Then strip away all but the outermost conversion. */
1984 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1985
1986 /* And remove the outermost conversion if it's useless. */
1987 if (tree_ssa_useless_type_conversion (*expr_p))
1988 *expr_p = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1989
26d44ae2
RH
1990 /* If we still have a conversion at the toplevel,
1991 then canonicalize some constructs. */
1043771b 1992 if (CONVERT_EXPR_P (*expr_p))
26d44ae2
RH
1993 {
1994 tree sub = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1995
26d44ae2
RH
1996 /* If a NOP conversion is changing the type of a COMPONENT_REF
1997 expression, then canonicalize its type now in order to expose more
1998 redundant conversions. */
1999 if (TREE_CODE (sub) == COMPONENT_REF)
2000 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
6de9cd9a 2001
26d44ae2
RH
2002 /* If a NOP conversion is changing a pointer to array of foo
2003 to a pointer to foo, embed that change in the ADDR_EXPR. */
2004 else if (TREE_CODE (sub) == ADDR_EXPR)
2005 canonicalize_addr_expr (expr_p);
2006 }
6de9cd9a 2007
8b17cc05
RG
2008 /* If we have a conversion to a non-register type force the
2009 use of a VIEW_CONVERT_EXPR instead. */
4f934809 2010 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
db3927fb 2011 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
4f934809 2012 TREE_OPERAND (*expr_p, 0));
8b17cc05 2013
6de9cd9a
DN
2014 return GS_OK;
2015}
2016
77f2a970
JJ
2017/* Nonlocal VLAs seen in the current function. */
2018static struct pointer_set_t *nonlocal_vlas;
2019
ad19c4be 2020/* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
a9f7c570
RH
2021 DECL_VALUE_EXPR, and it's worth re-examining things. */
2022
2023static enum gimplify_status
2024gimplify_var_or_parm_decl (tree *expr_p)
2025{
2026 tree decl = *expr_p;
2027
2028 /* ??? If this is a local variable, and it has not been seen in any
2029 outer BIND_EXPR, then it's probably the result of a duplicate
2030 declaration, for which we've already issued an error. It would
2031 be really nice if the front end wouldn't leak these at all.
2032 Currently the only known culprit is C++ destructors, as seen
2033 in g++.old-deja/g++.jason/binding.C. */
2034 if (TREE_CODE (decl) == VAR_DECL
2035 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2036 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2037 && decl_function_context (decl) == current_function_decl)
2038 {
1da2ed5f 2039 gcc_assert (seen_error ());
a9f7c570
RH
2040 return GS_ERROR;
2041 }
2042
953ff289
DN
2043 /* When within an OpenMP context, notice uses of variables. */
2044 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2045 return GS_ALL_DONE;
2046
a9f7c570
RH
2047 /* If the decl is an alias for another expression, substitute it now. */
2048 if (DECL_HAS_VALUE_EXPR_P (decl))
2049 {
77f2a970
JJ
2050 tree value_expr = DECL_VALUE_EXPR (decl);
2051
2052 /* For referenced nonlocal VLAs add a decl for debugging purposes
2053 to the current function. */
2054 if (TREE_CODE (decl) == VAR_DECL
2055 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2056 && nonlocal_vlas != NULL
2057 && TREE_CODE (value_expr) == INDIRECT_REF
2058 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2059 && decl_function_context (decl) != current_function_decl)
2060 {
2061 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
2062 while (ctx && ctx->region_type == ORT_WORKSHARE)
2063 ctx = ctx->outer_context;
2064 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
2065 {
2066 tree copy = copy_node (decl), block;
2067
2068 lang_hooks.dup_lang_specific_decl (copy);
2eb79bbb 2069 SET_DECL_RTL (copy, 0);
77f2a970
JJ
2070 TREE_USED (copy) = 1;
2071 block = DECL_INITIAL (current_function_decl);
910ad8de 2072 DECL_CHAIN (copy) = BLOCK_VARS (block);
77f2a970
JJ
2073 BLOCK_VARS (block) = copy;
2074 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2075 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2076 }
2077 }
2078
2079 *expr_p = unshare_expr (value_expr);
a9f7c570
RH
2080 return GS_OK;
2081 }
2082
2083 return GS_ALL_DONE;
2084}
2085
6de9cd9a 2086/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
726a989a 2087 node *EXPR_P.
6de9cd9a
DN
2088
2089 compound_lval
2090 : min_lval '[' val ']'
2091 | min_lval '.' ID
2092 | compound_lval '[' val ']'
2093 | compound_lval '.' ID
2094
2095 This is not part of the original SIMPLE definition, which separates
2096 array and member references, but it seems reasonable to handle them
2097 together. Also, this way we don't run into problems with union
2098 aliasing; gcc requires that for accesses through a union to alias, the
2099 union reference must be explicit, which was not always the case when we
2100 were splitting up array and member refs.
2101
726a989a 2102 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
2103 *EXPR_P should be stored.
2104
726a989a 2105 POST_P points to the sequence where side effects that must happen after
6de9cd9a
DN
2106 *EXPR_P should be stored. */
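
/* For illustration only (identifiers invented for the example): for an
   rvalue use of "a.b[i].c" the loop below walks the reference chain onto
   STACK (the COMPONENT_REF for .c, then the ARRAY_REF, then the
   COMPONENT_REF for .b), any variable bounds or offsets are gimplified
   first, then the base "a", and finally the index "i", so the side
   effects end up in PRE_P in source order.  */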
2107
2108static enum gimplify_status
726a989a
RB
2109gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2110 fallback_t fallback)
6de9cd9a
DN
2111{
2112 tree *p;
ec234842 2113 VEC(tree,heap) *stack;
941f78d1 2114 enum gimplify_status ret = GS_ALL_DONE, tret;
af72267c 2115 int i;
db3927fb 2116 location_t loc = EXPR_LOCATION (*expr_p);
941f78d1 2117 tree expr = *expr_p;
6de9cd9a 2118
6de9cd9a 2119 /* Create a stack of the subexpressions so later we can walk them in
ec234842
KH
2120 order from inner to outer. */
2121 stack = VEC_alloc (tree, heap, 10);
6de9cd9a 2122
afe84921 2123 /* We can handle anything that get_inner_reference can deal with. */
6a720599
JM
2124 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2125 {
a9f7c570 2126 restart:
6a720599
JM
2127 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2128 if (TREE_CODE (*p) == INDIRECT_REF)
db3927fb 2129 *p = fold_indirect_ref_loc (loc, *p);
a9f7c570
RH
2130
2131 if (handled_component_p (*p))
2132 ;
2133 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2134 additional COMPONENT_REFs. */
2135 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
2136 && gimplify_var_or_parm_decl (p) == GS_OK)
2137 goto restart;
2138 else
6a720599 2139 break;
b8698a0f 2140
ec234842 2141 VEC_safe_push (tree, heap, stack, *p);
6a720599 2142 }
6de9cd9a 2143
ec234842 2144 gcc_assert (VEC_length (tree, stack));
9e51aaf5 2145
44de5aeb
RK
2146 /* Now STACK is a stack of pointers to all the refs we've walked through
2147 and P points to the innermost expression.
6de9cd9a 2148
af72267c
RK
     Java requires that we elaborate nodes in source order.  That
2150 means we must gimplify the inner expression followed by each of
2151 the indices, in order. But we can't gimplify the inner
2152 expression until we deal with any variable bounds, sizes, or
2153 positions in order to deal with PLACEHOLDER_EXPRs.
2154
2155 So we do this in three steps. First we deal with the annotations
2156 for any variables in the components, then we gimplify the base,
2157 then we gimplify any indices, from left to right. */
ec234842 2158 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
6de9cd9a 2159 {
ec234842 2160 tree t = VEC_index (tree, stack, i);
44de5aeb
RK
2161
2162 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6de9cd9a 2163 {
44de5aeb
RK
2164 /* Gimplify the low bound and element type size and put them into
2165 the ARRAY_REF. If these values are set, they have already been
2166 gimplified. */
726a989a 2167 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb 2168 {
a7cc468a
RH
2169 tree low = unshare_expr (array_ref_low_bound (t));
2170 if (!is_gimple_min_invariant (low))
44de5aeb 2171 {
726a989a
RB
2172 TREE_OPERAND (t, 2) = low;
2173 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 2174 post_p, is_gimple_reg,
726a989a 2175 fb_rvalue);
44de5aeb
RK
2176 ret = MIN (ret, tret);
2177 }
2178 }
19c44640
JJ
2179 else
2180 {
2181 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2182 is_gimple_reg, fb_rvalue);
2183 ret = MIN (ret, tret);
2184 }
44de5aeb 2185
19c44640 2186 if (TREE_OPERAND (t, 3) == NULL_TREE)
44de5aeb
RK
2187 {
2188 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2189 tree elmt_size = unshare_expr (array_ref_element_size (t));
a4e9ffe5 2190 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
44de5aeb
RK
2191
2192 /* Divide the element size by the alignment of the element
2193 type (above). */
ad19c4be
EB
2194 elmt_size
2195 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
44de5aeb 2196
a7cc468a 2197 if (!is_gimple_min_invariant (elmt_size))
44de5aeb 2198 {
726a989a
RB
2199 TREE_OPERAND (t, 3) = elmt_size;
2200 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
ba4d8f9d 2201 post_p, is_gimple_reg,
726a989a 2202 fb_rvalue);
44de5aeb
RK
2203 ret = MIN (ret, tret);
2204 }
6de9cd9a 2205 }
19c44640
JJ
2206 else
2207 {
2208 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2209 is_gimple_reg, fb_rvalue);
2210 ret = MIN (ret, tret);
2211 }
6de9cd9a 2212 }
44de5aeb
RK
2213 else if (TREE_CODE (t) == COMPONENT_REF)
2214 {
2215 /* Set the field offset into T and gimplify it. */
19c44640 2216 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb
RK
2217 {
2218 tree offset = unshare_expr (component_ref_field_offset (t));
2219 tree field = TREE_OPERAND (t, 1);
2220 tree factor
2221 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2222
2223 /* Divide the offset by its alignment. */
db3927fb 2224 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
44de5aeb 2225
a7cc468a 2226 if (!is_gimple_min_invariant (offset))
44de5aeb 2227 {
726a989a
RB
2228 TREE_OPERAND (t, 2) = offset;
2229 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 2230 post_p, is_gimple_reg,
726a989a 2231 fb_rvalue);
44de5aeb
RK
2232 ret = MIN (ret, tret);
2233 }
2234 }
19c44640
JJ
2235 else
2236 {
2237 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2238 is_gimple_reg, fb_rvalue);
2239 ret = MIN (ret, tret);
2240 }
44de5aeb 2241 }
af72267c
RK
2242 }
2243
a9f7c570
RH
2244 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2245 so as to match the min_lval predicate. Failure to do so may result
2246 in the creation of large aggregate temporaries. */
2247 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2248 fallback | fb_lvalue);
af72267c
RK
2249 ret = MIN (ret, tret);
2250
48eb4e53
RK
2251 /* And finally, the indices and operands to BIT_FIELD_REF. During this
2252 loop we also remove any useless conversions. */
ec234842 2253 for (; VEC_length (tree, stack) > 0; )
af72267c 2254 {
ec234842 2255 tree t = VEC_pop (tree, stack);
af72267c
RK
2256
2257 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2258 {
ba4d8f9d 2259 /* Gimplify the dimension. */
af72267c
RK
2260 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2261 {
2262 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
ba4d8f9d 2263 is_gimple_val, fb_rvalue);
af72267c
RK
2264 ret = MIN (ret, tret);
2265 }
2266 }
44de5aeb
RK
2267 else if (TREE_CODE (t) == BIT_FIELD_REF)
2268 {
2269 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2270 is_gimple_val, fb_rvalue);
2271 ret = MIN (ret, tret);
2272 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2273 is_gimple_val, fb_rvalue);
2274 ret = MIN (ret, tret);
2275 }
48eb4e53
RK
2276
2277 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2278
726a989a
RB
2279 /* The innermost expression P may have originally had
2280 TREE_SIDE_EFFECTS set which would have caused all the outer
2281 expressions in *EXPR_P leading to P to also have had
2282 TREE_SIDE_EFFECTS set. */
6de9cd9a 2283 recalculate_side_effects (t);
6de9cd9a
DN
2284 }
2285
2286 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
90051e16 2287 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
6de9cd9a
DN
2288 {
2289 canonicalize_component_ref (expr_p);
6de9cd9a
DN
2290 }
2291
ec234842 2292 VEC_free (tree, heap, stack);
07724022 2293
941f78d1
JM
2294 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2295
6de9cd9a
DN
2296 return ret;
2297}
2298
206048bd
VR
2299/* Gimplify the self modifying expression pointed to by EXPR_P
2300 (++, --, +=, -=).
6de9cd9a
DN
2301
2302 PRE_P points to the list where side effects that must happen before
2303 *EXPR_P should be stored.
2304
2305 POST_P points to the list where side effects that must happen after
2306 *EXPR_P should be stored.
2307
2308 WANT_VALUE is nonzero iff we want to use the value of this expression
2309 in another expression. */
2310
2311static enum gimplify_status
726a989a 2312gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
44de5aeb 2313 bool want_value)
6de9cd9a
DN
2314{
2315 enum tree_code code;
726a989a
RB
2316 tree lhs, lvalue, rhs, t1;
2317 gimple_seq post = NULL, *orig_post_p = post_p;
6de9cd9a
DN
2318 bool postfix;
2319 enum tree_code arith_code;
2320 enum gimplify_status ret;
db3927fb 2321 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
2322
2323 code = TREE_CODE (*expr_p);
2324
282899df
NS
2325 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2326 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
6de9cd9a
DN
2327
2328 /* Prefix or postfix? */
2329 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2330 /* Faster to treat as prefix if result is not used. */
2331 postfix = want_value;
2332 else
2333 postfix = false;
2334
82181741
JJ
2335 /* For postfix, make sure the inner expression's post side effects
2336 are executed after side effects from this expression. */
2337 if (postfix)
2338 post_p = &post;
2339
6de9cd9a
DN
2340 /* Add or subtract? */
2341 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2342 arith_code = PLUS_EXPR;
2343 else
2344 arith_code = MINUS_EXPR;
2345
2346 /* Gimplify the LHS into a GIMPLE lvalue. */
2347 lvalue = TREE_OPERAND (*expr_p, 0);
2348 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2349 if (ret == GS_ERROR)
2350 return ret;
2351
2352 /* Extract the operands to the arithmetic operation. */
2353 lhs = lvalue;
2354 rhs = TREE_OPERAND (*expr_p, 1);
2355
2356 /* For postfix operator, we evaluate the LHS to an rvalue and then use
ba4d8f9d
RG
2357 that as the result value and in the postqueue operation. We also
2358 make sure to make lvalue a minimal lval, see
2359 gcc.c-torture/execute/20040313-1.c for an example where this matters. */
6de9cd9a
DN
2360 if (postfix)
2361 {
ba4d8f9d
RG
2362 if (!is_gimple_min_lval (lvalue))
2363 {
2364 mark_addressable (lvalue);
db3927fb 2365 lvalue = build_fold_addr_expr_loc (input_location, lvalue);
ba4d8f9d 2366 gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
db3927fb 2367 lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
ba4d8f9d 2368 }
6de9cd9a
DN
2369 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2370 if (ret == GS_ERROR)
2371 return ret;
2372 }
2373
5be014d5
AP
  /* For pointer increment and decrement, use POINTER_PLUS_EXPR.  */
2375 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2376 {
0d82a1c8 2377 rhs = convert_to_ptrofftype_loc (loc, rhs);
5be014d5 2378 if (arith_code == MINUS_EXPR)
db3927fb 2379 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
5be014d5
AP
2380 arith_code = POINTER_PLUS_EXPR;
2381 }
2382
6de9cd9a
DN
2383 if (postfix)
2384 {
cf1867a0
RG
2385 tree t2 = get_initialized_tmp_var (lhs, pre_p, NULL);
2386 t1 = build2 (arith_code, TREE_TYPE (*expr_p), t2, rhs);
2387 gimplify_assign (lvalue, t1, pre_p);
726a989a 2388 gimplify_seq_add_seq (orig_post_p, post);
cf1867a0 2389 *expr_p = t2;
6de9cd9a
DN
2390 return GS_ALL_DONE;
2391 }
2392 else
2393 {
cf1867a0 2394 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
726a989a 2395 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
6de9cd9a
DN
2396 return GS_OK;
2397 }
2398}
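
/* For illustration only (temporaries named arbitrarily): with the postfix
   handling above, "x = a[i++];" gimplifies roughly to

     t = i;
     i = t + 1;
     x = a[t];

   i.e. the saved old value t is both fed into the arithmetic assigned back
   to the lvalue and returned as the value of the whole expression, while a
   prefix "++i" simply becomes "i = i + 1".  */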
2399
d25cee4d
RH
2400/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2401
2402static void
2403maybe_with_size_expr (tree *expr_p)
2404{
61025d1b
RK
2405 tree expr = *expr_p;
2406 tree type = TREE_TYPE (expr);
2407 tree size;
d25cee4d 2408
61025d1b
RK
2409 /* If we've already wrapped this or the type is error_mark_node, we can't do
2410 anything. */
2411 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2412 || type == error_mark_node)
d25cee4d
RH
2413 return;
2414
61025d1b 2415 /* If the size isn't known or is a constant, we have nothing to do. */
d25cee4d 2416 size = TYPE_SIZE_UNIT (type);
61025d1b
RK
2417 if (!size || TREE_CODE (size) == INTEGER_CST)
2418 return;
2419
2420 /* Otherwise, make a WITH_SIZE_EXPR. */
2421 size = unshare_expr (size);
2422 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2423 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
d25cee4d
RH
2424}
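
/* For illustration only (a hypothetical variable-length type): for an
   object of type "char[n]" the expression is wrapped roughly as
   WITH_SIZE_EXPR <obj, size>, where the size operand is the unshared
   TYPE_SIZE_UNIT expression in terms of n, so later code knows how many
   bytes the object occupies.  */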
2425
726a989a 2426/* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
1282697f
AH
2427 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2428 the CALL_EXPR. */
e4f78bd4
JM
2429
2430static enum gimplify_status
1282697f 2431gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
e4f78bd4
JM
2432{
2433 bool (*test) (tree);
2434 fallback_t fb;
2435
2436 /* In general, we allow lvalues for function arguments to avoid
2437 extra overhead of copying large aggregates out of even larger
2438 aggregates into temporaries only to copy the temporaries to
2439 the argument list. Make optimizers happy by pulling out to
2440 temporaries those types that fit in registers. */
726a989a 2441 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
e4f78bd4
JM
2442 test = is_gimple_val, fb = fb_rvalue;
2443 else
b4ef8aac
JM
2444 {
2445 test = is_gimple_lvalue, fb = fb_either;
2446 /* Also strip a TARGET_EXPR that would force an extra copy. */
2447 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2448 {
2449 tree init = TARGET_EXPR_INITIAL (*arg_p);
2450 if (init
2451 && !VOID_TYPE_P (TREE_TYPE (init)))
2452 *arg_p = init;
2453 }
2454 }
e4f78bd4 2455
d25cee4d 2456 /* If this is a variable sized type, we must remember the size. */
726a989a 2457 maybe_with_size_expr (arg_p);
d25cee4d 2458
c2255bc4 2459 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
1282697f
AH
2460 /* Make sure arguments have the same location as the function call
2461 itself. */
2462 protected_set_expr_location (*arg_p, call_location);
2463
e4f78bd4
JM
2464 /* There is a sequence point before a function call. Side effects in
2465 the argument list must occur before the actual call. So, when
2466 gimplifying arguments, force gimplify_expr to use an internal
2467 post queue which is then appended to the end of PRE_P. */
726a989a 2468 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
e4f78bd4
JM
2469}
2470
726a989a 2471/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
90051e16 2472 WANT_VALUE is true if the result of the call is desired. */
6de9cd9a
DN
2473
2474static enum gimplify_status
726a989a 2475gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a 2476{
f20ca725 2477 tree fndecl, parms, p, fnptrtype;
6de9cd9a 2478 enum gimplify_status ret;
5039610b 2479 int i, nargs;
726a989a
RB
2480 gimple call;
2481 bool builtin_va_start_p = FALSE;
db3927fb 2482 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a 2483
282899df 2484 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
6de9cd9a 2485
d3147f64 2486 /* For reliable diagnostics during inlining, it is necessary that
6de9cd9a 2487 every call_expr be annotated with file and line. */
a281759f
PB
2488 if (! EXPR_HAS_LOCATION (*expr_p))
2489 SET_EXPR_LOCATION (*expr_p, input_location);
6de9cd9a
DN
2490
2491 /* This may be a call to a builtin function.
2492
2493 Builtin function calls may be transformed into different
2494 (and more efficient) builtin function calls under certain
2495 circumstances. Unfortunately, gimplification can muck things
2496 up enough that the builtin expanders are not aware that certain
2497 transformations are still valid.
2498
2499 So we attempt transformation/gimplification of the call before
2500 we gimplify the CALL_EXPR. At this time we do not manage to
2501 transform all calls in the same manner as the expanders do, but
2502 we do transform most of them. */
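  /* For illustration only (one folding among many): a call such as
     strlen ("abcd") can be folded away entirely at this point, so the
     constant 4 is used and no GIMPLE_CALL is ever emitted for it.  */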
726a989a
RB
2503 fndecl = get_callee_fndecl (*expr_p);
2504 if (fndecl && DECL_BUILT_IN (fndecl))
6de9cd9a 2505 {
db3927fb 2506 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
6de9cd9a 2507
82d6e6fc 2508 if (new_tree && new_tree != *expr_p)
6de9cd9a
DN
2509 {
2510 /* There was a transformation of this call which computes the
2511 same value, but in a more efficient way. Return and try
2512 again. */
82d6e6fc 2513 *expr_p = new_tree;
6de9cd9a
DN
2514 return GS_OK;
2515 }
e4f78bd4 2516
726a989a
RB
2517 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2518 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2efcfa4e 2519 {
726a989a 2520 builtin_va_start_p = TRUE;
5039610b 2521 if (call_expr_nargs (*expr_p) < 2)
2efcfa4e
AP
2522 {
2523 error ("too few arguments to function %<va_start%>");
c2255bc4 2524 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2525 return GS_OK;
2526 }
b8698a0f 2527
5039610b 2528 if (fold_builtin_next_arg (*expr_p, true))
2efcfa4e 2529 {
c2255bc4 2530 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2531 return GS_OK;
2532 }
2efcfa4e 2533 }
6de9cd9a
DN
2534 }
2535
f20ca725
RG
2536 /* Remember the original function pointer type. */
2537 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2538
6de9cd9a
DN
2539 /* There is a sequence point before the call, so any side effects in
2540 the calling expression must occur before the actual call. Force
2541 gimplify_expr to use an internal post queue. */
5039610b 2542 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
0f59171d 2543 is_gimple_call_addr, fb_rvalue);
6de9cd9a 2544
5039610b
SL
2545 nargs = call_expr_nargs (*expr_p);
2546
e36711f3 2547 /* Get argument types for verification. */
726a989a 2548 fndecl = get_callee_fndecl (*expr_p);
e36711f3 2549 parms = NULL_TREE;
726a989a
RB
2550 if (fndecl)
2551 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
e36711f3
RG
2552 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2553 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2554
726a989a 2555 if (fndecl && DECL_ARGUMENTS (fndecl))
f9487002 2556 p = DECL_ARGUMENTS (fndecl);
004e2fa7 2557 else if (parms)
f9487002 2558 p = parms;
6ef5231b 2559 else
498e51ca 2560 p = NULL_TREE;
f9487002
JJ
2561 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2562 ;
6ef5231b
JJ
2563
2564 /* If the last argument is __builtin_va_arg_pack () and it is not
2565 passed as a named argument, decrease the number of CALL_EXPR
2566 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2567 if (!p
2568 && i < nargs
2569 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2570 {
2571 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2572 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2573
2574 if (last_arg_fndecl
2575 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2576 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2577 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2578 {
2579 tree call = *expr_p;
2580
2581 --nargs;
db3927fb
AH
2582 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2583 CALL_EXPR_FN (call),
2584 nargs, CALL_EXPR_ARGP (call));
726a989a
RB
2585
2586 /* Copy all CALL_EXPR flags, location and block, except
6ef5231b
JJ
2587 CALL_EXPR_VA_ARG_PACK flag. */
2588 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2589 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2590 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2591 = CALL_EXPR_RETURN_SLOT_OPT (call);
2592 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
5e278028 2593 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
6ef5231b 2594 TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
726a989a 2595
6ef5231b
JJ
2596 /* Set CALL_EXPR_VA_ARG_PACK. */
2597 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2598 }
2599 }
e36711f3
RG
2600
2601 /* Finally, gimplify the function arguments. */
726a989a 2602 if (nargs > 0)
6de9cd9a 2603 {
726a989a
RB
2604 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2605 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2606 PUSH_ARGS_REVERSED ? i-- : i++)
2607 {
2608 enum gimplify_status t;
6de9cd9a 2609
726a989a
RB
2610 /* Avoid gimplifying the second argument to va_start, which needs to
2611 be the plain PARM_DECL. */
2612 if ((i != 1) || !builtin_va_start_p)
2613 {
1282697f
AH
2614 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2615 EXPR_LOCATION (*expr_p));
6de9cd9a 2616
726a989a
RB
2617 if (t == GS_ERROR)
2618 ret = GS_ERROR;
2619 }
2620 }
6de9cd9a 2621 }
6de9cd9a 2622
33922890
RG
2623 /* Verify the function result. */
2624 if (want_value && fndecl
f20ca725 2625 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
33922890
RG
2626 {
2627 error_at (loc, "using result of function returning %<void%>");
2628 ret = GS_ERROR;
2629 }
2630
6de9cd9a 2631 /* Try this again in case gimplification exposed something. */
6f538523 2632 if (ret != GS_ERROR)
6de9cd9a 2633 {
db3927fb 2634 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
6f538523 2635
82d6e6fc 2636 if (new_tree && new_tree != *expr_p)
5039610b
SL
2637 {
2638 /* There was a transformation of this call which computes the
2639 same value, but in a more efficient way. Return and try
2640 again. */
82d6e6fc 2641 *expr_p = new_tree;
5039610b 2642 return GS_OK;
6de9cd9a
DN
2643 }
2644 }
726a989a
RB
2645 else
2646 {
df8fa700 2647 *expr_p = error_mark_node;
726a989a
RB
2648 return GS_ERROR;
2649 }
6de9cd9a
DN
2650
2651 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2652 decl. This allows us to eliminate redundant or useless
2653 calls to "const" functions. */
becfd6e5
KZ
2654 if (TREE_CODE (*expr_p) == CALL_EXPR)
2655 {
2656 int flags = call_expr_flags (*expr_p);
2657 if (flags & (ECF_CONST | ECF_PURE)
2658 /* An infinite loop is considered a side effect. */
2659 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2660 TREE_SIDE_EFFECTS (*expr_p) = 0;
2661 }
726a989a
RB
2662
2663 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2664 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2665 form and delegate the creation of a GIMPLE_CALL to
2666 gimplify_modify_expr. This is always possible because when
2667 WANT_VALUE is true, the caller wants the result of this call into
2668 a temporary, which means that we will emit an INIT_EXPR in
2669 internal_get_tmp_var which will then be handled by
2670 gimplify_modify_expr. */
2671 if (!want_value)
2672 {
2673 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2674 have to do is replicate it as a GIMPLE_CALL tuple. */
64e0f5ff 2675 gimple_stmt_iterator gsi;
726a989a 2676 call = gimple_build_call_from_tree (*expr_p);
f20ca725 2677 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
726a989a 2678 gimplify_seq_add_stmt (pre_p, call);
64e0f5ff
JH
2679 gsi = gsi_last (*pre_p);
2680 fold_stmt (&gsi);
726a989a
RB
2681 *expr_p = NULL_TREE;
2682 }
f20ca725
RG
2683 else
2684 /* Remember the original function type. */
2685 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2686 CALL_EXPR_FN (*expr_p));
726a989a 2687
6de9cd9a
DN
2688 return ret;
2689}
2690
2691/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2692 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2693
2694 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2695 condition is true or false, respectively. If null, we should generate
2696 our own to skip over the evaluation of this specific expression.
2697
ca80e52b
EB
2698 LOCUS is the source location of the COND_EXPR.
2699
6de9cd9a
DN
2700 This function is the tree equivalent of do_jump.
2701
2702 shortcut_cond_r should only be called by shortcut_cond_expr. */
2703
2704static tree
ca80e52b
EB
2705shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2706 location_t locus)
6de9cd9a
DN
2707{
2708 tree local_label = NULL_TREE;
2709 tree t, expr = NULL;
2710
2711 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2712 retain the shortcut semantics. Just insert the gotos here;
2713 shortcut_cond_expr will append the real blocks later. */
2714 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2715 {
ca80e52b
EB
2716 location_t new_locus;
2717
6de9cd9a
DN
2718 /* Turn if (a && b) into
2719
2720 if (a); else goto no;
2721 if (b) goto yes; else goto no;
2722 (no:) */
2723
2724 if (false_label_p == NULL)
2725 false_label_p = &local_label;
2726
ca80e52b
EB
2727 /* Keep the original source location on the first 'if'. */
2728 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
6de9cd9a
DN
2729 append_to_statement_list (t, &expr);
2730
ca80e52b
EB
2731 /* Set the source location of the && on the second 'if'. */
2732 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2733 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2734 new_locus);
6de9cd9a
DN
2735 append_to_statement_list (t, &expr);
2736 }
2737 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2738 {
ca80e52b
EB
2739 location_t new_locus;
2740
6de9cd9a
DN
2741 /* Turn if (a || b) into
2742
2743 if (a) goto yes;
2744 if (b) goto yes; else goto no;
2745 (yes:) */
2746
2747 if (true_label_p == NULL)
2748 true_label_p = &local_label;
2749
ca80e52b
EB
2750 /* Keep the original source location on the first 'if'. */
2751 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
6de9cd9a
DN
2752 append_to_statement_list (t, &expr);
2753
ca80e52b
EB
2754 /* Set the source location of the || on the second 'if'. */
2755 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2756 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2757 new_locus);
6de9cd9a
DN
2758 append_to_statement_list (t, &expr);
2759 }
1537737f
JJ
2760 else if (TREE_CODE (pred) == COND_EXPR
2761 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2762 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
6de9cd9a 2763 {
ca80e52b
EB
2764 location_t new_locus;
2765
6de9cd9a
DN
2766 /* As long as we're messing with gotos, turn if (a ? b : c) into
2767 if (a)
2768 if (b) goto yes; else goto no;
2769 else
1537737f
JJ
2770 if (c) goto yes; else goto no;
2771
2772 Don't do this if one of the arms has void type, which can happen
2773 in C++ when the arm is throw. */
ca80e52b
EB
2774
2775 /* Keep the original source location on the first 'if'. Set the source
2776 location of the ? on the second 'if'. */
2777 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
b4257cfc
RG
2778 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2779 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
ca80e52b 2780 false_label_p, locus),
b4257cfc 2781 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
ca80e52b 2782 false_label_p, new_locus));
6de9cd9a
DN
2783 }
2784 else
2785 {
b4257cfc
RG
2786 expr = build3 (COND_EXPR, void_type_node, pred,
2787 build_and_jump (true_label_p),
2788 build_and_jump (false_label_p));
ca80e52b 2789 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2790 }
2791
2792 if (local_label)
2793 {
2794 t = build1 (LABEL_EXPR, void_type_node, local_label);
2795 append_to_statement_list (t, &expr);
2796 }
2797
2798 return expr;
2799}
2800
726a989a
RB
2801/* Given a conditional expression EXPR with short-circuit boolean
2802 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */
2804
6de9cd9a
DN
2805static tree
2806shortcut_cond_expr (tree expr)
2807{
2808 tree pred = TREE_OPERAND (expr, 0);
2809 tree then_ = TREE_OPERAND (expr, 1);
2810 tree else_ = TREE_OPERAND (expr, 2);
2811 tree true_label, false_label, end_label, t;
2812 tree *true_label_p;
2813 tree *false_label_p;
089efaa4 2814 bool emit_end, emit_false, jump_over_else;
65355d53
RH
2815 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2816 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a
DN
2817
2818 /* First do simple transformations. */
65355d53 2819 if (!else_se)
6de9cd9a 2820 {
ca80e52b
EB
2821 /* If there is no 'else', turn
2822 if (a && b) then c
2823 into
2824 if (a) if (b) then c. */
6de9cd9a
DN
2825 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2826 {
ca80e52b 2827 /* Keep the original source location on the first 'if'. */
ec52b111 2828 location_t locus = EXPR_LOC_OR_HERE (expr);
6de9cd9a 2829 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2830 /* Set the source location of the && on the second 'if'. */
2831 if (EXPR_HAS_LOCATION (pred))
2832 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2833 then_ = shortcut_cond_expr (expr);
4356a1bf 2834 then_se = then_ && TREE_SIDE_EFFECTS (then_);
6de9cd9a 2835 pred = TREE_OPERAND (pred, 0);
b4257cfc 2836 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
ca80e52b 2837 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2838 }
2839 }
726a989a 2840
65355d53 2841 if (!then_se)
6de9cd9a
DN
2842 {
2843 /* If there is no 'then', turn
2844 if (a || b); else d
2845 into
2846 if (a); else if (b); else d. */
2847 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2848 {
ca80e52b 2849 /* Keep the original source location on the first 'if'. */
ec52b111 2850 location_t locus = EXPR_LOC_OR_HERE (expr);
6de9cd9a 2851 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2852 /* Set the source location of the || on the second 'if'. */
2853 if (EXPR_HAS_LOCATION (pred))
2854 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2855 else_ = shortcut_cond_expr (expr);
4356a1bf 2856 else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a 2857 pred = TREE_OPERAND (pred, 0);
b4257cfc 2858 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
ca80e52b 2859 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2860 }
2861 }
2862
2863 /* If we're done, great. */
2864 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2865 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2866 return expr;
2867
2868 /* Otherwise we need to mess with gotos. Change
2869 if (a) c; else d;
2870 to
2871 if (a); else goto no;
2872 c; goto end;
2873 no: d; end:
2874 and recursively gimplify the condition. */
2875
2876 true_label = false_label = end_label = NULL_TREE;
2877
2878 /* If our arms just jump somewhere, hijack those labels so we don't
2879 generate jumps to jumps. */
2880
65355d53
RH
2881 if (then_
2882 && TREE_CODE (then_) == GOTO_EXPR
6de9cd9a
DN
2883 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2884 {
2885 true_label = GOTO_DESTINATION (then_);
65355d53
RH
2886 then_ = NULL;
2887 then_se = false;
6de9cd9a
DN
2888 }
2889
65355d53
RH
2890 if (else_
2891 && TREE_CODE (else_) == GOTO_EXPR
6de9cd9a
DN
2892 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2893 {
2894 false_label = GOTO_DESTINATION (else_);
65355d53
RH
2895 else_ = NULL;
2896 else_se = false;
6de9cd9a
DN
2897 }
2898
9cf737f8 2899 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
6de9cd9a
DN
2900 if (true_label)
2901 true_label_p = &true_label;
2902 else
2903 true_label_p = NULL;
2904
2905 /* The 'else' branch also needs a label if it contains interesting code. */
65355d53 2906 if (false_label || else_se)
6de9cd9a
DN
2907 false_label_p = &false_label;
2908 else
2909 false_label_p = NULL;
2910
2911 /* If there was nothing else in our arms, just forward the label(s). */
65355d53 2912 if (!then_se && !else_se)
ca80e52b 2913 return shortcut_cond_r (pred, true_label_p, false_label_p,
ec52b111 2914 EXPR_LOC_OR_HERE (expr));
6de9cd9a
DN
2915
2916 /* If our last subexpression already has a terminal label, reuse it. */
65355d53 2917 if (else_se)
ca80e52b 2918 t = expr_last (else_);
65355d53 2919 else if (then_se)
ca80e52b 2920 t = expr_last (then_);
65355d53 2921 else
ca80e52b
EB
2922 t = NULL;
2923 if (t && TREE_CODE (t) == LABEL_EXPR)
2924 end_label = LABEL_EXPR_LABEL (t);
6de9cd9a
DN
2925
2926 /* If we don't care about jumping to the 'else' branch, jump to the end
2927 if the condition is false. */
2928 if (!false_label_p)
2929 false_label_p = &end_label;
2930
2931 /* We only want to emit these labels if we aren't hijacking them. */
2932 emit_end = (end_label == NULL_TREE);
2933 emit_false = (false_label == NULL_TREE);
2934
089efaa4
ILT
2935 /* We only emit the jump over the else clause if we have to--if the
2936 then clause may fall through. Otherwise we can wind up with a
2937 useless jump and a useless label at the end of gimplified code,
2938 which will cause us to think that this conditional as a whole
2939 falls through even if it doesn't. If we then inline a function
2940 which ends with such a condition, that can cause us to issue an
2941 inappropriate warning about control reaching the end of a
2942 non-void function. */
2943 jump_over_else = block_may_fallthru (then_);
2944
ca80e52b 2945 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
ec52b111 2946 EXPR_LOC_OR_HERE (expr));
6de9cd9a
DN
2947
2948 expr = NULL;
2949 append_to_statement_list (pred, &expr);
2950
2951 append_to_statement_list (then_, &expr);
65355d53 2952 if (else_se)
6de9cd9a 2953 {
089efaa4
ILT
2954 if (jump_over_else)
2955 {
ca80e52b 2956 tree last = expr_last (expr);
089efaa4 2957 t = build_and_jump (&end_label);
ca80e52b
EB
2958 if (EXPR_HAS_LOCATION (last))
2959 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
089efaa4
ILT
2960 append_to_statement_list (t, &expr);
2961 }
6de9cd9a
DN
2962 if (emit_false)
2963 {
2964 t = build1 (LABEL_EXPR, void_type_node, false_label);
2965 append_to_statement_list (t, &expr);
2966 }
2967 append_to_statement_list (else_, &expr);
2968 }
2969 if (emit_end && end_label)
2970 {
2971 t = build1 (LABEL_EXPR, void_type_node, end_label);
2972 append_to_statement_list (t, &expr);
2973 }
2974
2975 return expr;
2976}
2977
2978/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2979
50674e96 2980tree
6de9cd9a
DN
2981gimple_boolify (tree expr)
2982{
2983 tree type = TREE_TYPE (expr);
db3927fb 2984 location_t loc = EXPR_LOCATION (expr);
6de9cd9a 2985
554cf330
JJ
2986 if (TREE_CODE (expr) == NE_EXPR
2987 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2988 && integer_zerop (TREE_OPERAND (expr, 1)))
2989 {
2990 tree call = TREE_OPERAND (expr, 0);
2991 tree fn = get_callee_fndecl (call);
2992
d53c73e0
JJ
2993 /* For __builtin_expect ((long) (x), y) recurse into x as well
2994 if x is truth_value_p. */
554cf330
JJ
2995 if (fn
2996 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2997 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2998 && call_expr_nargs (call) == 2)
2999 {
3000 tree arg = CALL_EXPR_ARG (call, 0);
3001 if (arg)
3002 {
3003 if (TREE_CODE (arg) == NOP_EXPR
3004 && TREE_TYPE (arg) == TREE_TYPE (call))
3005 arg = TREE_OPERAND (arg, 0);
d53c73e0
JJ
3006 if (truth_value_p (TREE_CODE (arg)))
3007 {
3008 arg = gimple_boolify (arg);
3009 CALL_EXPR_ARG (call, 0)
3010 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3011 }
554cf330
JJ
3012 }
3013 }
3014 }
3015
6de9cd9a
DN
3016 switch (TREE_CODE (expr))
3017 {
3018 case TRUTH_AND_EXPR:
3019 case TRUTH_OR_EXPR:
3020 case TRUTH_XOR_EXPR:
3021 case TRUTH_ANDIF_EXPR:
3022 case TRUTH_ORIF_EXPR:
3023 /* Also boolify the arguments of truth exprs. */
3024 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3025 /* FALLTHRU */
3026
3027 case TRUTH_NOT_EXPR:
3028 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
6de9cd9a 3029
6de9cd9a 3030 /* These expressions always produce boolean results. */
7f3ff782
KT
3031 if (TREE_CODE (type) != BOOLEAN_TYPE)
3032 TREE_TYPE (expr) = boolean_type_node;
6de9cd9a 3033 return expr;
d3147f64 3034
6de9cd9a 3035 default:
7f3ff782
KT
3036 if (COMPARISON_CLASS_P (expr))
3037 {
          /* These expressions always produce boolean results.  */
3039 if (TREE_CODE (type) != BOOLEAN_TYPE)
3040 TREE_TYPE (expr) = boolean_type_node;
3041 return expr;
3042 }
6de9cd9a
DN
3043 /* Other expressions that get here must have boolean values, but
3044 might need to be converted to the appropriate mode. */
7f3ff782 3045 if (TREE_CODE (type) == BOOLEAN_TYPE)
1d15f620 3046 return expr;
db3927fb 3047 return fold_convert_loc (loc, boolean_type_node, expr);
6de9cd9a
DN
3048 }
3049}
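
/* For illustration only (typical C input assumed): gimple_boolify leaves a
   comparison such as "a < b" in place but retypes it to boolean_type_node,
   while a plain integer value "x" used as a truth value is wrapped in a
   conversion to boolean_type_node via fold_convert.  */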
3050
aea74440
JJ
3051/* Given a conditional expression *EXPR_P without side effects, gimplify
3052 its operands. New statements are inserted to PRE_P. */
3053
3054static enum gimplify_status
726a989a 3055gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
aea74440
JJ
3056{
3057 tree expr = *expr_p, cond;
3058 enum gimplify_status ret, tret;
3059 enum tree_code code;
3060
3061 cond = gimple_boolify (COND_EXPR_COND (expr));
3062
3063 /* We need to handle && and || specially, as their gimplification
3064 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3065 code = TREE_CODE (cond);
3066 if (code == TRUTH_ANDIF_EXPR)
3067 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3068 else if (code == TRUTH_ORIF_EXPR)
3069 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
726a989a 3070 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
aea74440
JJ
3071 COND_EXPR_COND (*expr_p) = cond;
3072
3073 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3074 is_gimple_val, fb_rvalue);
3075 ret = MIN (ret, tret);
3076 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3077 is_gimple_val, fb_rvalue);
3078
3079 return MIN (ret, tret);
3080}
3081
ad19c4be 3082/* Return true if evaluating EXPR could trap.
aea74440
JJ
3083 EXPR is GENERIC, while tree_could_trap_p can be called
3084 only on GIMPLE. */
3085
3086static bool
3087generic_expr_could_trap_p (tree expr)
3088{
3089 unsigned i, n;
3090
3091 if (!expr || is_gimple_val (expr))
3092 return false;
3093
3094 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3095 return true;
3096
3097 n = TREE_OPERAND_LENGTH (expr);
3098 for (i = 0; i < n; i++)
3099 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3100 return true;
3101
3102 return false;
3103}
3104
206048bd 3105/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
6de9cd9a
DN
3106 into
3107
3108 if (p) if (p)
3109 t1 = a; a;
3110 else or else
3111 t1 = b; b;
3112 t1;
3113
3114 The second form is used when *EXPR_P is of type void.
3115
3116 PRE_P points to the list where side effects that must happen before
dae7ec87 3117 *EXPR_P should be stored. */
6de9cd9a
DN
3118
3119static enum gimplify_status
726a989a 3120gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
6de9cd9a
DN
3121{
3122 tree expr = *expr_p;
06ec59e6
EB
3123 tree type = TREE_TYPE (expr);
3124 location_t loc = EXPR_LOCATION (expr);
3125 tree tmp, arm1, arm2;
6de9cd9a 3126 enum gimplify_status ret;
726a989a
RB
3127 tree label_true, label_false, label_cont;
3128 bool have_then_clause_p, have_else_clause_p;
3129 gimple gimple_cond;
3130 enum tree_code pred_code;
3131 gimple_seq seq = NULL;
26d44ae2
RH
3132
3133 /* If this COND_EXPR has a value, copy the values into a temporary within
3134 the arms. */
06ec59e6 3135 if (!VOID_TYPE_P (type))
26d44ae2 3136 {
06ec59e6 3137 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
aff98faf
AO
3138 tree result;
3139
06ec59e6
EB
3140 /* If either an rvalue is ok or we do not require an lvalue, create the
3141 temporary. But we cannot do that if the type is addressable. */
3142 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
c3e203cf 3143 && !TREE_ADDRESSABLE (type))
aff98faf 3144 {
aea74440
JJ
3145 if (gimplify_ctxp->allow_rhs_cond_expr
3146 /* If either branch has side effects or could trap, it can't be
3147 evaluated unconditionally. */
06ec59e6
EB
3148 && !TREE_SIDE_EFFECTS (then_)
3149 && !generic_expr_could_trap_p (then_)
3150 && !TREE_SIDE_EFFECTS (else_)
3151 && !generic_expr_could_trap_p (else_))
aea74440
JJ
3152 return gimplify_pure_cond_expr (expr_p, pre_p);
3153
06ec59e6
EB
3154 tmp = create_tmp_var (type, "iftmp");
3155 result = tmp;
aff98faf 3156 }
06ec59e6
EB
3157
3158 /* Otherwise, only create and copy references to the values. */
26d44ae2
RH
3159 else
3160 {
06ec59e6 3161 type = build_pointer_type (type);
aff98faf 3162
06ec59e6
EB
3163 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3164 then_ = build_fold_addr_expr_loc (loc, then_);
aff98faf 3165
06ec59e6
EB
3166 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3167 else_ = build_fold_addr_expr_loc (loc, else_);
3168
3169 expr
3170 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
aea74440 3171
726a989a 3172 tmp = create_tmp_var (type, "iftmp");
70f34814 3173 result = build_simple_mem_ref_loc (loc, tmp);
26d44ae2
RH
3174 }
3175
06ec59e6
EB
3176 /* Build the new then clause, `tmp = then_;'. But don't build the
3177 assignment if the value is void; in C++ it can be void if it's a throw. */

3178 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3179 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
26d44ae2 3180
06ec59e6
EB
3181 /* Similarly, build the new else clause, `tmp = else_;'. */
3182 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3183 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
26d44ae2
RH
3184
3185 TREE_TYPE (expr) = void_type_node;
3186 recalculate_side_effects (expr);
3187
d91ba7b0 3188 /* Move the COND_EXPR to the prequeue. */
726a989a 3189 gimplify_stmt (&expr, pre_p);
26d44ae2 3190
aff98faf 3191 *expr_p = result;
726a989a 3192 return GS_ALL_DONE;
26d44ae2
RH
3193 }
3194
f2f81d57
EB
3195 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3196 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3197 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3198 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3199
26d44ae2
RH
3200 /* Make sure the condition has BOOLEAN_TYPE. */
3201 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3202
3203 /* Break apart && and || conditions. */
3204 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3205 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3206 {
3207 expr = shortcut_cond_expr (expr);
3208
3209 if (expr != *expr_p)
3210 {
3211 *expr_p = expr;
3212
3213 /* We can't rely on gimplify_expr to re-gimplify the expanded
3214 form properly, as cleanups might cause the target labels to be
3215 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3216 set up a conditional context. */
3217 gimple_push_condition ();
726a989a 3218 gimplify_stmt (expr_p, &seq);
26d44ae2 3219 gimple_pop_condition (pre_p);
726a989a 3220 gimple_seq_add_seq (pre_p, seq);
26d44ae2
RH
3221
3222 return GS_ALL_DONE;
3223 }
3224 }
3225
3226 /* Now do the normal gimplification. */
26d44ae2 3227
726a989a
RB
3228 /* Gimplify condition. */
3229 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3230 fb_rvalue);
26d44ae2 3231 if (ret == GS_ERROR)
726a989a
RB
3232 return GS_ERROR;
3233 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3234
3235 gimple_push_condition ();
26d44ae2 3236
726a989a
RB
3237 have_then_clause_p = have_else_clause_p = false;
3238 if (TREE_OPERAND (expr, 1) != NULL
3239 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3240 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3241 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3242 == current_function_decl)
3243 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3244 have different locations, otherwise we end up with incorrect
3245 location information on the branches. */
3246 && (optimize
3247 || !EXPR_HAS_LOCATION (expr)
3248 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3249 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3250 {
3251 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3252 have_then_clause_p = true;
26d44ae2
RH
3253 }
3254 else
c2255bc4 3255 label_true = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3256 if (TREE_OPERAND (expr, 2) != NULL
3257 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3258 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3259 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3260 == current_function_decl)
3261 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3262 have different locations, otherwise we end up with incorrect
3263 location information on the branches. */
3264 && (optimize
3265 || !EXPR_HAS_LOCATION (expr)
3266 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3267 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3268 {
3269 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3270 have_else_clause_p = true;
3271 }
3272 else
c2255bc4 3273 label_false = create_artificial_label (UNKNOWN_LOCATION);
26d44ae2 3274
726a989a
RB
3275 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3276 &arm2);
26d44ae2 3277
726a989a
RB
3278 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3279 label_false);
26d44ae2 3280
726a989a
RB
3281 gimplify_seq_add_stmt (&seq, gimple_cond);
3282 label_cont = NULL_TREE;
3283 if (!have_then_clause_p)
3284 {
3285 /* For if (...) {} else { code; } put label_true after
3286 the else block. */
3287 if (TREE_OPERAND (expr, 1) == NULL_TREE
3288 && !have_else_clause_p
3289 && TREE_OPERAND (expr, 2) != NULL_TREE)
3290 label_cont = label_true;
3291 else
3292 {
3293 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3294 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3295 /* For if (...) { code; } else {} or
3296 if (...) { code; } else goto label; or
3297 if (...) { code; return; } else { ... }
3298 label_cont isn't needed. */
3299 if (!have_else_clause_p
3300 && TREE_OPERAND (expr, 2) != NULL_TREE
3301 && gimple_seq_may_fallthru (seq))
3302 {
3303 gimple g;
c2255bc4 3304 label_cont = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3305
3306 g = gimple_build_goto (label_cont);
3307
3308 /* GIMPLE_COND's are very low level; they have embedded
3309 gotos. This particular embedded goto should not be marked
3310 with the location of the original COND_EXPR, as it would
3311 correspond to the COND_EXPR's condition, not the ELSE or the
3312 THEN arms. To avoid marking it with the wrong location, flag
3313 it as "no location". */
3314 gimple_set_do_not_emit_location (g);
3315
3316 gimplify_seq_add_stmt (&seq, g);
3317 }
3318 }
3319 }
3320 if (!have_else_clause_p)
3321 {
3322 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3323 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3324 }
3325 if (label_cont)
3326 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3327
3328 gimple_pop_condition (pre_p);
3329 gimple_seq_add_seq (pre_p, seq);
3330
3331 if (ret == GS_ERROR)
3332 ; /* Do nothing. */
3333 else if (have_then_clause_p || have_else_clause_p)
3334 ret = GS_ALL_DONE;
3335 else
3336 {
3337 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3338 expr = TREE_OPERAND (expr, 0);
3339 gimplify_stmt (&expr, pre_p);
3340 }
3341
3342 *expr_p = NULL;
3343 return ret;
3344}
3345
f76d6e6f
EB
3346/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3347 to be marked addressable.
3348
3349 We cannot rely on such an expression being directly markable if a temporary
3350 has been created by the gimplification. In this case, we create another
3351 temporary and initialize it with a copy, which will become a store after we
3352 mark it addressable. This can happen if the front-end passed us something
3353 that it could not mark addressable yet, like a Fortran pass-by-reference
3354 parameter (int) floatvar. */
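/* Illustrative sketch (editor's addition, not from the original sources):
   if the gimplifier has already reduced the expression to a temporary
   register, say

       D.1234 = (int) floatvar;

   then D.1234 itself cannot have its address taken, so a second
   temporary is created and initialized with a copy,

       D.5678 = D.1234;

   and it is D.5678 that the caller marks addressable.  The temporary
   names above are made up for the example.  */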
3355
3356static void
3357prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3358{
3359 while (handled_component_p (*expr_p))
3360 expr_p = &TREE_OPERAND (*expr_p, 0);
3361 if (is_gimple_reg (*expr_p))
3362 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3363}
3364
726a989a
RB
3365/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3366 a call to __builtin_memcpy. */
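/* Illustrative sketch (editor's addition, not from the original sources):
   for a block copy of a large aggregate, e.g.

       struct big { char buf[4096]; } *p, *q;
       *p = *q;

   the MODIFY_EXPR is rewritten into roughly

       __builtin_memcpy (p, q, sizeof (struct big));

   after both operands have been marked addressable and their addresses
   gimplified into suitable call arguments.  */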
3367
3368static enum gimplify_status
3369gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3370 gimple_seq *seq_p)
26d44ae2 3371{
5039610b 3372 tree t, to, to_ptr, from, from_ptr;
726a989a 3373 gimple gs;
db3927fb 3374 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3375
726a989a
RB
3376 to = TREE_OPERAND (*expr_p, 0);
3377 from = TREE_OPERAND (*expr_p, 1);
26d44ae2 3378
f76d6e6f
EB
3379 /* Mark the RHS addressable. Beware that it may not be possible to do so
3380 directly if a temporary has been created by the gimplification. */
3381 prepare_gimple_addressable (&from, seq_p);
3382
628c189e 3383 mark_addressable (from);
db3927fb
AH
3384 from_ptr = build_fold_addr_expr_loc (loc, from);
3385 gimplify_arg (&from_ptr, seq_p, loc);
26d44ae2 3386
628c189e 3387 mark_addressable (to);
db3927fb
AH
3388 to_ptr = build_fold_addr_expr_loc (loc, to);
3389 gimplify_arg (&to_ptr, seq_p, loc);
726a989a 3390
e79983f4 3391 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
726a989a
RB
3392
3393 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
26d44ae2
RH
3394
3395 if (want_value)
3396 {
726a989a
RB
3397 /* tmp = memcpy() */
3398 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3399 gimple_call_set_lhs (gs, t);
3400 gimplify_seq_add_stmt (seq_p, gs);
3401
70f34814 3402 *expr_p = build_simple_mem_ref (t);
726a989a 3403 return GS_ALL_DONE;
26d44ae2
RH
3404 }
3405
726a989a
RB
3406 gimplify_seq_add_stmt (seq_p, gs);
3407 *expr_p = NULL;
3408 return GS_ALL_DONE;
26d44ae2
RH
3409}
3410
3411/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3412 a call to __builtin_memset. In this case we know that the RHS is
3413 a CONSTRUCTOR with an empty element list. */
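/* Illustrative sketch (editor's addition, not from the original sources):
   for a store whose RHS is an empty CONSTRUCTOR, as produced for
   instance by "struct big b = {};" in GNU C, the assignment is
   rewritten into roughly

       __builtin_memset (&b, 0, sizeof (struct big));  */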
3414
3415static enum gimplify_status
726a989a
RB
3416gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3417 gimple_seq *seq_p)
26d44ae2 3418{
1a13360e 3419 tree t, from, to, to_ptr;
726a989a 3420 gimple gs;
db3927fb 3421 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3422
1a13360e
OH
3423 /* Assert our assumptions, to abort instead of producing wrong code
3424 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3425 not be immediately exposed. */
b8698a0f 3426 from = TREE_OPERAND (*expr_p, 1);
1a13360e
OH
3427 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3428 from = TREE_OPERAND (from, 0);
3429
3430 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3431 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3432
3433 /* Now proceed. */
726a989a 3434 to = TREE_OPERAND (*expr_p, 0);
26d44ae2 3435
db3927fb
AH
3436 to_ptr = build_fold_addr_expr_loc (loc, to);
3437 gimplify_arg (&to_ptr, seq_p, loc);
e79983f4 3438 t = builtin_decl_implicit (BUILT_IN_MEMSET);
726a989a
RB
3439
3440 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
26d44ae2
RH
3441
3442 if (want_value)
3443 {
726a989a
RB
3444 /* tmp = memset() */
3445 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3446 gimple_call_set_lhs (gs, t);
3447 gimplify_seq_add_stmt (seq_p, gs);
3448
3449 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3450 return GS_ALL_DONE;
26d44ae2
RH
3451 }
3452
726a989a
RB
3453 gimplify_seq_add_stmt (seq_p, gs);
3454 *expr_p = NULL;
3455 return GS_ALL_DONE;
26d44ae2
RH
3456}
3457
57d1dd87
RH
3458/* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3459 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
ad19c4be 3460 assignment. Return non-null if we detect a potential overlap. */
57d1dd87
RH
3461
3462struct gimplify_init_ctor_preeval_data
3463{
3464 /* The base decl of the lhs object. May be NULL, in which case we
3465 have to assume the lhs is indirect. */
3466 tree lhs_base_decl;
3467
3468 /* The alias set of the lhs object. */
4862826d 3469 alias_set_type lhs_alias_set;
57d1dd87
RH
3470};
3471
3472static tree
3473gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3474{
3475 struct gimplify_init_ctor_preeval_data *data
3476 = (struct gimplify_init_ctor_preeval_data *) xdata;
3477 tree t = *tp;
3478
3479 /* If we find the base object, obviously we have overlap. */
3480 if (data->lhs_base_decl == t)
3481 return t;
3482
3483 /* If the constructor component is indirect, determine if we have a
3484 potential overlap with the lhs. The only bits of information we
3485 have to go on at this point are addressability and alias sets. */
70f34814
RG
3486 if ((INDIRECT_REF_P (t)
3487 || TREE_CODE (t) == MEM_REF)
57d1dd87
RH
3488 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3489 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3490 return t;
3491
df10ee2a 3492 /* If the constructor component is a call, determine if it can hide a
70f34814
RG
3493 potential overlap with the lhs through an INDIRECT_REF like above.
3494 ??? Ugh - this is completely broken. In fact this whole analysis
3495 doesn't look conservative. */
df10ee2a
EB
3496 if (TREE_CODE (t) == CALL_EXPR)
3497 {
3498 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3499
3500 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3501 if (POINTER_TYPE_P (TREE_VALUE (type))
3502 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3503 && alias_sets_conflict_p (data->lhs_alias_set,
3504 get_alias_set
3505 (TREE_TYPE (TREE_VALUE (type)))))
3506 return t;
3507 }
3508
6615c446 3509 if (IS_TYPE_OR_DECL_P (t))
57d1dd87
RH
3510 *walk_subtrees = 0;
3511 return NULL;
3512}
3513
726a989a 3514/* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
57d1dd87
RH
3515 force values that overlap with the lhs (as described by *DATA)
3516 into temporaries. */
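/* Illustrative sketch (editor's addition, not from the original sources):
   given an initializer whose elements read from the object being
   initialized, e.g.

       a = (struct A) { .x = a.y, .y = 0 };

   the value "a.y" would be clobbered once the element stores into "a"
   begin, so it is first forced into a temporary:

       D.1234 = a.y;
       a = (struct A) { .x = D.1234, .y = 0 };

   The temporary name is made up for the example.  */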
3517
3518static void
726a989a 3519gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
57d1dd87
RH
3520 struct gimplify_init_ctor_preeval_data *data)
3521{
3522 enum gimplify_status one;
3523
51eed280
PB
3524 /* If the value is constant, then there's nothing to pre-evaluate. */
3525 if (TREE_CONSTANT (*expr_p))
3526 {
3527 /* Ensure it does not have side effects, it might contain a reference to
3528 the object we're initializing. */
3529 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3530 return;
3531 }
57d1dd87
RH
3532
3533 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3534 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3535 return;
3536
3537 /* Recurse for nested constructors. */
3538 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3539 {
4038c495
GB
3540 unsigned HOST_WIDE_INT ix;
3541 constructor_elt *ce;
3542 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3543
ac47786e 3544 FOR_EACH_VEC_ELT (constructor_elt, v, ix, ce)
4038c495 3545 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
726a989a 3546
57d1dd87
RH
3547 return;
3548 }
3549
0461b801
EB
3550 /* If this is a variable sized type, we must remember the size. */
3551 maybe_with_size_expr (expr_p);
57d1dd87
RH
3552
3553 /* Gimplify the constructor element to something appropriate for the rhs
726a989a 3554 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
d3147f64 3555 the gimplifier will consider this a store to memory. Doing this
57d1dd87
RH
3556 gimplification now means that we won't have to deal with complicated
3557 language-specific trees, nor trees like SAVE_EXPR that can induce
b01d837f 3558 exponential search behavior. */
57d1dd87
RH
3559 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3560 if (one == GS_ERROR)
3561 {
3562 *expr_p = NULL;
3563 return;
3564 }
3565
3566 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3567 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3568 always be true for all scalars, since is_gimple_mem_rhs insists on a
3569 temporary variable for them. */
3570 if (DECL_P (*expr_p))
3571 return;
3572
3573 /* If this is of variable size, we have no choice but to assume it doesn't
3574 overlap since we can't make a temporary for it. */
4c923c28 3575 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
57d1dd87
RH
3576 return;
3577
3578 /* Otherwise, we must search for overlap ... */
3579 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3580 return;
3581
3582 /* ... and if found, force the value into a temporary. */
3583 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3584}
3585
6fa91b48
SB
3586/* A subroutine of gimplify_init_ctor_eval. Create a loop for
3587 a RANGE_EXPR in a CONSTRUCTOR for an array.
3588
3589 var = lower;
3590 loop_entry:
3591 object[var] = value;
3592 if (var == upper)
3593 goto loop_exit;
3594 var = var + 1;
3595 goto loop_entry;
3596 loop_exit:
3597
3598 We increment var _after_ the loop exit check because we might otherwise
3599 fail if upper == TYPE_MAX_VALUE (type for upper).
3600
3601 Note that we never have to deal with SAVE_EXPRs here, because this has
3602 already been taken care of for us, in gimplify_init_ctor_preeval(). */
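/* Illustrative note (editor's addition, not from the original sources):
   a GNU C designated range initializer such as

       int a[100] = { [10 ... 90] = v };

   reaches this point as a RANGE_EXPR index and is expanded into the
   loop shown above, with lower == 10, upper == 90 and value == v.  */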
3603
4038c495 3604static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
726a989a 3605 gimple_seq *, bool);
6fa91b48
SB
3606
3607static void
3608gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3609 tree value, tree array_elt_type,
726a989a 3610 gimple_seq *pre_p, bool cleared)
6fa91b48 3611{
726a989a 3612 tree loop_entry_label, loop_exit_label, fall_thru_label;
b56b9fe3 3613 tree var, var_type, cref, tmp;
6fa91b48 3614
c2255bc4
AH
3615 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3616 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3617 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
6fa91b48
SB
3618
3619 /* Create and initialize the index variable. */
3620 var_type = TREE_TYPE (upper);
3621 var = create_tmp_var (var_type, NULL);
726a989a 3622 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
6fa91b48
SB
3623
3624 /* Add the loop entry label. */
726a989a 3625 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
6fa91b48
SB
3626
3627 /* Build the reference. */
3628 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3629 var, NULL_TREE, NULL_TREE);
3630
3631 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3632 the store. Otherwise just assign value to the reference. */
3633
3634 if (TREE_CODE (value) == CONSTRUCTOR)
3635 /* NB we might have to call ourself recursively through
3636 gimplify_init_ctor_eval if the value is a constructor. */
3637 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3638 pre_p, cleared);
3639 else
726a989a 3640 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
6fa91b48
SB
3641
3642 /* We exit the loop when the index var is equal to the upper bound. */
726a989a
RB
3643 gimplify_seq_add_stmt (pre_p,
3644 gimple_build_cond (EQ_EXPR, var, upper,
3645 loop_exit_label, fall_thru_label));
3646
3647 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
6fa91b48
SB
3648
3649 /* Otherwise, increment the index var... */
b56b9fe3
RS
3650 tmp = build2 (PLUS_EXPR, var_type, var,
3651 fold_convert (var_type, integer_one_node));
726a989a 3652 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
6fa91b48
SB
3653
3654 /* ...and jump back to the loop entry. */
726a989a 3655 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
6fa91b48
SB
3656
3657 /* Add the loop exit label. */
726a989a 3658 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
6fa91b48
SB
3659}
3660
292a398f 3661/* Return true if FDECL is accessing a field that is zero-sized. */
b8698a0f 3662
292a398f 3663static bool
22ea9ec0 3664zero_sized_field_decl (const_tree fdecl)
292a398f 3665{
b8698a0f 3666 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
292a398f
DB
3667 && integer_zerop (DECL_SIZE (fdecl)))
3668 return true;
3669 return false;
3670}
3671
d06526b7 3672/* Return true if TYPE is zero-sized. */
b8698a0f 3673
d06526b7 3674static bool
22ea9ec0 3675zero_sized_type (const_tree type)
d06526b7
AP
3676{
3677 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3678 && integer_zerop (TYPE_SIZE (type)))
3679 return true;
3680 return false;
3681}
3682
57d1dd87
RH
3683/* A subroutine of gimplify_init_constructor. Generate individual
3684 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4038c495 3685 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
57d1dd87
RH
3686 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3687 zeroed first. */
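/* Illustrative sketch (editor's addition, not from the original sources):
   a constructor assignment such as

       s = (struct S) { 1, 2 };

   is broken up into element-wise initializations, roughly

       s.a = 1;
       s.b = 2;

   with zero-valued elements skipped when the object has already been
   block cleared.  */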
3688
3689static void
4038c495 3690gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
726a989a 3691 gimple_seq *pre_p, bool cleared)
57d1dd87
RH
3692{
3693 tree array_elt_type = NULL;
4038c495
GB
3694 unsigned HOST_WIDE_INT ix;
3695 tree purpose, value;
57d1dd87
RH
3696
3697 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3698 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3699
4038c495 3700 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
57d1dd87 3701 {
726a989a 3702 tree cref;
57d1dd87
RH
3703
3704 /* NULL values are created above for gimplification errors. */
3705 if (value == NULL)
3706 continue;
3707
3708 if (cleared && initializer_zerop (value))
3709 continue;
3710
6fa91b48
SB
3711 /* ??? Here's to hoping the front end fills in all of the indices,
3712 so we don't have to figure out what's missing ourselves. */
3713 gcc_assert (purpose);
3714
816fa80a
OH
3715 /* Skip zero-sized fields, unless value has side-effects. This can
3716 happen with calls to functions returning a zero-sized type, which
3717 we shouldn't discard. As a number of downstream passes don't
3718 expect sets of zero-sized fields, we rely on the gimplification of
3719 the MODIFY_EXPR we make below to drop the assignment statement. */
3720 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
292a398f
DB
3721 continue;
3722
6fa91b48
SB
3723 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3724 whole range. */
3725 if (TREE_CODE (purpose) == RANGE_EXPR)
57d1dd87 3726 {
6fa91b48
SB
3727 tree lower = TREE_OPERAND (purpose, 0);
3728 tree upper = TREE_OPERAND (purpose, 1);
3729
3730 /* If the lower bound is equal to upper, just treat it as if
3731 upper was the index. */
3732 if (simple_cst_equal (lower, upper))
3733 purpose = upper;
3734 else
3735 {
3736 gimplify_init_ctor_eval_range (object, lower, upper, value,
3737 array_elt_type, pre_p, cleared);
3738 continue;
3739 }
3740 }
57d1dd87 3741
6fa91b48
SB
3742 if (array_elt_type)
3743 {
1a1640db
RG
3744 /* Do not use bitsizetype for ARRAY_REF indices. */
3745 if (TYPE_DOMAIN (TREE_TYPE (object)))
ad19c4be
EB
3746 purpose
3747 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3748 purpose);
b4257cfc
RG
3749 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3750 purpose, NULL_TREE, NULL_TREE);
57d1dd87
RH
3751 }
3752 else
cf0efa6a
ILT
3753 {
3754 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
b4257cfc
RG
3755 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3756 unshare_expr (object), purpose, NULL_TREE);
cf0efa6a 3757 }
57d1dd87 3758
cf0efa6a
ILT
3759 if (TREE_CODE (value) == CONSTRUCTOR
3760 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
57d1dd87
RH
3761 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3762 pre_p, cleared);
3763 else
3764 {
726a989a 3765 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
57d1dd87 3766 gimplify_and_add (init, pre_p);
726a989a 3767 ggc_free (init);
57d1dd87
RH
3768 }
3769 }
3770}
3771
ad19c4be 3772/* Return the appropriate RHS predicate for this LHS. */
726a989a
RB
3773
3774gimple_predicate
3775rhs_predicate_for (tree lhs)
3776{
ba4d8f9d
RG
3777 if (is_gimple_reg (lhs))
3778 return is_gimple_reg_rhs_or_call;
726a989a 3779 else
ba4d8f9d 3780 return is_gimple_mem_rhs_or_call;
726a989a
RB
3781}
3782
2ec5deb5
PB
3783/* Gimplify a C99 compound literal expression. This just means adding
3784 the DECL_EXPR before the current statement and using its anonymous
3785 decl instead. */
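/* Illustrative note (editor's addition, not from the original sources):
   for a block-scope compound literal such as

       int *p = (int []) { 1, 2, 3 };

   the COMPOUND_LITERAL_EXPR is replaced by its anonymous array decl;
   the DECL_EXPR emitted beforehand takes care of initializing that
   decl, and "p" then simply points to it.  */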
3786
3787static enum gimplify_status
a845a7f5
ILT
3788gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3789 fallback_t fallback)
2ec5deb5
PB
3790{
3791 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3792 tree decl = DECL_EXPR_DECL (decl_s);
3793 /* Mark the decl as addressable if the compound literal
3794 expression is addressable now, otherwise it is marked too late
3795 after we gimplify the initialization expression. */
3796 if (TREE_ADDRESSABLE (*expr_p))
3797 TREE_ADDRESSABLE (decl) = 1;
3798
3799 /* Preliminarily mark non-addressed complex variables as eligible
3800 for promotion to gimple registers. We'll transform their uses
3801 as we find them. */
3802 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3803 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3804 && !TREE_THIS_VOLATILE (decl)
3805 && !needs_to_live_in_memory (decl))
3806 DECL_GIMPLE_REG_P (decl) = 1;
3807
a845a7f5
ILT
3808 /* If the decl is not addressable, then it is being used in some
3809 expression or on the right hand side of a statement, and it can
3810 be put into a readonly data section. */
3811 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3812 TREE_READONLY (decl) = 1;
3813
2ec5deb5
PB
3814 /* This decl isn't mentioned in the enclosing block, so add it to the
3815 list of temps. FIXME it seems a bit of a kludge to say that
3816 anonymous artificial vars aren't pushed, but everything else is. */
3817 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3818 gimple_add_tmp_var (decl);
3819
3820 gimplify_and_add (decl_s, pre_p);
3821 *expr_p = decl;
3822 return GS_OK;
3823}
3824
3825/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3826 return a new CONSTRUCTOR if something changed. */
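/* Illustrative note (editor's addition, not from the original sources):
   an initializer such as

       struct T t = { (struct S) { 1, 2 }, 3 };

   can be flattened to

       struct T t = { { 1, 2 }, 3 };

   provided the address of the embedded compound literal is never taken,
   which in turn lets the whole constructor stay a constant.  */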
3827
3828static tree
3829optimize_compound_literals_in_ctor (tree orig_ctor)
3830{
3831 tree ctor = orig_ctor;
3832 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
3833 unsigned int idx, num = VEC_length (constructor_elt, elts);
3834
3835 for (idx = 0; idx < num; idx++)
3836 {
3837 tree value = VEC_index (constructor_elt, elts, idx)->value;
3838 tree newval = value;
3839 if (TREE_CODE (value) == CONSTRUCTOR)
3840 newval = optimize_compound_literals_in_ctor (value);
3841 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3842 {
3843 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3844 tree decl = DECL_EXPR_DECL (decl_s);
3845 tree init = DECL_INITIAL (decl);
3846
3847 if (!TREE_ADDRESSABLE (value)
3848 && !TREE_ADDRESSABLE (decl)
3849 && init)
3850 newval = optimize_compound_literals_in_ctor (init);
3851 }
3852 if (newval == value)
3853 continue;
3854
3855 if (ctor == orig_ctor)
3856 {
3857 ctor = copy_node (orig_ctor);
3858 CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
3859 elts = CONSTRUCTOR_ELTS (ctor);
3860 }
3861 VEC_index (constructor_elt, elts, idx)->value = newval;
3862 }
3863 return ctor;
3864}
3865
26d44ae2
RH
3866/* A subroutine of gimplify_modify_expr. Break out elements of a
3867 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3868
3869 Note that we still need to clear any elements that don't have explicit
3870 initializers, so if not all elements are initialized we keep the
ffed8a01
AH
3871 original MODIFY_EXPR, we just remove all of the constructor elements.
3872
3873 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3874 GS_ERROR if we would have to create a temporary when gimplifying
3875 this constructor. Otherwise, return GS_OK.
3876
3877 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
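/* Illustrative note (editor's addition, not from the original sources):
   the aggregate cases below choose between several strategies.  A
   read-only local such as "const struct S s = { 1, 2, 3 };" may simply
   be promoted to a static decl; a mostly-zero initializer is block
   cleared first and only the nonzero fields are stored afterwards; and
   small mixed initializers like "struct S t = { x, y };" are broken up
   into element-wise MODIFY_EXPRs by gimplify_init_ctor_eval.  The
   choice depends on whether the constructor is a valid constant
   initializer, how many of its elements are nonzero, and how large the
   object is.  */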
26d44ae2
RH
3878
3879static enum gimplify_status
726a989a
RB
3880gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3881 bool want_value, bool notify_temp_creation)
26d44ae2 3882{
f5a1f0d0 3883 tree object, ctor, type;
26d44ae2 3884 enum gimplify_status ret;
4038c495 3885 VEC(constructor_elt,gc) *elts;
26d44ae2 3886
f5a1f0d0 3887 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
26d44ae2 3888
ffed8a01
AH
3889 if (!notify_temp_creation)
3890 {
726a989a 3891 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
ffed8a01
AH
3892 is_gimple_lvalue, fb_lvalue);
3893 if (ret == GS_ERROR)
3894 return ret;
3895 }
57d1dd87 3896
726a989a 3897 object = TREE_OPERAND (*expr_p, 0);
f5a1f0d0
PB
3898 ctor = TREE_OPERAND (*expr_p, 1) =
3899 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3900 type = TREE_TYPE (ctor);
3901 elts = CONSTRUCTOR_ELTS (ctor);
26d44ae2 3902 ret = GS_ALL_DONE;
726a989a 3903
26d44ae2
RH
3904 switch (TREE_CODE (type))
3905 {
3906 case RECORD_TYPE:
3907 case UNION_TYPE:
3908 case QUAL_UNION_TYPE:
3909 case ARRAY_TYPE:
3910 {
57d1dd87 3911 struct gimplify_init_ctor_preeval_data preeval_data;
953d0c90
RS
3912 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3913 bool cleared, complete_p, valid_const_initializer;
26d44ae2
RH
3914
3915 /* Aggregate types must lower constructors to initialization of
3916 individual elements. The exception is that a CONSTRUCTOR node
3917 with no elements indicates zero-initialization of the whole. */
4038c495 3918 if (VEC_empty (constructor_elt, elts))
ffed8a01
AH
3919 {
3920 if (notify_temp_creation)
3921 return GS_OK;
3922 break;
3923 }
b8698a0f 3924
fe24d485
OH
3925 /* Fetch information about the constructor to direct later processing.
3926 We might want to make static versions of it in various cases, and
3927 can only do so if it is known to be a valid constant initializer. */
3928 valid_const_initializer
3929 = categorize_ctor_elements (ctor, &num_nonzero_elements,
953d0c90 3930 &num_ctor_elements, &complete_p);
26d44ae2
RH
3931
3932 /* If a const aggregate variable is being initialized, then it
3933 should never be a loss to promote the variable to be static. */
fe24d485 3934 if (valid_const_initializer
6f642f98 3935 && num_nonzero_elements > 1
26d44ae2 3936 && TREE_READONLY (object)
d0ea0759
SE
3937 && TREE_CODE (object) == VAR_DECL
3938 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
26d44ae2 3939 {
ffed8a01
AH
3940 if (notify_temp_creation)
3941 return GS_ERROR;
26d44ae2
RH
3942 DECL_INITIAL (object) = ctor;
3943 TREE_STATIC (object) = 1;
3944 if (!DECL_NAME (object))
3945 DECL_NAME (object) = create_tmp_var_name ("C");
3946 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3947
3948 /* ??? C++ doesn't automatically append a .<number> to the
3949 assembler name, and even when it does, it looks at FE private
3950 data structures to figure out what that number should be,
3951 which are not set for this variable. I suppose this is
3952 important for local statics for inline functions, which aren't
3953 "local" in the object file sense. So in order to get a unique
3954 TU-local symbol, we must invoke the lhd version now. */
3955 lhd_set_decl_assembler_name (object);
3956
3957 *expr_p = NULL_TREE;
3958 break;
3959 }
3960
cce70747
JC
3961 /* If there are "lots" of initialized elements, even discounting
3962 those that are not address constants (and thus *must* be
3963 computed at runtime), then partition the constructor into
3964 constant and non-constant parts. Block copy the constant
3965 parts in, then generate code for the non-constant parts. */
3966 /* TODO. There's code in cp/typeck.c to do this. */
3967
953d0c90
RS
3968 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3969 /* store_constructor will ignore the clearing of variable-sized
3970 objects. Initializers for such objects must explicitly set
3971 every field that needs to be set. */
3972 cleared = false;
3973 else if (!complete_p)
3974 /* If the constructor isn't complete, clear the whole object
3975 beforehand.
3976
3977 ??? This ought not to be needed. For any element not present
3978 in the initializer, we should simply set it to zero. Except
3979 we'd need to *find* the elements that are not present, and that
3980 requires trickery to avoid quadratic compile-time behavior in
3981 large cases or excessive memory use in small cases. */
73ed17ff 3982 cleared = true;
953d0c90 3983 else if (num_ctor_elements - num_nonzero_elements
e04ad03d 3984 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
953d0c90
RS
3985 && num_nonzero_elements < num_ctor_elements / 4)
3986 /* If there are "lots" of zeros, it's more efficient to clear
3987 the memory and then set the nonzero elements. */
cce70747 3988 cleared = true;
953d0c90
RS
3989 else
3990 cleared = false;
cce70747 3991
26d44ae2
RH
3992 /* If there are "lots" of initialized elements, and all of them
3993 are valid address constants, then the entire initializer can
cce70747
JC
3994 be dropped to memory, and then memcpy'd out. Don't do this
3995 for sparse arrays, though, as it's more efficient to follow
3996 the standard CONSTRUCTOR behavior of memset followed by
8afd015a
JM
3997 individual element initialization. Also don't do this for small
3998 all-zero initializers (which aren't big enough to merit
3999 clearing), and don't try to make bitwise copies of
4000 TREE_ADDRESSABLE types. */
4001 if (valid_const_initializer
4002 && !(cleared || num_nonzero_elements == 0)
c69c7be1 4003 && !TREE_ADDRESSABLE (type))
26d44ae2
RH
4004 {
4005 HOST_WIDE_INT size = int_size_in_bytes (type);
4006 unsigned int align;
4007
4008 /* ??? We can still get unbounded array types, at least
4009 from the C++ front end. This seems wrong, but attempt
4010 to work around it for now. */
4011 if (size < 0)
4012 {
4013 size = int_size_in_bytes (TREE_TYPE (object));
4014 if (size >= 0)
4015 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4016 }
4017
4018 /* Find the maximum alignment we can assume for the object. */
4019 /* ??? Make use of DECL_OFFSET_ALIGN. */
4020 if (DECL_P (object))
4021 align = DECL_ALIGN (object);
4022 else
4023 align = TYPE_ALIGN (type);
4024
329ad380
JJ
4025 if (size > 0
4026 && num_nonzero_elements > 1
4027 && !can_move_by_pieces (size, align))
26d44ae2 4028 {
ffed8a01
AH
4029 if (notify_temp_creation)
4030 return GS_ERROR;
4031
46314d3e
EB
4032 walk_tree (&ctor, force_labels_r, NULL, NULL);
4033 ctor = tree_output_constant_def (ctor);
4034 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4035 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4036 TREE_OPERAND (*expr_p, 1) = ctor;
57d1dd87
RH
4037
4038 /* This is no longer an assignment of a CONSTRUCTOR, but
4039 we still may have processing to do on the LHS. So
4040 pretend we didn't do anything here to let that happen. */
4041 return GS_UNHANDLED;
26d44ae2
RH
4042 }
4043 }
4044
558af7ca
EB
4045 /* If the target is volatile, we have non-zero elements and more than
4046 one field to assign, initialize the target from a temporary. */
61c7cbf8
RG
4047 if (TREE_THIS_VOLATILE (object)
4048 && !TREE_ADDRESSABLE (type)
558af7ca
EB
4049 && num_nonzero_elements > 0
4050 && VEC_length (constructor_elt, elts) > 1)
61c7cbf8
RG
4051 {
4052 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
4053 TREE_OPERAND (*expr_p, 0) = temp;
4054 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4055 *expr_p,
4056 build2 (MODIFY_EXPR, void_type_node,
4057 object, temp));
4058 return GS_OK;
4059 }
4060
ffed8a01
AH
4061 if (notify_temp_creation)
4062 return GS_OK;
4063
675c873b
EB
4064 /* If there are nonzero elements and if needed, pre-evaluate to capture
4065 elements overlapping with the lhs into temporaries. We must do this
4066 before clearing to fetch the values before they are zeroed-out. */
4067 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
85d89e76
OH
4068 {
4069 preeval_data.lhs_base_decl = get_base_address (object);
4070 if (!DECL_P (preeval_data.lhs_base_decl))
4071 preeval_data.lhs_base_decl = NULL;
4072 preeval_data.lhs_alias_set = get_alias_set (object);
4073
726a989a 4074 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
85d89e76
OH
4075 pre_p, post_p, &preeval_data);
4076 }
4077
26d44ae2
RH
4078 if (cleared)
4079 {
4080 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4081 Note that we still have to gimplify, in order to handle the
57d1dd87 4082 case of variable sized types. Avoid shared tree structures. */
4038c495 4083 CONSTRUCTOR_ELTS (ctor) = NULL;
726a989a 4084 TREE_SIDE_EFFECTS (ctor) = 0;
57d1dd87 4085 object = unshare_expr (object);
726a989a 4086 gimplify_stmt (expr_p, pre_p);
26d44ae2
RH
4087 }
4088
6fa91b48
SB
4089 /* If we have not block cleared the object, or if there are nonzero
4090 elements in the constructor, add assignments to the individual
4091 scalar fields of the object. */
4092 if (!cleared || num_nonzero_elements > 0)
85d89e76 4093 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
26d44ae2
RH
4094
4095 *expr_p = NULL_TREE;
4096 }
4097 break;
4098
4099 case COMPLEX_TYPE:
4100 {
4101 tree r, i;
4102
ffed8a01
AH
4103 if (notify_temp_creation)
4104 return GS_OK;
4105
26d44ae2 4106 /* Extract the real and imaginary parts out of the ctor. */
4038c495
GB
4107 gcc_assert (VEC_length (constructor_elt, elts) == 2);
4108 r = VEC_index (constructor_elt, elts, 0)->value;
4109 i = VEC_index (constructor_elt, elts, 1)->value;
26d44ae2
RH
4110 if (r == NULL || i == NULL)
4111 {
e8160c9a 4112 tree zero = build_zero_cst (TREE_TYPE (type));
26d44ae2
RH
4113 if (r == NULL)
4114 r = zero;
4115 if (i == NULL)
4116 i = zero;
4117 }
4118
4119 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4120 represent creation of a complex value. */
4121 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4122 {
4123 ctor = build_complex (type, r, i);
4124 TREE_OPERAND (*expr_p, 1) = ctor;
4125 }
4126 else
4127 {
b4257cfc 4128 ctor = build2 (COMPLEX_EXPR, type, r, i);
26d44ae2 4129 TREE_OPERAND (*expr_p, 1) = ctor;
726a989a
RB
4130 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4131 pre_p,
4132 post_p,
17ad5b5e
RH
4133 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4134 fb_rvalue);
26d44ae2
RH
4135 }
4136 }
4137 break;
506e2710 4138
26d44ae2 4139 case VECTOR_TYPE:
4038c495
GB
4140 {
4141 unsigned HOST_WIDE_INT ix;
4142 constructor_elt *ce;
e89be13b 4143
ffed8a01
AH
4144 if (notify_temp_creation)
4145 return GS_OK;
4146
4038c495
GB
4147 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4148 if (TREE_CONSTANT (ctor))
4149 {
4150 bool constant_p = true;
4151 tree value;
4152
4153 /* Even when ctor is constant, it might contain non-*_CST
9f1da821
RS
4154 elements, such as addresses or trapping values like
4155 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4156 in VECTOR_CST nodes. */
4038c495
GB
4157 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4158 if (!CONSTANT_CLASS_P (value))
4159 {
4160 constant_p = false;
4161 break;
4162 }
e89be13b 4163
4038c495
GB
4164 if (constant_p)
4165 {
4166 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4167 break;
4168 }
84816907 4169
9f1da821 4170 /* Don't reduce an initializer constant even if we can't
84816907
JM
4171 make a VECTOR_CST. It won't do anything for us, and it'll
4172 prevent us from representing it as a single constant. */
9f1da821
RS
4173 if (initializer_constant_valid_p (ctor, type))
4174 break;
4175
4176 TREE_CONSTANT (ctor) = 0;
4038c495 4177 }
e89be13b 4178
4038c495
GB
4179 /* Vector types use CONSTRUCTOR all the way through gimple
4180 compilation as a general initializer. */
ac47786e 4181 FOR_EACH_VEC_ELT (constructor_elt, elts, ix, ce)
4038c495
GB
4182 {
4183 enum gimplify_status tret;
726a989a
RB
4184 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4185 fb_rvalue);
4038c495
GB
4186 if (tret == GS_ERROR)
4187 ret = GS_ERROR;
4188 }
726a989a
RB
4189 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4190 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4038c495 4191 }
26d44ae2 4192 break;
6de9cd9a 4193
26d44ae2
RH
4194 default:
4195 /* So how did we get a CONSTRUCTOR for a scalar type? */
282899df 4196 gcc_unreachable ();
26d44ae2 4197 }
6de9cd9a 4198
26d44ae2
RH
4199 if (ret == GS_ERROR)
4200 return GS_ERROR;
4201 else if (want_value)
4202 {
26d44ae2
RH
4203 *expr_p = object;
4204 return GS_OK;
6de9cd9a 4205 }
26d44ae2 4206 else
726a989a
RB
4207 {
4208 /* If we have gimplified both sides of the initializer but have
4209 not emitted an assignment, do so now. */
4210 if (*expr_p)
4211 {
4212 tree lhs = TREE_OPERAND (*expr_p, 0);
4213 tree rhs = TREE_OPERAND (*expr_p, 1);
4214 gimple init = gimple_build_assign (lhs, rhs);
4215 gimplify_seq_add_stmt (pre_p, init);
4216 *expr_p = NULL;
4217 }
4218
4219 return GS_ALL_DONE;
4220 }
26d44ae2 4221}
6de9cd9a 4222
30d2e943
RG
4223/* Given a pointer value T, return a simplified version of an
4224 indirection through T, or NULL_TREE if no simplification is
de4af523
JJ
4225 possible. Note that the resulting type may be different from
4226 the type pointed to in the sense that it is still compatible
4227 from the langhooks point of view. */
30d2e943 4228
de4af523
JJ
4229tree
4230gimple_fold_indirect_ref (tree t)
30d2e943 4231{
70f34814 4232 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
30d2e943
RG
4233 tree sub = t;
4234 tree subtype;
4235
cb6b911a 4236 STRIP_NOPS (sub);
30d2e943
RG
4237 subtype = TREE_TYPE (sub);
4238 if (!POINTER_TYPE_P (subtype))
4239 return NULL_TREE;
4240
4241 if (TREE_CODE (sub) == ADDR_EXPR)
4242 {
4243 tree op = TREE_OPERAND (sub, 0);
4244 tree optype = TREE_TYPE (op);
4245 /* *&p => p */
f4088621 4246 if (useless_type_conversion_p (type, optype))
30d2e943 4247 return op;
de4af523 4248
30d2e943 4249 /* *(foo *)&fooarray => fooarray[0] */
de4af523 4250 if (TREE_CODE (optype) == ARRAY_TYPE
cb6b911a 4251 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
de4af523 4252 && useless_type_conversion_p (type, TREE_TYPE (optype)))
30d2e943
RG
4253 {
4254 tree type_domain = TYPE_DOMAIN (optype);
4255 tree min_val = size_zero_node;
4256 if (type_domain && TYPE_MIN_VALUE (type_domain))
4257 min_val = TYPE_MIN_VALUE (type_domain);
cb6b911a
RG
4258 if (TREE_CODE (min_val) == INTEGER_CST)
4259 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
30d2e943 4260 }
cb6b911a
RG
4261 /* *(foo *)&complexfoo => __real__ complexfoo */
4262 else if (TREE_CODE (optype) == COMPLEX_TYPE
4263 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4264 return fold_build1 (REALPART_EXPR, type, op);
4265 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
4266 else if (TREE_CODE (optype) == VECTOR_TYPE
4267 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4268 {
4269 tree part_width = TYPE_SIZE (type);
4270 tree index = bitsize_int (0);
4271 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4272 }
4273 }
4274
70f34814 4275 /* *(p + CST) -> ... */
cb6b911a
RG
4276 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4277 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4278 {
70f34814
RG
4279 tree addr = TREE_OPERAND (sub, 0);
4280 tree off = TREE_OPERAND (sub, 1);
4281 tree addrtype;
cb6b911a 4282
70f34814
RG
4283 STRIP_NOPS (addr);
4284 addrtype = TREE_TYPE (addr);
4285
4286 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
4287 if (TREE_CODE (addr) == ADDR_EXPR
4288 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
956011be
RG
4289 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
4290 && host_integerp (off, 1))
cb6b911a 4291 {
956011be 4292 unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
70f34814
RG
4293 tree part_width = TYPE_SIZE (type);
4294 unsigned HOST_WIDE_INT part_widthi
4295 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4296 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4297 tree index = bitsize_int (indexi);
4298 if (offset / part_widthi
4299 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
4300 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
4301 part_width, index);
cb6b911a 4302 }
70f34814
RG
4303
4304 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
4305 if (TREE_CODE (addr) == ADDR_EXPR
4306 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
4307 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
4308 {
4309 tree size = TYPE_SIZE_UNIT (type);
4310 if (tree_int_cst_equal (size, off))
4311 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
4312 }
4313
4314 /* *(p + CST) -> MEM_REF <p, CST>. */
4315 if (TREE_CODE (addr) != ADDR_EXPR
4316 || DECL_P (TREE_OPERAND (addr, 0)))
4317 return fold_build2 (MEM_REF, type,
4318 addr,
4319 build_int_cst_wide (ptype,
4320 TREE_INT_CST_LOW (off),
4321 TREE_INT_CST_HIGH (off)));
30d2e943
RG
4322 }
4323
4324 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4325 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
cb6b911a 4326 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
f4088621 4327 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
30d2e943
RG
4328 {
4329 tree type_domain;
4330 tree min_val = size_zero_node;
c2953725 4331 tree osub = sub;
de4af523 4332 sub = gimple_fold_indirect_ref (sub);
30d2e943 4333 if (! sub)
c2953725 4334 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
30d2e943
RG
4335 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4336 if (type_domain && TYPE_MIN_VALUE (type_domain))
4337 min_val = TYPE_MIN_VALUE (type_domain);
cb6b911a
RG
4338 if (TREE_CODE (min_val) == INTEGER_CST)
4339 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
30d2e943
RG
4340 }
4341
4342 return NULL_TREE;
4343}
4344
de4af523
JJ
4345/* Given a pointer value T, return a simplified version of an
4346 indirection through T, or NULL_TREE if no simplification is
4347 possible. This may only be applied to a rhs of an expression.
4348 Note that the resulting type may be different from the type pointed
4349 to in the sense that it is still compatible from the langhooks
4350 point of view. */
4351
4352static tree
4353gimple_fold_indirect_ref_rhs (tree t)
4354{
4355 return gimple_fold_indirect_ref (t);
4356}
4357
4caa08da
AH
4358/* Subroutine of gimplify_modify_expr to do simplifications of
4359 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4360 something changes. */
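/* Illustrative note (editor's addition, not from the original sources):
   typical simplifications performed by the switch below include

       s = c;           where c is a read-only decl initialized with a
                        constructor: the constructor is moved into the
                        RHS so the store can be split up;
       s = f ();        where f returns a large aggregate: s is handed
                        to the call as its return slot instead of
                        copying through a temporary;
       s = b ? x : y;   for non-register types: the store is pushed
                        down into both arms of the COND_EXPR.  */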
6de9cd9a 4361
26d44ae2 4362static enum gimplify_status
726a989a
RB
4363gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4364 gimple_seq *pre_p, gimple_seq *post_p,
4365 bool want_value)
26d44ae2 4366{
6d729f28
JM
4367 enum gimplify_status ret = GS_UNHANDLED;
4368 bool changed;
6de9cd9a 4369
6d729f28
JM
4370 do
4371 {
4372 changed = false;
4373 switch (TREE_CODE (*from_p))
4374 {
4375 case VAR_DECL:
4376 /* If we're assigning from a read-only variable initialized with
4377 a constructor, do the direct assignment from the constructor,
4378 but only if neither source nor target are volatile since this
4379 latter assignment might end up being done on a per-field basis. */
4380 if (DECL_INITIAL (*from_p)
4381 && TREE_READONLY (*from_p)
4382 && !TREE_THIS_VOLATILE (*from_p)
4383 && !TREE_THIS_VOLATILE (*to_p)
4384 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4385 {
4386 tree old_from = *from_p;
4387 enum gimplify_status subret;
4388
4389 /* Move the constructor into the RHS. */
4390 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4391
4392 /* Let's see if gimplify_init_constructor will need to put
4393 it in memory. */
4394 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4395 false, true);
4396 if (subret == GS_ERROR)
4397 {
4398 /* If so, revert the change. */
4399 *from_p = old_from;
4400 }
4401 else
4402 {
4403 ret = GS_OK;
4404 changed = true;
4405 }
4406 }
4407 break;
4408 case INDIRECT_REF:
4caa08da 4409 {
6d729f28 4410 /* If we have code like
ffed8a01 4411
6d729f28 4412 *(const A*)(A*)&x
ffed8a01 4413
6d729f28
JM
4414 where the type of "x" is a (possibly cv-qualified variant
4415 of "A"), treat the entire expression as identical to "x".
4416 This kind of code arises in C++ when an object is bound
4417 to a const reference, and if "x" is a TARGET_EXPR we want
4418 to take advantage of the optimization below. */
06baaba3 4419 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
6d729f28
JM
4420 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4421 if (t)
ffed8a01 4422 {
06baaba3
RG
4423 if (TREE_THIS_VOLATILE (t) != volatile_p)
4424 {
4425 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4426 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4427 build_fold_addr_expr (t));
4428 if (REFERENCE_CLASS_P (t))
4429 TREE_THIS_VOLATILE (t) = volatile_p;
4430 }
6d729f28
JM
4431 *from_p = t;
4432 ret = GS_OK;
4433 changed = true;
ffed8a01 4434 }
6d729f28
JM
4435 break;
4436 }
4437
4438 case TARGET_EXPR:
4439 {
4440 /* If we are initializing something from a TARGET_EXPR, strip the
4441 TARGET_EXPR and initialize it directly, if possible. This can't
4442 be done if the initializer is void, since that implies that the
4443 temporary is set in some non-trivial way.
4444
4445 ??? What about code that pulls out the temp and uses it
4446 elsewhere? I think that such code never uses the TARGET_EXPR as
4447 an initializer. If I'm wrong, we'll die because the temp won't
4448 have any RTL. In that case, I guess we'll need to replace
4449 references somehow. */
4450 tree init = TARGET_EXPR_INITIAL (*from_p);
4451
4452 if (init
4453 && !VOID_TYPE_P (TREE_TYPE (init)))
ffed8a01 4454 {
6d729f28 4455 *from_p = init;
ffed8a01 4456 ret = GS_OK;
6d729f28 4457 changed = true;
ffed8a01 4458 }
4caa08da 4459 }
6d729f28 4460 break;
f98625f6 4461
6d729f28
JM
4462 case COMPOUND_EXPR:
4463 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4464 caught. */
4465 gimplify_compound_expr (from_p, pre_p, true);
4466 ret = GS_OK;
4467 changed = true;
4468 break;
f98625f6 4469
6d729f28 4470 case CONSTRUCTOR:
ce3beba3
JM
4471 /* If we already made some changes, let the front end have a
4472 crack at this before we break it down. */
4473 if (ret != GS_UNHANDLED)
4474 break;
6d729f28
JM
4475 /* If we're initializing from a CONSTRUCTOR, break this into
4476 individual MODIFY_EXPRs. */
4477 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4478 false);
4479
4480 case COND_EXPR:
4481 /* If we're assigning to a non-register type, push the assignment
4482 down into the branches. This is mandatory for ADDRESSABLE types,
4483 since we cannot generate temporaries for such, but it saves a
4484 copy in other cases as well. */
4485 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
f98625f6 4486 {
6d729f28
JM
4487 /* This code should mirror the code in gimplify_cond_expr. */
4488 enum tree_code code = TREE_CODE (*expr_p);
4489 tree cond = *from_p;
4490 tree result = *to_p;
4491
4492 ret = gimplify_expr (&result, pre_p, post_p,
4493 is_gimple_lvalue, fb_lvalue);
4494 if (ret != GS_ERROR)
4495 ret = GS_OK;
4496
4497 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4498 TREE_OPERAND (cond, 1)
4499 = build2 (code, void_type_node, result,
4500 TREE_OPERAND (cond, 1));
4501 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4502 TREE_OPERAND (cond, 2)
4503 = build2 (code, void_type_node, unshare_expr (result),
4504 TREE_OPERAND (cond, 2));
4505
4506 TREE_TYPE (cond) = void_type_node;
4507 recalculate_side_effects (cond);
4508
4509 if (want_value)
4510 {
4511 gimplify_and_add (cond, pre_p);
4512 *expr_p = unshare_expr (result);
4513 }
4514 else
4515 *expr_p = cond;
4516 return ret;
f98625f6 4517 }
f98625f6 4518 break;
f98625f6 4519
6d729f28
JM
4520 case CALL_EXPR:
4521 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4522 return slot so that we don't generate a temporary. */
4523 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4524 && aggregate_value_p (*from_p, *from_p))
26d44ae2 4525 {
6d729f28
JM
4526 bool use_target;
4527
4528 if (!(rhs_predicate_for (*to_p))(*from_p))
4529 /* If we need a temporary, *to_p isn't accurate. */
4530 use_target = false;
ad19c4be 4531 /* It's OK to use the return slot directly unless it's an NRV. */
6d729f28
JM
4532 else if (TREE_CODE (*to_p) == RESULT_DECL
4533 && DECL_NAME (*to_p) == NULL_TREE
4534 && needs_to_live_in_memory (*to_p))
6d729f28
JM
4535 use_target = true;
4536 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4537 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4538 /* Don't force regs into memory. */
4539 use_target = false;
4540 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4541 /* It's OK to use the target directly if it's being
4542 initialized. */
4543 use_target = true;
aabb90e5
RG
4544 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4545 /* Always use the target and thus RSO for variable-sized types.
4546 GIMPLE cannot deal with a variable-sized assignment
4547 embedded in a call statement. */
4548 use_target = true;
345ae177
AH
4549 else if (TREE_CODE (*to_p) != SSA_NAME
4550 && (!is_gimple_variable (*to_p)
4551 || needs_to_live_in_memory (*to_p)))
6d729f28
JM
4552 /* Don't use the original target if it's already addressable;
4553 if its address escapes, and the called function uses the
4554 NRV optimization, a conforming program could see *to_p
4555 change before the called function returns; see c++/19317.
4556 When optimizing, the return_slot pass marks more functions
4557 as safe after we have escape info. */
4558 use_target = false;
4559 else
4560 use_target = true;
4561
4562 if (use_target)
4563 {
4564 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4565 mark_addressable (*to_p);
4566 }
26d44ae2 4567 }
6d729f28 4568 break;
6de9cd9a 4569
6d729f28
JM
4570 case WITH_SIZE_EXPR:
4571 /* Likewise for calls that return an aggregate of non-constant size,
4572 since we would not be able to generate a temporary at all. */
4573 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4574 {
4575 *from_p = TREE_OPERAND (*from_p, 0);
ebad5233
JM
4576 /* We don't change ret in this case because the
4577 WITH_SIZE_EXPR might have been added in
4578 gimplify_modify_expr, so returning GS_OK would lead to an
4579 infinite loop. */
6d729f28
JM
4580 changed = true;
4581 }
4582 break;
6de9cd9a 4583
6d729f28
JM
4584 /* If we're initializing from a container, push the initialization
4585 inside it. */
4586 case CLEANUP_POINT_EXPR:
4587 case BIND_EXPR:
4588 case STATEMENT_LIST:
26d44ae2 4589 {
6d729f28
JM
4590 tree wrap = *from_p;
4591 tree t;
dae7ec87 4592
6d729f28
JM
4593 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4594 fb_lvalue);
dae7ec87
JM
4595 if (ret != GS_ERROR)
4596 ret = GS_OK;
4597
6d729f28
JM
4598 t = voidify_wrapper_expr (wrap, *expr_p);
4599 gcc_assert (t == *expr_p);
dae7ec87
JM
4600
4601 if (want_value)
4602 {
6d729f28
JM
4603 gimplify_and_add (wrap, pre_p);
4604 *expr_p = unshare_expr (*to_p);
dae7ec87
JM
4605 }
4606 else
6d729f28
JM
4607 *expr_p = wrap;
4608 return GS_OK;
26d44ae2 4609 }
6de9cd9a 4610
6d729f28 4611 case COMPOUND_LITERAL_EXPR:
fa47911c 4612 {
6d729f28
JM
4613 tree complit = TREE_OPERAND (*expr_p, 1);
4614 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4615 tree decl = DECL_EXPR_DECL (decl_s);
4616 tree init = DECL_INITIAL (decl);
4617
4618 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4619 into struct T x = { 0, 1, 2 } if the address of the
4620 compound literal has never been taken. */
4621 if (!TREE_ADDRESSABLE (complit)
4622 && !TREE_ADDRESSABLE (decl)
4623 && init)
fa47911c 4624 {
6d729f28
JM
4625 *expr_p = copy_node (*expr_p);
4626 TREE_OPERAND (*expr_p, 1) = init;
4627 return GS_OK;
fa47911c
JM
4628 }
4629 }
4630
6d729f28
JM
4631 default:
4632 break;
2ec5deb5 4633 }
6d729f28
JM
4634 }
4635 while (changed);
6de9cd9a 4636
6de9cd9a
DN
4637 return ret;
4638}
4639
216820a4
RG
4640
4641/* Return true if T looks like a valid GIMPLE statement. */
4642
4643static bool
4644is_gimple_stmt (tree t)
4645{
4646 const enum tree_code code = TREE_CODE (t);
4647
4648 switch (code)
4649 {
4650 case NOP_EXPR:
4651 /* The only valid NOP_EXPR is the empty statement. */
4652 return IS_EMPTY_STMT (t);
4653
4654 case BIND_EXPR:
4655 case COND_EXPR:
4656 /* These are only valid if they're void. */
4657 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4658
4659 case SWITCH_EXPR:
4660 case GOTO_EXPR:
4661 case RETURN_EXPR:
4662 case LABEL_EXPR:
4663 case CASE_LABEL_EXPR:
4664 case TRY_CATCH_EXPR:
4665 case TRY_FINALLY_EXPR:
4666 case EH_FILTER_EXPR:
4667 case CATCH_EXPR:
4668 case ASM_EXPR:
4669 case STATEMENT_LIST:
4670 case OMP_PARALLEL:
4671 case OMP_FOR:
4672 case OMP_SECTIONS:
4673 case OMP_SECTION:
4674 case OMP_SINGLE:
4675 case OMP_MASTER:
4676 case OMP_ORDERED:
4677 case OMP_CRITICAL:
4678 case OMP_TASK:
4679 /* These are always void. */
4680 return true;
4681
4682 case CALL_EXPR:
4683 case MODIFY_EXPR:
4684 case PREDICT_EXPR:
4685 /* These are valid regardless of their type. */
4686 return true;
4687
4688 default:
4689 return false;
4690 }
4691}
4692
4693
d9c2d296
AP
4694/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4695 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
7b7e6ecd
EB
4696 DECL_GIMPLE_REG_P set.
4697
4698 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4699 other, unmodified part of the complex object just before the total store.
4700 As a consequence, if the object is still uninitialized, an undefined value
4701 will be loaded into a register, which may result in a spurious exception
4702 if the register is floating-point and the value happens to be a signaling
4703 NaN for example. Then the fully-fledged complex operations lowering pass
4704 followed by a DCE pass are necessary in order to fix things up. */
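/* Illustrative sketch (editor's addition, not from the original sources):
   a partial store such as

       __real__ z = x;

   is promoted to a total store, roughly

       D.1234 = __imag__ z;
       z = COMPLEX_EXPR <x, D.1234>;

   The temporary name is made up for the example.  */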
d9c2d296
AP
4705
4706static enum gimplify_status
726a989a
RB
4707gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4708 bool want_value)
d9c2d296
AP
4709{
4710 enum tree_code code, ocode;
4711 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4712
726a989a
RB
4713 lhs = TREE_OPERAND (*expr_p, 0);
4714 rhs = TREE_OPERAND (*expr_p, 1);
d9c2d296
AP
4715 code = TREE_CODE (lhs);
4716 lhs = TREE_OPERAND (lhs, 0);
4717
4718 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4719 other = build1 (ocode, TREE_TYPE (rhs), lhs);
8d2b0410 4720 TREE_NO_WARNING (other) = 1;
d9c2d296
AP
4721 other = get_formal_tmp_var (other, pre_p);
4722
4723 realpart = code == REALPART_EXPR ? rhs : other;
4724 imagpart = code == REALPART_EXPR ? other : rhs;
4725
4726 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4727 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4728 else
4729 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4730
726a989a
RB
4731 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4732 *expr_p = (want_value) ? rhs : NULL_TREE;
d9c2d296
AP
4733
4734 return GS_ALL_DONE;
4735}
4736
206048bd 4737/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6de9cd9a
DN
4738
4739 modify_expr
4740 : varname '=' rhs
4741 | '*' ID '=' rhs
4742
4743 PRE_P points to the list where side effects that must happen before
4744 *EXPR_P should be stored.
4745
4746 POST_P points to the list where side effects that must happen after
4747 *EXPR_P should be stored.
4748
4749 WANT_VALUE is nonzero iff we want to use the value of this expression
4750 in another expression. */
4751
4752static enum gimplify_status
726a989a
RB
4753gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4754 bool want_value)
6de9cd9a 4755{
726a989a
RB
4756 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4757 tree *to_p = &TREE_OPERAND (*expr_p, 0);
44de5aeb 4758 enum gimplify_status ret = GS_UNHANDLED;
726a989a 4759 gimple assign;
db3927fb 4760 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a 4761
282899df
NS
4762 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4763 || TREE_CODE (*expr_p) == INIT_EXPR);
6de9cd9a 4764
d0ad58f9
JM
4765 /* Trying to simplify a clobber using normal logic doesn't work,
4766 so handle it here. */
4767 if (TREE_CLOBBER_P (*from_p))
4768 {
4769 gcc_assert (!want_value && TREE_CODE (*to_p) == VAR_DECL);
4770 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4771 *expr_p = NULL;
4772 return GS_ALL_DONE;
4773 }
4774
1b24a790
RG
4775 /* Insert pointer conversions required by the middle-end that are not
 4776 required by the frontend. This fixes middle-end type checking for,
 4777 for example, gcc.dg/redecl-6.c. */
daad0278 4778 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
1b24a790
RG
4779 {
4780 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4781 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
db3927fb 4782 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
1b24a790
RG
4783 }
4784
83d7e8f0
JM
4785 /* See if any simplifications can be done based on what the RHS is. */
4786 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4787 want_value);
4788 if (ret != GS_UNHANDLED)
4789 return ret;
4790
4791 /* For zero sized types only gimplify the left hand side and right hand
4792 side as statements and throw away the assignment. Do this after
4793 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4794 types properly. */
753b34d7 4795 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
412f8986 4796 {
726a989a
RB
4797 gimplify_stmt (from_p, pre_p);
4798 gimplify_stmt (to_p, pre_p);
412f8986
AP
4799 *expr_p = NULL_TREE;
4800 return GS_ALL_DONE;
4801 }
6de9cd9a 4802
d25cee4d
RH
4803 /* If the value being copied is of variable width, compute the length
4804 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4805 before gimplifying any of the operands so that we can resolve any
4806 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4807 the size of the expression to be copied, not of the destination, so
726a989a 4808 that is what we must do here. */
d25cee4d 4809 maybe_with_size_expr (from_p);
6de9cd9a 4810
44de5aeb
RK
4811 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4812 if (ret == GS_ERROR)
4813 return ret;
6de9cd9a 4814
726a989a
RB
4815 /* As a special case, we have to temporarily allow for assignments
4816 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4817 a toplevel statement, when gimplifying the GENERIC expression
4818 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4819 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4820
4821 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4822 prevent gimplify_expr from trying to create a new temporary for
4823 foo's LHS, we tell it that it should only gimplify until it
4824 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4825 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4826 and all we need to do here is set 'a' to be its LHS. */
4827 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4828 fb_rvalue);
6de9cd9a
DN
4829 if (ret == GS_ERROR)
4830 return ret;
4831
44de5aeb
RK
4832 /* Now see if the above changed *from_p to something we handle specially. */
4833 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4834 want_value);
6de9cd9a
DN
4835 if (ret != GS_UNHANDLED)
4836 return ret;
4837
d25cee4d
RH
4838 /* If we've got a variable sized assignment between two lvalues (i.e. does
4839 not involve a call), then we can make things a bit more straightforward
4840 by converting the assignment to memcpy or memset. */
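  /* Sketch of the rewrite (operand names hypothetical): an assignment
	 DEST = WITH_SIZE_EXPR <SRC, SIZE>
     is lowered to a zeroing __builtin_memset when SRC is a (typically
     empty) CONSTRUCTOR, and to __builtin_memcpy (&DEST, &SRC, SIZE)
     when SRC is addressable.  */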
4841 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4842 {
4843 tree from = TREE_OPERAND (*from_p, 0);
4844 tree size = TREE_OPERAND (*from_p, 1);
4845
4846 if (TREE_CODE (from) == CONSTRUCTOR)
726a989a
RB
4847 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4848
e847cc68 4849 if (is_gimple_addressable (from))
d25cee4d
RH
4850 {
4851 *from_p = from;
726a989a
RB
4852 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4853 pre_p);
d25cee4d
RH
4854 }
4855 }
4856
e41d82f5
RH
4857 /* Transform partial stores to non-addressable complex variables into
4858 total stores. This allows us to use real instead of virtual operands
4859 for these variables, which improves optimization. */
4860 if ((TREE_CODE (*to_p) == REALPART_EXPR
4861 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4862 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4863 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4864
f173837a
EB
4865 /* Try to alleviate the effects of the gimplification creating artificial
4866 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4867 if (!gimplify_ctxp->into_ssa
f2896bc9 4868 && TREE_CODE (*from_p) == VAR_DECL
726a989a
RB
4869 && DECL_IGNORED_P (*from_p)
4870 && DECL_P (*to_p)
4871 && !DECL_IGNORED_P (*to_p))
f173837a
EB
4872 {
4873 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4874 DECL_NAME (*from_p)
4875 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4876 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4877 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
726a989a
RB
4878 }
4879
8f0fe813
NS
4880 if (want_value && TREE_THIS_VOLATILE (*to_p))
4881 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4882
726a989a
RB
4883 if (TREE_CODE (*from_p) == CALL_EXPR)
4884 {
4885 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4886 instead of a GIMPLE_ASSIGN. */
f20ca725
RG
4887 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4888 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4889 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
726a989a 4890 assign = gimple_build_call_from_tree (*from_p);
f20ca725 4891 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
5de8da9b
AO
4892 if (!gimple_call_noreturn_p (assign))
4893 gimple_call_set_lhs (assign, *to_p);
f173837a 4894 }
726a989a 4895 else
c2255bc4
AH
4896 {
4897 assign = gimple_build_assign (*to_p, *from_p);
4898 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4899 }
f173837a 4900
726a989a
RB
4901 gimplify_seq_add_stmt (pre_p, assign);
4902
4903 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6de9cd9a 4904 {
726a989a
RB
4905 /* If we've somehow already got an SSA_NAME on the LHS, then
4906 we've probably modified it twice. Not good. */
4907 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4908 *to_p = make_ssa_name (*to_p, assign);
4909 gimple_set_lhs (assign, *to_p);
4910 }
07beea0d 4911
726a989a
RB
4912 if (want_value)
4913 {
8f0fe813 4914 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
17ad5b5e 4915 return GS_OK;
6de9cd9a 4916 }
726a989a
RB
4917 else
4918 *expr_p = NULL;
6de9cd9a 4919
17ad5b5e 4920 return GS_ALL_DONE;
6de9cd9a
DN
4921}
4922
ad19c4be
EB
4923/* Gimplify a comparison between two variable-sized objects. Do this
4924 with a call to BUILT_IN_MEMCMP. */
44de5aeb
RK
4925
4926static enum gimplify_status
4927gimplify_variable_sized_compare (tree *expr_p)
4928{
692ad9aa 4929 location_t loc = EXPR_LOCATION (*expr_p);
44de5aeb
RK
4930 tree op0 = TREE_OPERAND (*expr_p, 0);
4931 tree op1 = TREE_OPERAND (*expr_p, 1);
692ad9aa 4932 tree t, arg, dest, src, expr;
5039610b
SL
4933
4934 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4935 arg = unshare_expr (arg);
4936 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
db3927fb
AH
4937 src = build_fold_addr_expr_loc (loc, op1);
4938 dest = build_fold_addr_expr_loc (loc, op0);
e79983f4 4939 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
db3927fb 4940 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
692ad9aa
EB
4941
4942 expr
b4257cfc 4943 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
692ad9aa
EB
4944 SET_EXPR_LOCATION (expr, loc);
4945 *expr_p = expr;
44de5aeb
RK
4946
4947 return GS_OK;
4948}
4949
ad19c4be
EB
4950/* Gimplify a comparison between two aggregate objects of integral scalar
4951 mode as a comparison between the bitwise equivalent scalar values. */
61c25908
OH
4952
4953static enum gimplify_status
4954gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4955{
db3927fb 4956 location_t loc = EXPR_LOCATION (*expr_p);
61c25908
OH
4957 tree op0 = TREE_OPERAND (*expr_p, 0);
4958 tree op1 = TREE_OPERAND (*expr_p, 1);
4959
4960 tree type = TREE_TYPE (op0);
4961 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4962
db3927fb
AH
4963 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4964 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
61c25908
OH
4965
4966 *expr_p
db3927fb 4967 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
61c25908
OH
4968
4969 return GS_OK;
4970}
4971
ad19c4be
EB
4972/* Gimplify an expression sequence. This function gimplifies each
4973 expression and rewrites the original expression with the last
6de9cd9a
DN
4974 expression of the sequence in GIMPLE form.
4975
4976 PRE_P points to the list where the side effects for all the
4977 expressions in the sequence will be emitted.
d3147f64 4978
6de9cd9a 4979 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6de9cd9a
DN
4980
4981static enum gimplify_status
726a989a 4982gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a
DN
4983{
4984 tree t = *expr_p;
4985
4986 do
4987 {
4988 tree *sub_p = &TREE_OPERAND (t, 0);
4989
4990 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4991 gimplify_compound_expr (sub_p, pre_p, false);
4992 else
726a989a 4993 gimplify_stmt (sub_p, pre_p);
6de9cd9a
DN
4994
4995 t = TREE_OPERAND (t, 1);
4996 }
4997 while (TREE_CODE (t) == COMPOUND_EXPR);
4998
4999 *expr_p = t;
5000 if (want_value)
5001 return GS_OK;
5002 else
5003 {
726a989a 5004 gimplify_stmt (expr_p, pre_p);
6de9cd9a
DN
5005 return GS_ALL_DONE;
5006 }
5007}
5008
726a989a
RB
5009/* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5010 gimplify. After gimplification, EXPR_P will point to a new temporary
5011 that holds the original value of the SAVE_EXPR node.
6de9cd9a 5012
726a989a 5013 PRE_P points to the list where side effects that must happen before
ad19c4be 5014 *EXPR_P should be stored. */
6de9cd9a
DN
5015
5016static enum gimplify_status
726a989a 5017gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5018{
5019 enum gimplify_status ret = GS_ALL_DONE;
5020 tree val;
5021
282899df 5022 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6de9cd9a
DN
5023 val = TREE_OPERAND (*expr_p, 0);
5024
7f5e6307
RH
5025 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5026 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
17ad5b5e 5027 {
7f5e6307
RH
5028 /* The operand may be a void-valued expression such as SAVE_EXPRs
5029 generated by the Java frontend for class initialization. It is
5030 being executed only for its side-effects. */
5031 if (TREE_TYPE (val) == void_type_node)
5032 {
5033 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5034 is_gimple_stmt, fb_none);
7f5e6307
RH
5035 val = NULL;
5036 }
5037 else
5038 val = get_initialized_tmp_var (val, pre_p, post_p);
5039
5040 TREE_OPERAND (*expr_p, 0) = val;
5041 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
17ad5b5e 5042 }
6de9cd9a 5043
7f5e6307
RH
5044 *expr_p = val;
5045
6de9cd9a
DN
5046 return ret;
5047}
5048
ad19c4be 5049/* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6de9cd9a
DN
5050
5051 unary_expr
5052 : ...
5053 | '&' varname
5054 ...
5055
5056 PRE_P points to the list where side effects that must happen before
5057 *EXPR_P should be stored.
5058
5059 POST_P points to the list where side effects that must happen after
5060 *EXPR_P should be stored. */
5061
5062static enum gimplify_status
726a989a 5063gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5064{
5065 tree expr = *expr_p;
5066 tree op0 = TREE_OPERAND (expr, 0);
5067 enum gimplify_status ret;
db3927fb 5068 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
5069
5070 switch (TREE_CODE (op0))
5071 {
5072 case INDIRECT_REF:
67f23620 5073 do_indirect_ref:
6de9cd9a
DN
5074 /* Check if we are dealing with an expression of the form '&*ptr'.
5075 While the front end folds away '&*ptr' into 'ptr', these
5076 expressions may be generated internally by the compiler (e.g.,
5077 builtins like __builtin_va_end). */
67f23620
RH
5078 /* Caution: the silent array decomposition semantics we allow for
5079 ADDR_EXPR means we can't always discard the pair. */
c87ac7e8
AO
5080 /* Gimplification of the ADDR_EXPR operand may drop
5081 cv-qualification conversions, so make sure we add them if
5082 needed. */
67f23620
RH
5083 {
5084 tree op00 = TREE_OPERAND (op0, 0);
5085 tree t_expr = TREE_TYPE (expr);
5086 tree t_op00 = TREE_TYPE (op00);
5087
f4088621 5088 if (!useless_type_conversion_p (t_expr, t_op00))
db3927fb 5089 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
67f23620
RH
5090 *expr_p = op00;
5091 ret = GS_OK;
5092 }
6de9cd9a
DN
5093 break;
5094
44de5aeb
RK
5095 case VIEW_CONVERT_EXPR:
5096 /* Take the address of our operand and then convert it to the type of
af72267c
RK
5097 this ADDR_EXPR.
5098
 5099 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
 5100 all clear. The impact of this transformation is even less clear. */
91804752
EB
5101
5102 /* If the operand is a useless conversion, look through it. Doing so
5103 guarantees that the ADDR_EXPR and its operand will remain of the
5104 same type. */
5105 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
317c0092 5106 op0 = TREE_OPERAND (op0, 0);
91804752 5107
db3927fb
AH
5108 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5109 build_fold_addr_expr_loc (loc,
5110 TREE_OPERAND (op0, 0)));
44de5aeb 5111 ret = GS_OK;
6de9cd9a
DN
5112 break;
5113
5114 default:
5115 /* We use fb_either here because the C frontend sometimes takes
5201931e
JM
5116 the address of a call that returns a struct; see
5117 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5118 the implied temporary explicit. */
936d04b6 5119
f76d6e6f 5120 /* Make the operand addressable. */
6de9cd9a 5121 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
e847cc68 5122 is_gimple_addressable, fb_either);
8b17cc05
RG
5123 if (ret == GS_ERROR)
5124 break;
67f23620 5125
f76d6e6f
EB
5126 /* Then mark it. Beware that it may not be possible to do so directly
5127 if a temporary has been created by the gimplification. */
5128 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
9e51aaf5 5129
8b17cc05 5130 op0 = TREE_OPERAND (expr, 0);
6de9cd9a 5131
8b17cc05
RG
5132 /* For various reasons, the gimplification of the expression
5133 may have made a new INDIRECT_REF. */
5134 if (TREE_CODE (op0) == INDIRECT_REF)
5135 goto do_indirect_ref;
5136
6b8b9e42
RG
5137 mark_addressable (TREE_OPERAND (expr, 0));
5138
5139 /* The FEs may end up building ADDR_EXPRs early on a decl with
5140 an incomplete type. Re-build ADDR_EXPRs in canonical form
5141 here. */
5142 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5143 *expr_p = build_fold_addr_expr (op0);
5144
8b17cc05 5145 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6b8b9e42
RG
5146 recompute_tree_invariant_for_addr_expr (*expr_p);
5147
5148 /* If we re-built the ADDR_EXPR add a conversion to the original type
5149 if required. */
5150 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5151 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
8b17cc05 5152
6de9cd9a
DN
5153 break;
5154 }
5155
6de9cd9a
DN
5156 return ret;
5157}
5158
5159/* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5160 value; output operands should be a gimple lvalue. */
5161
5162static enum gimplify_status
726a989a 5163gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a 5164{
726a989a
RB
5165 tree expr;
5166 int noutputs;
5167 const char **oconstraints;
6de9cd9a
DN
5168 int i;
5169 tree link;
5170 const char *constraint;
5171 bool allows_mem, allows_reg, is_inout;
5172 enum gimplify_status ret, tret;
726a989a
RB
5173 gimple stmt;
5174 VEC(tree, gc) *inputs;
5175 VEC(tree, gc) *outputs;
5176 VEC(tree, gc) *clobbers;
1c384bf1 5177 VEC(tree, gc) *labels;
726a989a 5178 tree link_next;
b8698a0f 5179
726a989a
RB
5180 expr = *expr_p;
5181 noutputs = list_length (ASM_OUTPUTS (expr));
5182 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5183
1c384bf1 5184 inputs = outputs = clobbers = labels = NULL;
6de9cd9a 5185
6de9cd9a 5186 ret = GS_ALL_DONE;
726a989a
RB
5187 link_next = NULL_TREE;
5188 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6de9cd9a 5189 {
2c68ba8e 5190 bool ok;
726a989a
RB
5191 size_t constraint_len;
5192
5193 link_next = TREE_CHAIN (link);
5194
5195 oconstraints[i]
5196 = constraint
6de9cd9a 5197 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6db081f1
AP
5198 constraint_len = strlen (constraint);
5199 if (constraint_len == 0)
5200 continue;
6de9cd9a 5201
2c68ba8e
LB
5202 ok = parse_output_constraint (&constraint, i, 0, 0,
5203 &allows_mem, &allows_reg, &is_inout);
5204 if (!ok)
5205 {
5206 ret = GS_ERROR;
5207 is_inout = false;
5208 }
6de9cd9a
DN
5209
5210 if (!allows_reg && allows_mem)
936d04b6 5211 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
5212
5213 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5214 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5215 fb_lvalue | fb_mayfail);
5216 if (tret == GS_ERROR)
5217 {
5218 error ("invalid lvalue in asm output %d", i);
5219 ret = tret;
5220 }
5221
726a989a
RB
5222 VEC_safe_push (tree, gc, outputs, link);
5223 TREE_CHAIN (link) = NULL_TREE;
5224
6de9cd9a
DN
5225 if (is_inout)
5226 {
5227 /* An input/output operand. To give the optimizers more
5228 flexibility, split it into separate input and output
5229 operands. */
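	     /* For example, with hypothetical user code not taken from
		this file:
		    asm ("incl %0" : "+r" (x));
		behaves as if written
		    asm ("incl %0" : "=r" (x) : "0" (x));
		i.e. the "+r" output constraint is rewritten to "=r" and a
		matching input referring back to the operand number is
		appended to ASM_INPUTS.  */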
5230 tree input;
5231 char buf[10];
6de9cd9a
DN
5232
5233 /* Turn the in/out constraint into an output constraint. */
5234 char *p = xstrdup (constraint);
5235 p[0] = '=';
5236 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6de9cd9a
DN
5237
5238 /* And add a matching input constraint. */
5239 if (allows_reg)
5240 {
5241 sprintf (buf, "%d", i);
372d72d9
JJ
5242
5243 /* If there are multiple alternatives in the constraint,
5244 handle each of them individually. Those that allow register
5245 will be replaced with operand number, the others will stay
5246 unchanged. */
5247 if (strchr (p, ',') != NULL)
5248 {
5249 size_t len = 0, buflen = strlen (buf);
5250 char *beg, *end, *str, *dst;
5251
5252 for (beg = p + 1;;)
5253 {
5254 end = strchr (beg, ',');
5255 if (end == NULL)
5256 end = strchr (beg, '\0');
5257 if ((size_t) (end - beg) < buflen)
5258 len += buflen + 1;
5259 else
5260 len += end - beg + 1;
5261 if (*end)
5262 beg = end + 1;
5263 else
5264 break;
5265 }
5266
858904db 5267 str = (char *) alloca (len);
372d72d9
JJ
5268 for (beg = p + 1, dst = str;;)
5269 {
5270 const char *tem;
5271 bool mem_p, reg_p, inout_p;
5272
5273 end = strchr (beg, ',');
5274 if (end)
5275 *end = '\0';
5276 beg[-1] = '=';
5277 tem = beg - 1;
5278 parse_output_constraint (&tem, i, 0, 0,
5279 &mem_p, &reg_p, &inout_p);
5280 if (dst != str)
5281 *dst++ = ',';
5282 if (reg_p)
5283 {
5284 memcpy (dst, buf, buflen);
5285 dst += buflen;
5286 }
5287 else
5288 {
5289 if (end)
5290 len = end - beg;
5291 else
5292 len = strlen (beg);
5293 memcpy (dst, beg, len);
5294 dst += len;
5295 }
5296 if (end)
5297 beg = end + 1;
5298 else
5299 break;
5300 }
5301 *dst = '\0';
5302 input = build_string (dst - str, str);
5303 }
5304 else
5305 input = build_string (strlen (buf), buf);
6de9cd9a
DN
5306 }
5307 else
5308 input = build_string (constraint_len - 1, constraint + 1);
372d72d9
JJ
5309
5310 free (p);
5311
6de9cd9a
DN
5312 input = build_tree_list (build_tree_list (NULL_TREE, input),
5313 unshare_expr (TREE_VALUE (link)));
5314 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5315 }
5316 }
5317
726a989a
RB
5318 link_next = NULL_TREE;
5319 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6de9cd9a 5320 {
726a989a
RB
5321 link_next = TREE_CHAIN (link);
5322 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6de9cd9a
DN
5323 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5324 oconstraints, &allows_mem, &allows_reg);
5325
f497c16c
JJ
5326 /* If we can't make copies, we can only accept memory. */
5327 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5328 {
5329 if (allows_mem)
5330 allows_reg = 0;
5331 else
5332 {
5333 error ("impossible constraint in %<asm%>");
5334 error ("non-memory input %d must stay in memory", i);
5335 return GS_ERROR;
5336 }
5337 }
5338
6de9cd9a
DN
5339 /* If the operand is a memory input, it should be an lvalue. */
5340 if (!allows_reg && allows_mem)
5341 {
502c5084
JJ
5342 tree inputv = TREE_VALUE (link);
5343 STRIP_NOPS (inputv);
5344 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5345 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5346 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5347 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5348 TREE_VALUE (link) = error_mark_node;
6de9cd9a
DN
5349 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5350 is_gimple_lvalue, fb_lvalue | fb_mayfail);
936d04b6 5351 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
5352 if (tret == GS_ERROR)
5353 {
6a3799eb
AH
5354 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5355 input_location = EXPR_LOCATION (TREE_VALUE (link));
6de9cd9a
DN
5356 error ("memory input %d is not directly addressable", i);
5357 ret = tret;
5358 }
5359 }
5360 else
5361 {
5362 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
e670d9e4 5363 is_gimple_asm_val, fb_rvalue);
6de9cd9a
DN
5364 if (tret == GS_ERROR)
5365 ret = tret;
5366 }
726a989a
RB
5367
5368 TREE_CHAIN (link) = NULL_TREE;
5369 VEC_safe_push (tree, gc, inputs, link);
6de9cd9a 5370 }
b8698a0f 5371
726a989a 5372 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
1c384bf1
RH
5373 VEC_safe_push (tree, gc, clobbers, link);
5374
5375 for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
5376 VEC_safe_push (tree, gc, labels, link);
726a989a 5377
a406865a
RG
5378 /* Do not add ASMs with errors to the gimple IL stream. */
5379 if (ret != GS_ERROR)
5380 {
5381 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
1c384bf1 5382 inputs, outputs, clobbers, labels);
726a989a 5383
a406865a
RG
5384 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5385 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5386
5387 gimplify_seq_add_stmt (pre_p, stmt);
5388 }
6de9cd9a
DN
5389
5390 return ret;
5391}
5392
5393/* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
726a989a 5394 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6de9cd9a
DN
5395 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5396 return to this function.
5397
5398 FIXME should we complexify the prequeue handling instead? Or use flags
5399 for all the cleanups and let the optimizer tighten them up? The current
5400 code seems pretty fragile; it will break on a cleanup within any
5401 non-conditional nesting. But any such nesting would be broken, anyway;
5402 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5403 and continues out of it. We can do that at the RTL level, though, so
5404 having an optimizer to tighten up try/finally regions would be a Good
5405 Thing. */
5406
5407static enum gimplify_status
726a989a 5408gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 5409{
726a989a
RB
5410 gimple_stmt_iterator iter;
5411 gimple_seq body_sequence = NULL;
6de9cd9a 5412
325c3691 5413 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6de9cd9a
DN
5414
5415 /* We only care about the number of conditions between the innermost
df77f454
JM
5416 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5417 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6de9cd9a 5418 int old_conds = gimplify_ctxp->conditions;
726a989a 5419 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
32be32af 5420 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6de9cd9a 5421 gimplify_ctxp->conditions = 0;
726a989a 5422 gimplify_ctxp->conditional_cleanups = NULL;
32be32af 5423 gimplify_ctxp->in_cleanup_point_expr = true;
6de9cd9a 5424
726a989a 5425 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6de9cd9a
DN
5426
5427 gimplify_ctxp->conditions = old_conds;
df77f454 5428 gimplify_ctxp->conditional_cleanups = old_cleanups;
32be32af 5429 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6de9cd9a 5430
726a989a 5431 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6de9cd9a 5432 {
726a989a 5433 gimple wce = gsi_stmt (iter);
6de9cd9a 5434
726a989a 5435 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6de9cd9a 5436 {
726a989a 5437 if (gsi_one_before_end_p (iter))
6de9cd9a 5438 {
726a989a
RB
5439 /* Note that gsi_insert_seq_before and gsi_remove do not
5440 scan operands, unlike some other sequence mutators. */
ae0595b0
RG
5441 if (!gimple_wce_cleanup_eh_only (wce))
5442 gsi_insert_seq_before_without_update (&iter,
5443 gimple_wce_cleanup (wce),
5444 GSI_SAME_STMT);
726a989a 5445 gsi_remove (&iter, true);
6de9cd9a
DN
5446 break;
5447 }
5448 else
5449 {
82d6e6fc 5450 gimple gtry;
726a989a
RB
5451 gimple_seq seq;
5452 enum gimple_try_flags kind;
40aac948 5453
726a989a
RB
5454 if (gimple_wce_cleanup_eh_only (wce))
5455 kind = GIMPLE_TRY_CATCH;
40aac948 5456 else
726a989a
RB
5457 kind = GIMPLE_TRY_FINALLY;
5458 seq = gsi_split_seq_after (iter);
5459
82d6e6fc 5460 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
726a989a
RB
5461 /* Do not use gsi_replace here, as it may scan operands.
5462 We want to do a simple structural modification only. */
355a7673
MM
5463 gsi_set_stmt (&iter, gtry);
5464 iter = gsi_start (gtry->gimple_try.eval);
6de9cd9a
DN
5465 }
5466 }
5467 else
726a989a 5468 gsi_next (&iter);
6de9cd9a
DN
5469 }
5470
726a989a 5471 gimplify_seq_add_seq (pre_p, body_sequence);
6de9cd9a
DN
5472 if (temp)
5473 {
5474 *expr_p = temp;
6de9cd9a
DN
5475 return GS_OK;
5476 }
5477 else
5478 {
726a989a 5479 *expr_p = NULL;
6de9cd9a
DN
5480 return GS_ALL_DONE;
5481 }
5482}
5483
5484/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
726a989a
RB
5485 is the cleanup action required. EH_ONLY is true if the cleanup should
5486 only be executed if an exception is thrown, not on normal exit. */
6de9cd9a
DN
5487
5488static void
726a989a 5489gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
6de9cd9a 5490{
726a989a
RB
5491 gimple wce;
5492 gimple_seq cleanup_stmts = NULL;
6de9cd9a
DN
5493
5494 /* Errors can result in improperly nested cleanups. Which results in
726a989a 5495 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
1da2ed5f 5496 if (seen_error ())
6de9cd9a
DN
5497 return;
5498
5499 if (gimple_conditional_context ())
5500 {
5501 /* If we're in a conditional context, this is more complex. We only
5502 want to run the cleanup if we actually ran the initialization that
5503 necessitates it, but we want to run it after the end of the
5504 conditional context. So we wrap the try/finally around the
5505 condition and use a flag to determine whether or not to actually
5506 run the destructor. Thus
5507
5508 test ? f(A()) : 0
5509
5510 becomes (approximately)
5511
5512 flag = 0;
5513 try {
5514 if (test) { A::A(temp); flag = 1; val = f(temp); }
5515 else { val = 0; }
5516 } finally {
5517 if (flag) A::~A(temp);
5518 }
5519 val
5520 */
6de9cd9a 5521 tree flag = create_tmp_var (boolean_type_node, "cleanup");
726a989a
RB
5522 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5523 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5524
b4257cfc 5525 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
726a989a
RB
5526 gimplify_stmt (&cleanup, &cleanup_stmts);
5527 wce = gimple_build_wce (cleanup_stmts);
5528
5529 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5530 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5531 gimplify_seq_add_stmt (pre_p, ftrue);
6de9cd9a
DN
5532
5533 /* Because of this manipulation, and the EH edges that jump
5534 threading cannot redirect, the temporary (VAR) will appear
5535 to be used uninitialized. Don't warn. */
5536 TREE_NO_WARNING (var) = 1;
5537 }
5538 else
5539 {
726a989a
RB
5540 gimplify_stmt (&cleanup, &cleanup_stmts);
5541 wce = gimple_build_wce (cleanup_stmts);
5542 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5543 gimplify_seq_add_stmt (pre_p, wce);
6de9cd9a 5544 }
6de9cd9a
DN
5545}
5546
5547/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5548
5549static enum gimplify_status
726a989a 5550gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5551{
5552 tree targ = *expr_p;
5553 tree temp = TARGET_EXPR_SLOT (targ);
5554 tree init = TARGET_EXPR_INITIAL (targ);
5555 enum gimplify_status ret;
5556
5557 if (init)
5558 {
d0ad58f9
JM
5559 tree cleanup = NULL_TREE;
5560
3a5b9284 5561 /* TARGET_EXPR temps aren't part of the enclosing block, so add the temp
786025ea
JJ
 5562 to the temps list. Also handle variable-length TARGET_EXPRs. */
5563 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5564 {
5565 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5566 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5567 gimplify_vla_decl (temp, pre_p);
5568 }
5569 else
5570 gimple_add_tmp_var (temp);
6de9cd9a 5571
3a5b9284
RH
5572 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5573 expression is supposed to initialize the slot. */
5574 if (VOID_TYPE_P (TREE_TYPE (init)))
5575 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5576 else
325c3691 5577 {
726a989a
RB
5578 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5579 init = init_expr;
5580 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5581 init = NULL;
5582 ggc_free (init_expr);
325c3691 5583 }
3a5b9284 5584 if (ret == GS_ERROR)
abc67de1
SM
5585 {
5586 /* PR c++/28266 Make sure this is expanded only once. */
5587 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5588 return GS_ERROR;
5589 }
726a989a
RB
5590 if (init)
5591 gimplify_and_add (init, pre_p);
6de9cd9a
DN
5592
5593 /* If needed, push the cleanup for the temp. */
5594 if (TARGET_EXPR_CLEANUP (targ))
d0ad58f9
JM
5595 {
5596 if (CLEANUP_EH_ONLY (targ))
5597 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5598 CLEANUP_EH_ONLY (targ), pre_p);
5599 else
5600 cleanup = TARGET_EXPR_CLEANUP (targ);
5601 }
5602
5603 /* Add a clobber for the temporary going out of scope, like
5604 gimplify_bind_expr. */
32be32af
JJ
5605 if (gimplify_ctxp->in_cleanup_point_expr
5606 && needs_to_live_in_memory (temp))
d0ad58f9
JM
5607 {
5608 tree clobber = build_constructor (TREE_TYPE (temp), NULL);
5609 TREE_THIS_VOLATILE (clobber) = true;
5610 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5611 if (cleanup)
5612 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5613 clobber);
5614 else
5615 cleanup = clobber;
5616 }
5617
5618 if (cleanup)
5619 gimple_push_cleanup (temp, cleanup, false, pre_p);
6de9cd9a
DN
5620
5621 /* Only expand this once. */
5622 TREE_OPERAND (targ, 3) = init;
5623 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5624 }
282899df 5625 else
6de9cd9a 5626 /* We should have expanded this before. */
282899df 5627 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6de9cd9a
DN
5628
5629 *expr_p = temp;
5630 return GS_OK;
5631}
5632
5633/* Gimplification of expression trees. */
5634
726a989a
RB
5635/* Gimplify an expression which appears at statement context. The
5636 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5637 NULL, a new sequence is allocated.
6de9cd9a 5638
726a989a
RB
5639 Return true if we actually added a statement to the queue. */
5640
5641bool
5642gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6de9cd9a 5643{
726a989a 5644 gimple_seq_node last;
6de9cd9a 5645
726a989a
RB
5646 last = gimple_seq_last (*seq_p);
5647 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5648 return last != gimple_seq_last (*seq_p);
6de9cd9a
DN
5649}
5650
953ff289
DN
 5651/* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
5652 to CTX. If entries already exist, force them to be some flavor of private.
5653 If there is no enclosing parallel, do nothing. */
5654
5655void
5656omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5657{
5658 splay_tree_node n;
5659
5660 if (decl == NULL || !DECL_P (decl))
5661 return;
5662
5663 do
5664 {
5665 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5666 if (n != NULL)
5667 {
5668 if (n->value & GOVD_SHARED)
5669 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5670 else
5671 return;
5672 }
a68ab351 5673 else if (ctx->region_type != ORT_WORKSHARE)
953ff289
DN
5674 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5675
5676 ctx = ctx->outer_context;
5677 }
5678 while (ctx);
5679}
5680
5681/* Similarly for each of the type sizes of TYPE. */
5682
5683static void
5684omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5685{
5686 if (type == NULL || type == error_mark_node)
5687 return;
5688 type = TYPE_MAIN_VARIANT (type);
5689
5690 if (pointer_set_insert (ctx->privatized_types, type))
5691 return;
5692
5693 switch (TREE_CODE (type))
5694 {
5695 case INTEGER_TYPE:
5696 case ENUMERAL_TYPE:
5697 case BOOLEAN_TYPE:
953ff289 5698 case REAL_TYPE:
325217ed 5699 case FIXED_POINT_TYPE:
953ff289
DN
5700 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5701 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5702 break;
5703
5704 case ARRAY_TYPE:
5705 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5706 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5707 break;
5708
5709 case RECORD_TYPE:
5710 case UNION_TYPE:
5711 case QUAL_UNION_TYPE:
5712 {
5713 tree field;
910ad8de 5714 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
953ff289
DN
5715 if (TREE_CODE (field) == FIELD_DECL)
5716 {
5717 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5718 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5719 }
5720 }
5721 break;
5722
5723 case POINTER_TYPE:
5724 case REFERENCE_TYPE:
5725 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5726 break;
5727
5728 default:
5729 break;
5730 }
5731
5732 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5733 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5734 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5735}
5736
5737/* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5738
5739static void
5740omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5741{
5742 splay_tree_node n;
5743 unsigned int nflags;
5744 tree t;
5745
b504a918 5746 if (error_operand_p (decl))
953ff289
DN
5747 return;
5748
5749 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5750 there are constructors involved somewhere. */
5751 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5752 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5753 flags |= GOVD_SEEN;
5754
5755 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5756 if (n != NULL)
5757 {
5758 /* We shouldn't be re-adding the decl with the same data
5759 sharing class. */
5760 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5761 /* The only combination of data sharing classes we should see is
5762 FIRSTPRIVATE and LASTPRIVATE. */
5763 nflags = n->value | flags;
5764 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5765 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5766 n->value = nflags;
5767 return;
5768 }
5769
5770 /* When adding a variable-sized variable, we have to handle all sorts
b8698a0f 5771 of additional bits of data: the pointer replacement variable, and
953ff289 5772 the parameters of the type. */
4c923c28 5773 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
953ff289
DN
5774 {
5775 /* Add the pointer replacement variable as PRIVATE if the variable
5776 replacement is private, else FIRSTPRIVATE since we'll need the
5777 address of the original variable either for SHARED, or for the
5778 copy into or out of the context. */
5779 if (!(flags & GOVD_LOCAL))
5780 {
5781 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5782 nflags |= flags & GOVD_SEEN;
5783 t = DECL_VALUE_EXPR (decl);
5784 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5785 t = TREE_OPERAND (t, 0);
5786 gcc_assert (DECL_P (t));
5787 omp_add_variable (ctx, t, nflags);
5788 }
5789
5790 /* Add all of the variable and type parameters (which should have
5791 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5792 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5793 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5794 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5795
5796 /* The variable-sized variable itself is never SHARED, only some form
5797 of PRIVATE. The sharing would take place via the pointer variable
5798 which we remapped above. */
5799 if (flags & GOVD_SHARED)
5800 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5801 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5802
b8698a0f 5803 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
953ff289
DN
5804 alloca statement we generate for the variable, so make sure it
5805 is available. This isn't automatically needed for the SHARED
4288fea2
JJ
5806 case, since we won't be allocating local storage then.
5807 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5808 in this case omp_notice_variable will be called later
5809 on when it is gimplified. */
423ed416
JJ
5810 else if (! (flags & GOVD_LOCAL)
5811 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
953ff289
DN
5812 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5813 }
5814 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5815 {
5816 gcc_assert ((flags & GOVD_LOCAL) == 0);
5817 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5818
5819 /* Similar to the direct variable sized case above, we'll need the
5820 size of references being privatized. */
5821 if ((flags & GOVD_SHARED) == 0)
5822 {
5823 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4c923c28 5824 if (TREE_CODE (t) != INTEGER_CST)
953ff289
DN
5825 omp_notice_variable (ctx, t, true);
5826 }
5827 }
5828
5829 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5830}
5831
f22f4340
JJ
5832/* Notice a threadprivate variable DECL used in OpenMP context CTX.
5833 This just prints out diagnostics about threadprivate variable uses
5834 in untied tasks. If DECL2 is non-NULL, prevent this warning
5835 on that variable. */
5836
5837static bool
5838omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5839 tree decl2)
5840{
5841 splay_tree_node n;
5842
5843 if (ctx->region_type != ORT_UNTIED_TASK)
5844 return false;
5845 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5846 if (n == NULL)
5847 {
ad19c4be
EB
5848 error ("threadprivate variable %qE used in untied task",
5849 DECL_NAME (decl));
f22f4340
JJ
5850 error_at (ctx->location, "enclosing task");
5851 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5852 }
5853 if (decl2)
5854 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5855 return false;
5856}
5857
953ff289
DN
5858/* Record the fact that DECL was used within the OpenMP context CTX.
5859 IN_CODE is true when real code uses DECL, and false when we should
5860 merely emit default(none) errors. Return true if DECL is going to
5861 be remapped and thus DECL shouldn't be gimplified into its
5862 DECL_VALUE_EXPR (if any). */
5863
5864static bool
5865omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5866{
5867 splay_tree_node n;
5868 unsigned flags = in_code ? GOVD_SEEN : 0;
5869 bool ret = false, shared;
5870
b504a918 5871 if (error_operand_p (decl))
953ff289
DN
5872 return false;
5873
5874 /* Threadprivate variables are predetermined. */
5875 if (is_global_var (decl))
5876 {
5877 if (DECL_THREAD_LOCAL_P (decl))
f22f4340 5878 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
953ff289
DN
5879
5880 if (DECL_HAS_VALUE_EXPR_P (decl))
5881 {
5882 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5883
5884 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
f22f4340 5885 return omp_notice_threadprivate_variable (ctx, decl, value);
953ff289
DN
5886 }
5887 }
5888
5889 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5890 if (n == NULL)
5891 {
5892 enum omp_clause_default_kind default_kind, kind;
a68ab351 5893 struct gimplify_omp_ctx *octx;
953ff289 5894
a68ab351 5895 if (ctx->region_type == ORT_WORKSHARE)
953ff289
DN
5896 goto do_outer;
5897
5898 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5899 remapped firstprivate instead of shared. To some extent this is
5900 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5901 default_kind = ctx->default_kind;
5902 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5903 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5904 default_kind = kind;
5905
5906 switch (default_kind)
5907 {
5908 case OMP_CLAUSE_DEFAULT_NONE:
4f1e4960 5909 error ("%qE not specified in enclosing parallel",
79943d19 5910 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
f22f4340
JJ
5911 if ((ctx->region_type & ORT_TASK) != 0)
5912 error_at (ctx->location, "enclosing task");
5913 else
5914 error_at (ctx->location, "enclosing parallel");
953ff289
DN
5915 /* FALLTHRU */
5916 case OMP_CLAUSE_DEFAULT_SHARED:
5917 flags |= GOVD_SHARED;
5918 break;
5919 case OMP_CLAUSE_DEFAULT_PRIVATE:
5920 flags |= GOVD_PRIVATE;
5921 break;
a68ab351
JJ
5922 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5923 flags |= GOVD_FIRSTPRIVATE;
5924 break;
5925 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5926 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
f22f4340 5927 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
a68ab351
JJ
5928 if (ctx->outer_context)
5929 omp_notice_variable (ctx->outer_context, decl, in_code);
5930 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5931 {
5932 splay_tree_node n2;
5933
5934 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5935 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5936 {
5937 flags |= GOVD_FIRSTPRIVATE;
5938 break;
5939 }
5940 if ((octx->region_type & ORT_PARALLEL) != 0)
5941 break;
5942 }
5943 if (flags & GOVD_FIRSTPRIVATE)
5944 break;
5945 if (octx == NULL
5946 && (TREE_CODE (decl) == PARM_DECL
5947 || (!is_global_var (decl)
5948 && DECL_CONTEXT (decl) == current_function_decl)))
5949 {
5950 flags |= GOVD_FIRSTPRIVATE;
5951 break;
5952 }
5953 flags |= GOVD_SHARED;
5954 break;
953ff289
DN
5955 default:
5956 gcc_unreachable ();
5957 }
5958
a68ab351
JJ
5959 if ((flags & GOVD_PRIVATE)
5960 && lang_hooks.decls.omp_private_outer_ref (decl))
5961 flags |= GOVD_PRIVATE_OUTER_REF;
5962
953ff289
DN
5963 omp_add_variable (ctx, decl, flags);
5964
5965 shared = (flags & GOVD_SHARED) != 0;
5966 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5967 goto do_outer;
5968 }
5969
3ad6b266
JJ
5970 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5971 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5972 && DECL_SIZE (decl)
5973 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5974 {
5975 splay_tree_node n2;
5976 tree t = DECL_VALUE_EXPR (decl);
5977 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5978 t = TREE_OPERAND (t, 0);
5979 gcc_assert (DECL_P (t));
5980 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5981 n2->value |= GOVD_SEEN;
5982 }
5983
953ff289
DN
5984 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5985 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5986
5987 /* If nothing changed, there's nothing left to do. */
5988 if ((n->value & flags) == flags)
5989 return ret;
5990 flags |= n->value;
5991 n->value = flags;
5992
5993 do_outer:
5994 /* If the variable is private in the current context, then we don't
5995 need to propagate anything to an outer context. */
a68ab351 5996 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
953ff289
DN
5997 return ret;
5998 if (ctx->outer_context
5999 && omp_notice_variable (ctx->outer_context, decl, in_code))
6000 return true;
6001 return ret;
6002}
6003
6004/* Verify that DECL is private within CTX. If there's specific information
6005 to the contrary in the innermost scope, generate an error. */
6006
6007static bool
6008omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
6009{
6010 splay_tree_node n;
6011
6012 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6013 if (n != NULL)
6014 {
6015 if (n->value & GOVD_SHARED)
6016 {
6017 if (ctx == gimplify_omp_ctxp)
f6a5ffbf 6018 {
4f1e4960
JM
6019 error ("iteration variable %qE should be private",
6020 DECL_NAME (decl));
f6a5ffbf
JJ
6021 n->value = GOVD_PRIVATE;
6022 return true;
6023 }
6024 else
6025 return false;
953ff289 6026 }
761041be
JJ
6027 else if ((n->value & GOVD_EXPLICIT) != 0
6028 && (ctx == gimplify_omp_ctxp
a68ab351 6029 || (ctx->region_type == ORT_COMBINED_PARALLEL
761041be
JJ
6030 && gimplify_omp_ctxp->outer_context == ctx)))
6031 {
6032 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4f1e4960
JM
6033 error ("iteration variable %qE should not be firstprivate",
6034 DECL_NAME (decl));
761041be 6035 else if ((n->value & GOVD_REDUCTION) != 0)
4f1e4960
JM
6036 error ("iteration variable %qE should not be reduction",
6037 DECL_NAME (decl));
761041be 6038 }
ca2b1311
JJ
6039 return (ctx == gimplify_omp_ctxp
6040 || (ctx->region_type == ORT_COMBINED_PARALLEL
6041 && gimplify_omp_ctxp->outer_context == ctx));
953ff289
DN
6042 }
6043
a68ab351 6044 if (ctx->region_type != ORT_WORKSHARE)
953ff289 6045 return false;
f6a5ffbf
JJ
6046 else if (ctx->outer_context)
6047 return omp_is_private (ctx->outer_context, decl);
ca2b1311 6048 return false;
953ff289
DN
6049}
6050
07b7aade
JJ
6051/* Return true if DECL is private within a parallel region
6052 that binds to the current construct's context or in parallel
6053 region's REDUCTION clause. */
6054
6055static bool
6056omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
6057{
6058 splay_tree_node n;
6059
6060 do
6061 {
6062 ctx = ctx->outer_context;
6063 if (ctx == NULL)
6064 return !(is_global_var (decl)
6065 /* References might be private, but might be shared too. */
6066 || lang_hooks.decls.omp_privatize_by_reference (decl));
6067
6068 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6069 if (n != NULL)
6070 return (n->value & GOVD_SHARED) == 0;
6071 }
a68ab351 6072 while (ctx->region_type == ORT_WORKSHARE);
07b7aade
JJ
6073 return false;
6074}
6075
953ff289
DN
6076/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
6077 and previous omp contexts. */
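/* For instance, with hypothetical user code not taken from this file:
       #pragma omp parallel shared(a) private(b) firstprivate(c)
   records A as GOVD_SHARED | GOVD_EXPLICIT, B as
   GOVD_PRIVATE | GOVD_EXPLICIT and C as GOVD_FIRSTPRIVATE | GOVD_EXPLICIT
   in the newly created context.  */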
6078
6079static void
726a989a 6080gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
a68ab351 6081 enum omp_region_type region_type)
953ff289
DN
6082{
6083 struct gimplify_omp_ctx *ctx, *outer_ctx;
d406b663 6084 struct gimplify_ctx gctx;
953ff289
DN
6085 tree c;
6086
a68ab351 6087 ctx = new_omp_context (region_type);
953ff289
DN
6088 outer_ctx = ctx->outer_context;
6089
6090 while ((c = *list_p) != NULL)
6091 {
953ff289
DN
6092 bool remove = false;
6093 bool notice_outer = true;
07b7aade 6094 const char *check_non_private = NULL;
953ff289
DN
6095 unsigned int flags;
6096 tree decl;
6097
aaf46ef9 6098 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
6099 {
6100 case OMP_CLAUSE_PRIVATE:
6101 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
a68ab351
JJ
6102 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6103 {
6104 flags |= GOVD_PRIVATE_OUTER_REF;
6105 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6106 }
6107 else
6108 notice_outer = false;
953ff289
DN
6109 goto do_add;
6110 case OMP_CLAUSE_SHARED:
6111 flags = GOVD_SHARED | GOVD_EXPLICIT;
6112 goto do_add;
6113 case OMP_CLAUSE_FIRSTPRIVATE:
6114 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
07b7aade 6115 check_non_private = "firstprivate";
953ff289
DN
6116 goto do_add;
6117 case OMP_CLAUSE_LASTPRIVATE:
6118 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 6119 check_non_private = "lastprivate";
953ff289
DN
6120 goto do_add;
6121 case OMP_CLAUSE_REDUCTION:
6122 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 6123 check_non_private = "reduction";
953ff289
DN
6124 goto do_add;
6125
6126 do_add:
6127 decl = OMP_CLAUSE_DECL (c);
b504a918 6128 if (error_operand_p (decl))
953ff289
DN
6129 {
6130 remove = true;
6131 break;
6132 }
6133 omp_add_variable (ctx, decl, flags);
693d710f 6134 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
953ff289
DN
6135 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6136 {
6137 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
693d710f 6138 GOVD_LOCAL | GOVD_SEEN);
953ff289 6139 gimplify_omp_ctxp = ctx;
d406b663 6140 push_gimplify_context (&gctx);
726a989a 6141
355a7673
MM
6142 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6143 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
726a989a
RB
6144
6145 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6146 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6147 pop_gimplify_context
6148 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
d406b663 6149 push_gimplify_context (&gctx);
726a989a
RB
6150 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6151 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
b8698a0f 6152 pop_gimplify_context
726a989a
RB
6153 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6154 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6155 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6156
953ff289
DN
6157 gimplify_omp_ctxp = outer_ctx;
6158 }
a68ab351
JJ
6159 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6160 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6161 {
6162 gimplify_omp_ctxp = ctx;
d406b663 6163 push_gimplify_context (&gctx);
a68ab351
JJ
6164 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6165 {
6166 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6167 NULL, NULL);
6168 TREE_SIDE_EFFECTS (bind) = 1;
6169 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6170 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6171 }
726a989a
RB
6172 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6173 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6174 pop_gimplify_context
6175 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6176 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6177
a68ab351
JJ
6178 gimplify_omp_ctxp = outer_ctx;
6179 }
953ff289
DN
6180 if (notice_outer)
6181 goto do_notice;
6182 break;
6183
6184 case OMP_CLAUSE_COPYIN:
6185 case OMP_CLAUSE_COPYPRIVATE:
6186 decl = OMP_CLAUSE_DECL (c);
b504a918 6187 if (error_operand_p (decl))
953ff289
DN
6188 {
6189 remove = true;
6190 break;
6191 }
6192 do_notice:
6193 if (outer_ctx)
6194 omp_notice_variable (outer_ctx, decl, true);
07b7aade 6195 if (check_non_private
a68ab351 6196 && region_type == ORT_WORKSHARE
07b7aade
JJ
6197 && omp_check_private (ctx, decl))
6198 {
4f1e4960
JM
6199 error ("%s variable %qE is private in outer context",
6200 check_non_private, DECL_NAME (decl));
07b7aade
JJ
6201 remove = true;
6202 }
953ff289
DN
6203 break;
6204
20906c66 6205 case OMP_CLAUSE_FINAL:
953ff289 6206 case OMP_CLAUSE_IF:
d568d1a8
RS
6207 OMP_CLAUSE_OPERAND (c, 0)
6208 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6209 /* Fall through. */
6210
6211 case OMP_CLAUSE_SCHEDULE:
953ff289 6212 case OMP_CLAUSE_NUM_THREADS:
726a989a
RB
6213 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6214 is_gimple_val, fb_rvalue) == GS_ERROR)
6215 remove = true;
953ff289
DN
6216 break;
6217
6218 case OMP_CLAUSE_NOWAIT:
6219 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
6220 case OMP_CLAUSE_UNTIED:
6221 case OMP_CLAUSE_COLLAPSE:
20906c66 6222 case OMP_CLAUSE_MERGEABLE:
953ff289
DN
6223 break;
6224
6225 case OMP_CLAUSE_DEFAULT:
6226 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6227 break;
6228
6229 default:
6230 gcc_unreachable ();
6231 }
6232
6233 if (remove)
6234 *list_p = OMP_CLAUSE_CHAIN (c);
6235 else
6236 list_p = &OMP_CLAUSE_CHAIN (c);
6237 }
6238
6239 gimplify_omp_ctxp = ctx;
6240}
6241
6242/* For all variables that were not actually used within the context,
6243 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
6244
6245static int
6246gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6247{
6248 tree *list_p = (tree *) data;
6249 tree decl = (tree) n->key;
6250 unsigned flags = n->value;
aaf46ef9 6251 enum omp_clause_code code;
953ff289
DN
6252 tree clause;
6253 bool private_debug;
6254
6255 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6256 return 0;
6257 if ((flags & GOVD_SEEN) == 0)
6258 return 0;
6259 if (flags & GOVD_DEBUG_PRIVATE)
6260 {
6261 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6262 private_debug = true;
6263 }
6264 else
6265 private_debug
6266 = lang_hooks.decls.omp_private_debug_clause (decl,
6267 !!(flags & GOVD_SHARED));
6268 if (private_debug)
6269 code = OMP_CLAUSE_PRIVATE;
6270 else if (flags & GOVD_SHARED)
6271 {
6272 if (is_global_var (decl))
64964499
JJ
6273 {
6274 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6275 while (ctx != NULL)
6276 {
6277 splay_tree_node on
6278 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6279 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6280 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
6281 break;
6282 ctx = ctx->outer_context;
6283 }
6284 if (ctx == NULL)
6285 return 0;
6286 }
953ff289
DN
6287 code = OMP_CLAUSE_SHARED;
6288 }
6289 else if (flags & GOVD_PRIVATE)
6290 code = OMP_CLAUSE_PRIVATE;
6291 else if (flags & GOVD_FIRSTPRIVATE)
6292 code = OMP_CLAUSE_FIRSTPRIVATE;
6293 else
6294 gcc_unreachable ();
6295
c2255bc4 6296 clause = build_omp_clause (input_location, code);
aaf46ef9 6297 OMP_CLAUSE_DECL (clause) = decl;
953ff289
DN
6298 OMP_CLAUSE_CHAIN (clause) = *list_p;
6299 if (private_debug)
6300 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
a68ab351
JJ
6301 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6302 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
953ff289 6303 *list_p = clause;
a68ab351 6304 lang_hooks.decls.omp_finish_clause (clause);
953ff289
DN
6305
6306 return 0;
6307}
6308
6309static void
6310gimplify_adjust_omp_clauses (tree *list_p)
6311{
6312 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6313 tree c, decl;
6314
6315 while ((c = *list_p) != NULL)
6316 {
6317 splay_tree_node n;
6318 bool remove = false;
6319
aaf46ef9 6320 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
6321 {
6322 case OMP_CLAUSE_PRIVATE:
6323 case OMP_CLAUSE_SHARED:
6324 case OMP_CLAUSE_FIRSTPRIVATE:
6325 decl = OMP_CLAUSE_DECL (c);
6326 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6327 remove = !(n->value & GOVD_SEEN);
6328 if (! remove)
6329 {
aaf46ef9 6330 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
953ff289
DN
6331 if ((n->value & GOVD_DEBUG_PRIVATE)
6332 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6333 {
6334 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6335 || ((n->value & GOVD_DATA_SHARE_CLASS)
6336 == GOVD_PRIVATE));
aaf46ef9 6337 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
953ff289
DN
6338 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6339 }
6340 }
6341 break;
6342
6343 case OMP_CLAUSE_LASTPRIVATE:
6344 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6345 accurately reflect the presence of a FIRSTPRIVATE clause. */
6346 decl = OMP_CLAUSE_DECL (c);
6347 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6348 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6349 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6350 break;
b8698a0f 6351
953ff289
DN
6352 case OMP_CLAUSE_REDUCTION:
6353 case OMP_CLAUSE_COPYIN:
6354 case OMP_CLAUSE_COPYPRIVATE:
6355 case OMP_CLAUSE_IF:
6356 case OMP_CLAUSE_NUM_THREADS:
6357 case OMP_CLAUSE_SCHEDULE:
6358 case OMP_CLAUSE_NOWAIT:
6359 case OMP_CLAUSE_ORDERED:
6360 case OMP_CLAUSE_DEFAULT:
a68ab351
JJ
6361 case OMP_CLAUSE_UNTIED:
6362 case OMP_CLAUSE_COLLAPSE:
20906c66
JJ
6363 case OMP_CLAUSE_FINAL:
6364 case OMP_CLAUSE_MERGEABLE:
953ff289
DN
6365 break;
6366
6367 default:
6368 gcc_unreachable ();
6369 }
6370
6371 if (remove)
6372 *list_p = OMP_CLAUSE_CHAIN (c);
6373 else
6374 list_p = &OMP_CLAUSE_CHAIN (c);
6375 }
6376
6377 /* Add in any implicit data sharing. */
6378 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
b8698a0f 6379
953ff289
DN
6380 gimplify_omp_ctxp = ctx->outer_context;
6381 delete_omp_context (ctx);
6382}
6383
6384/* Gimplify the contents of an OMP_PARALLEL statement. This involves
6385 gimplification of the body, as well as scanning the body for used
6386 variables. We need to do this scan now, because variable-sized
6387 decls will be decomposed during gimplification. */
6388
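/* Hedged example of the transformation performed below (for illustration
   only, not taken from the original comments): a GENERIC statement like

     #pragma omp parallel default(shared)
       body;

   has its clauses scanned, its body gimplified into a fresh gimple_seq in a
   new gimplification context, and is then replaced by a GIMPLE_OMP_PARALLEL
   statement built with gimple_build_omp_parallel and appended to *PRE_P.  */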
6389static void
6390gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
6391{
6392 tree expr = *expr_p;
726a989a
RB
6393 gimple g;
6394 gimple_seq body = NULL;
d406b663 6395 struct gimplify_ctx gctx;
953ff289 6396
a68ab351
JJ
6397 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6398 OMP_PARALLEL_COMBINED (expr)
6399 ? ORT_COMBINED_PARALLEL
6400 : ORT_PARALLEL);
953ff289 6401
d406b663 6402 push_gimplify_context (&gctx);
953ff289 6403
726a989a
RB
6404 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6405 if (gimple_code (g) == GIMPLE_BIND)
6406 pop_gimplify_context (g);
50674e96 6407 else
726a989a 6408 pop_gimplify_context (NULL);
953ff289
DN
6409
6410 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6411
726a989a
RB
6412 g = gimple_build_omp_parallel (body,
6413 OMP_PARALLEL_CLAUSES (expr),
6414 NULL_TREE, NULL_TREE);
6415 if (OMP_PARALLEL_COMBINED (expr))
6416 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6417 gimplify_seq_add_stmt (pre_p, g);
6418 *expr_p = NULL_TREE;
953ff289
DN
6419}
6420
a68ab351
JJ
6421/* Gimplify the contents of an OMP_TASK statement. This involves
6422 gimplification of the body, as well as scanning the body for used
6423 variables. We need to do this scan now, because variable-sized
6424 decls will be decomposed during gimplification. */
953ff289 6425
726a989a
RB
6426static void
6427gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
953ff289 6428{
a68ab351 6429 tree expr = *expr_p;
726a989a
RB
6430 gimple g;
6431 gimple_seq body = NULL;
d406b663 6432 struct gimplify_ctx gctx;
953ff289 6433
f22f4340
JJ
6434 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6435 find_omp_clause (OMP_TASK_CLAUSES (expr),
6436 OMP_CLAUSE_UNTIED)
6437 ? ORT_UNTIED_TASK : ORT_TASK);
953ff289 6438
d406b663 6439 push_gimplify_context (&gctx);
953ff289 6440
726a989a
RB
6441 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6442 if (gimple_code (g) == GIMPLE_BIND)
6443 pop_gimplify_context (g);
953ff289 6444 else
726a989a 6445 pop_gimplify_context (NULL);
953ff289 6446
a68ab351 6447 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
917948d3 6448
726a989a
RB
6449 g = gimple_build_omp_task (body,
6450 OMP_TASK_CLAUSES (expr),
6451 NULL_TREE, NULL_TREE,
6452 NULL_TREE, NULL_TREE, NULL_TREE);
6453 gimplify_seq_add_stmt (pre_p, g);
6454 *expr_p = NULL_TREE;
a68ab351
JJ
6455}
6456
6457/* Gimplify the gross structure of an OMP_FOR statement. */
6458
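/* Illustrative sketch (an assumption, not from the original source): for

     #pragma omp for
     for (i = 0; i < n; i++)
       body;

   OMP_FOR_INIT, OMP_FOR_COND and OMP_FOR_INCR are TREE_VECs, typically with
   one element per collapsed loop level, and the increment below is
   canonicalized to the form i = i + 1 before the GIMPLE_OMP_FOR statement
   is built.  */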
6459static enum gimplify_status
726a989a 6460gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
a68ab351 6461{
726a989a 6462 tree for_stmt, decl, var, t;
32e8bb8e
ILT
6463 enum gimplify_status ret = GS_ALL_DONE;
6464 enum gimplify_status tret;
726a989a
RB
6465 gimple gfor;
6466 gimple_seq for_body, for_pre_body;
a68ab351
JJ
6467 int i;
6468
6469 for_stmt = *expr_p;
6470
6471 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6472 ORT_WORKSHARE);
917948d3 6473
726a989a
RB
6474 /* Handle OMP_FOR_INIT. */
6475 for_pre_body = NULL;
6476 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6477 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
a68ab351 6478
355a7673 6479 for_body = NULL;
a68ab351
JJ
6480 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6481 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6482 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6483 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6484 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6485 {
6486 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
726a989a
RB
6487 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6488 decl = TREE_OPERAND (t, 0);
a68ab351
JJ
6489 gcc_assert (DECL_P (decl));
6490 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6491 || POINTER_TYPE_P (TREE_TYPE (decl)));
6492
6493 /* Make sure the iteration variable is private. */
6494 if (omp_is_private (gimplify_omp_ctxp, decl))
6495 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6496 else
6497 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6498
6499 /* If DECL is not a gimple register, create a temporary variable to act
6500 as an iteration counter. This is valid, since DECL cannot be
6501 modified in the body of the loop. */
6502 if (!is_gimple_reg (decl))
6503 {
6504 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
726a989a 6505 TREE_OPERAND (t, 0) = var;
b8698a0f 6506
726a989a 6507 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
953ff289 6508
a68ab351
JJ
6509 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6510 }
6511 else
6512 var = decl;
07beea0d 6513
32e8bb8e 6514 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6515 is_gimple_val, fb_rvalue);
32e8bb8e 6516 ret = MIN (ret, tret);
726a989a
RB
6517 if (ret == GS_ERROR)
6518 return ret;
953ff289 6519
726a989a 6520 /* Handle OMP_FOR_COND. */
6521 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6522 gcc_assert (COMPARISON_CLASS_P (t));
726a989a 6523 gcc_assert (TREE_OPERAND (t, 0) == decl);
b56b9fe3 6524
32e8bb8e 6525 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6526 is_gimple_val, fb_rvalue);
32e8bb8e 6527 ret = MIN (ret, tret);
917948d3 6528
726a989a 6529 /* Handle OMP_FOR_INCR. */
a68ab351 6530 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
953ff289
DN
6531 switch (TREE_CODE (t))
6532 {
a68ab351
JJ
6533 case PREINCREMENT_EXPR:
6534 case POSTINCREMENT_EXPR:
6535 t = build_int_cst (TREE_TYPE (decl), 1);
6536 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
726a989a 6537 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
a68ab351
JJ
6538 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6539 break;
6540
6541 case PREDECREMENT_EXPR:
6542 case POSTDECREMENT_EXPR:
6543 t = build_int_cst (TREE_TYPE (decl), -1);
6544 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
726a989a 6545 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
a68ab351
JJ
6546 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6547 break;
6548
726a989a
RB
6549 case MODIFY_EXPR:
6550 gcc_assert (TREE_OPERAND (t, 0) == decl);
6551 TREE_OPERAND (t, 0) = var;
a68ab351 6552
726a989a 6553 t = TREE_OPERAND (t, 1);
a68ab351 6554 switch (TREE_CODE (t))
953ff289 6555 {
a68ab351
JJ
6556 case PLUS_EXPR:
6557 if (TREE_OPERAND (t, 1) == decl)
6558 {
6559 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6560 TREE_OPERAND (t, 0) = var;
6561 break;
6562 }
6563
6564 /* Fallthru. */
6565 case MINUS_EXPR:
6566 case POINTER_PLUS_EXPR:
6567 gcc_assert (TREE_OPERAND (t, 0) == decl);
917948d3 6568 TREE_OPERAND (t, 0) = var;
953ff289 6569 break;
a68ab351
JJ
6570 default:
6571 gcc_unreachable ();
953ff289 6572 }
917948d3 6573
32e8bb8e 6574 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6575 is_gimple_val, fb_rvalue);
32e8bb8e 6576 ret = MIN (ret, tret);
953ff289 6577 break;
a68ab351 6578
953ff289
DN
6579 default:
6580 gcc_unreachable ();
6581 }
6582
a68ab351
JJ
6583 if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6584 {
6585 tree c;
6586 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
726a989a
RB
6587 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6588 && OMP_CLAUSE_DECL (c) == decl
6589 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6590 {
6591 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6592 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6593 gcc_assert (TREE_OPERAND (t, 0) == var);
6594 t = TREE_OPERAND (t, 1);
6595 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6596 || TREE_CODE (t) == MINUS_EXPR
6597 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6598 gcc_assert (TREE_OPERAND (t, 0) == var);
6599 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6600 TREE_OPERAND (t, 1));
6601 gimplify_assign (decl, t,
6602 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
a68ab351
JJ
6603 }
6604 }
953ff289
DN
6605 }
6606
726a989a
RB
6607 gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6608
953ff289
DN
6609 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6610
726a989a
RB
6611 gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6612 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6613 for_pre_body);
6614
6615 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6616 {
6617 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6618 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6619 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6620 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6621 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6622 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6623 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6624 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6625 }
6626
6627 gimplify_seq_add_stmt (pre_p, gfor);
953ff289
DN
6628 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
6629}
6630
6631/* Gimplify the gross structure of other OpenMP worksharing constructs.
6632 In particular, OMP_SECTIONS and OMP_SINGLE. */
6633
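/* For illustration only (sketch): an OMP_SECTIONS node coming from
   "#pragma omp sections" is lowered below to a GIMPLE_OMP_SECTIONS statement,
   and an OMP_SINGLE node from "#pragma omp single" to GIMPLE_OMP_SINGLE,
   after the usual clause scanning and body gimplification.  */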
6634static void
6635gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
953ff289 6636{
726a989a
RB
6637 tree expr = *expr_p;
6638 gimple stmt;
6639 gimple_seq body = NULL;
953ff289 6640
726a989a
RB
6641 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6642 gimplify_and_add (OMP_BODY (expr), &body);
6643 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
953ff289 6644
726a989a
RB
6645 if (TREE_CODE (expr) == OMP_SECTIONS)
6646 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6647 else if (TREE_CODE (expr) == OMP_SINGLE)
6648 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6649 else
6650 gcc_unreachable ();
6651
6652 gimplify_seq_add_stmt (pre_p, stmt);
953ff289
DN
6653}
6654
6655/* A subroutine of gimplify_omp_atomic. The front end is supposed to have
b8698a0f 6656 stabilized the lhs of the atomic operation as *ADDR. Return true if
6657 EXPR is this stabilized form. */
6658
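/* Example (illustrative assumption): for "#pragma omp atomic x += 1" the
   front end presents the update in terms of *ADDR, where ADDR is &x; this
   predicate recognizes forms such as *(&x), possibly wrapped in useless type
   conversions, as references to that stabilized lhs.  */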
6659static bool
a509ebb5 6660goa_lhs_expr_p (tree expr, tree addr)
953ff289
DN
6661{
6662 /* Also include casts to other type variants. The C front end is fond
b8698a0f 6663 of adding these for e.g. volatile variables. This is like
953ff289 6664 STRIP_TYPE_NOPS but includes the main variant lookup. */
9600efe1 6665 STRIP_USELESS_TYPE_CONVERSION (expr);
953ff289 6666
78e47463
JJ
6667 if (TREE_CODE (expr) == INDIRECT_REF)
6668 {
6669 expr = TREE_OPERAND (expr, 0);
6670 while (expr != addr
1043771b 6671 && (CONVERT_EXPR_P (expr)
78e47463
JJ
6672 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6673 && TREE_CODE (expr) == TREE_CODE (addr)
9600efe1 6674 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
78e47463
JJ
6675 {
6676 expr = TREE_OPERAND (expr, 0);
6677 addr = TREE_OPERAND (addr, 0);
6678 }
251923f5
JJ
6679 if (expr == addr)
6680 return true;
71458b8a
JJ
6681 return (TREE_CODE (addr) == ADDR_EXPR
6682 && TREE_CODE (expr) == ADDR_EXPR
251923f5 6683 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
78e47463 6684 }
953ff289
DN
6685 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6686 return true;
6687 return false;
6688}
6689
ad19c4be
EB
6690/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
6691 expression does not involve the lhs, evaluate it into a temporary.
6692 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
6693 or -1 if an error was encountered. */
953ff289
DN
6694
6695static int
726a989a
RB
6696goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6697 tree lhs_var)
953ff289
DN
6698{
6699 tree expr = *expr_p;
6700 int saw_lhs;
6701
6702 if (goa_lhs_expr_p (expr, lhs_addr))
6703 {
6704 *expr_p = lhs_var;
6705 return 1;
6706 }
6707 if (is_gimple_val (expr))
6708 return 0;
b8698a0f 6709
953ff289
DN
6710 saw_lhs = 0;
6711 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6712 {
6713 case tcc_binary:
067dd3c9 6714 case tcc_comparison:
726a989a
RB
6715 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6716 lhs_var);
953ff289 6717 case tcc_unary:
726a989a
RB
6718 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6719 lhs_var);
953ff289 6720 break;
067dd3c9
JJ
6721 case tcc_expression:
6722 switch (TREE_CODE (expr))
6723 {
6724 case TRUTH_ANDIF_EXPR:
6725 case TRUTH_ORIF_EXPR:
f2b11865
JJ
6726 case TRUTH_AND_EXPR:
6727 case TRUTH_OR_EXPR:
6728 case TRUTH_XOR_EXPR:
067dd3c9
JJ
6729 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6730 lhs_addr, lhs_var);
f2b11865 6731 case TRUTH_NOT_EXPR:
067dd3c9
JJ
6732 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6733 lhs_addr, lhs_var);
6734 break;
4063e61b
JM
6735 case COMPOUND_EXPR:
6736 /* Break out any preevaluations from cp_build_modify_expr. */
6737 for (; TREE_CODE (expr) == COMPOUND_EXPR;
6738 expr = TREE_OPERAND (expr, 1))
6739 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
6740 *expr_p = expr;
6741 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
067dd3c9
JJ
6742 default:
6743 break;
6744 }
6745 break;
953ff289
DN
6746 default:
6747 break;
6748 }
6749
6750 if (saw_lhs == 0)
6751 {
6752 enum gimplify_status gs;
6753 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6754 if (gs != GS_ALL_DONE)
6755 saw_lhs = -1;
6756 }
6757
6758 return saw_lhs;
6759}
6760
953ff289
DN
6761/* Gimplify an OMP_ATOMIC statement. */
6762
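/* Rough sketch of the result (illustrative, not from the original source):
   for "#pragma omp atomic x += 1" this emits approximately

     GIMPLE_OMP_ATOMIC_LOAD  (tmp, &x)
     ... gimplified computation of tmp + 1 ...
     GIMPLE_OMP_ATOMIC_STORE (of the computed value)

   with the capture variants additionally making the old or new value the
   value of the whole expression.  */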
6763static enum gimplify_status
726a989a 6764gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
6765{
6766 tree addr = TREE_OPERAND (*expr_p, 0);
20906c66
JJ
6767 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
6768 ? NULL : TREE_OPERAND (*expr_p, 1);
953ff289 6769 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
726a989a 6770 tree tmp_load;
20906c66 6771 gimple loadstmt, storestmt;
953ff289 6772
20906c66
JJ
6773 tmp_load = create_tmp_reg (type, NULL);
6774 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6775 return GS_ERROR;
6776
6777 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6778 != GS_ALL_DONE)
6779 return GS_ERROR;
953ff289 6780
20906c66
JJ
6781 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
6782 gimplify_seq_add_stmt (pre_p, loadstmt);
6783 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6784 != GS_ALL_DONE)
6785 return GS_ERROR;
953ff289 6786
20906c66
JJ
6787 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
6788 rhs = tmp_load;
6789 storestmt = gimple_build_omp_atomic_store (rhs);
6790 gimplify_seq_add_stmt (pre_p, storestmt);
6791 switch (TREE_CODE (*expr_p))
6792 {
6793 case OMP_ATOMIC_READ:
6794 case OMP_ATOMIC_CAPTURE_OLD:
6795 *expr_p = tmp_load;
6796 gimple_omp_atomic_set_need_value (loadstmt);
6797 break;
6798 case OMP_ATOMIC_CAPTURE_NEW:
6799 *expr_p = rhs;
6800 gimple_omp_atomic_set_need_value (storestmt);
6801 break;
6802 default:
6803 *expr_p = NULL;
6804 break;
6805 }
a509ebb5
RL
6806
6807 return GS_ALL_DONE;
953ff289 6808}
6de9cd9a 6809
0a35513e
AH
6810/* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
6811 body, and adding some EH bits. */
6812
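/* Illustrative sketch (assumed example): a "__transaction_atomic { body }"
   statement has its body wrapped in a BIND_EXPR if needed, gimplified in its
   own context, and replaced by a GIMPLE_TRANSACTION statement whose subcode
   records the outer/relaxed variants.  */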
6813static enum gimplify_status
6814gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
6815{
6816 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
6817 gimple g;
6818 gimple_seq body = NULL;
6819 struct gimplify_ctx gctx;
6820 int subcode = 0;
6821
6822 /* Wrap the transaction body in a BIND_EXPR so we have a context
6823 where to put decls for OpenMP. */
6824 if (TREE_CODE (tbody) != BIND_EXPR)
6825 {
6826 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
6827 TREE_SIDE_EFFECTS (bind) = 1;
6828 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
6829 TRANSACTION_EXPR_BODY (expr) = bind;
6830 }
6831
6832 push_gimplify_context (&gctx);
6833 temp = voidify_wrapper_expr (*expr_p, NULL);
6834
6835 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
6836 pop_gimplify_context (g);
6837
6838 g = gimple_build_transaction (body, NULL);
6839 if (TRANSACTION_EXPR_OUTER (expr))
6840 subcode = GTMA_IS_OUTER;
6841 else if (TRANSACTION_EXPR_RELAXED (expr))
6842 subcode = GTMA_IS_RELAXED;
6843 gimple_transaction_set_subcode (g, subcode);
6844
6845 gimplify_seq_add_stmt (pre_p, g);
6846
6847 if (temp)
6848 {
6849 *expr_p = temp;
6850 return GS_OK;
6851 }
6852
6853 *expr_p = NULL_TREE;
6854 return GS_ALL_DONE;
6855}
6856
ad19c4be 6857/* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
726a989a
RB
6858 expression produces a value to be used as an operand inside a GIMPLE
6859 statement, the value will be stored back in *EXPR_P. This value will
6860 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6861 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6862 emitted in PRE_P and POST_P.
6863
6864 Additionally, this process may overwrite parts of the input
6865 expression during gimplification. Ideally, it should be
6866 possible to do non-destructive gimplification.
6867
6868 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6869 the expression needs to evaluate to a value to be used as
6870 an operand in a GIMPLE statement, this value will be stored in
6871 *EXPR_P on exit. This happens when the caller specifies one
6872 of fb_lvalue or fb_rvalue fallback flags.
6873
6874 PRE_P will contain the sequence of GIMPLE statements corresponding
6875 to the evaluation of EXPR and all the side-effects that must
6876 be executed before the main expression. On exit, the last
6877 statement of PRE_P is the core statement being gimplified. For
6878 instance, when gimplifying 'if (++a)' the last statement in
6879 PRE_P will be 'if (t.1)' where t.1 is the result of
6880 pre-incrementing 'a'.
6881
6882 POST_P will contain the sequence of GIMPLE statements corresponding
6883 to the evaluation of all the side-effects that must be executed
6884 after the main expression. If this is NULL, the post
6885 side-effects are stored at the end of PRE_P.
6886
6887 The reason why the output is split in two is to handle post
6888 side-effects explicitly. In some cases, an expression may have
6889 inner and outer post side-effects which need to be emitted in
6890 an order different from the one given by the recursive
6891 traversal. For instance, for the expression (*p--)++ the post
6892 side-effects of '--' must actually occur *after* the post
6893 side-effects of '++'. However, gimplification will first visit
6894 the inner expression, so if a separate POST sequence was not
6895 used, the resulting sequence would be:
6896
6897 1 t.1 = *p
6898 2 p = p - 1
6899 3 t.2 = t.1 + 1
6900 4 *p = t.2
6901
6902 However, the post-decrement operation in line #2 must not be
6903 evaluated until after the store to *p at line #4, so the
6904 correct sequence should be:
6905
6906 1 t.1 = *p
6907 2 t.2 = t.1 + 1
6908 3 *p = t.2
6909 4 p = p - 1
6910
6911 So, by specifying a separate post queue, it is possible
6912 to emit the post side-effects in the correct order.
6913 If POST_P is NULL, an internal queue will be used. Before
6914 returning to the caller, the sequence POST_P is appended to
6915 the main output sequence PRE_P.
6916
6917 GIMPLE_TEST_F points to a function that takes a tree T and
6918 returns nonzero if T is in the GIMPLE form requested by the
12947319 6919 caller. The GIMPLE predicates are in gimple.c.
726a989a
RB
6920
6921 FALLBACK tells the function what sort of a temporary we want if
6922 gimplification cannot produce an expression that complies with
6923 GIMPLE_TEST_F.
6924
6925 fb_none means that no temporary should be generated
6926 fb_rvalue means that an rvalue is OK to generate
6927 fb_lvalue means that an lvalue is OK to generate
6928 fb_either means that either is OK, but an lvalue is preferable.
6929 fb_mayfail means that gimplification may fail (in which case
6930 GS_ERROR will be returned)
6931
6932 The return value is either GS_ERROR or GS_ALL_DONE, since this
6933 function iterates until EXPR is completely gimplified or an error
6934 occurs. */
6935
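/* Typical use within this file (shown here only as an illustration): a caller
   that needs a GIMPLE value for an operand writes

     ret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL,
                          is_gimple_val, fb_rvalue);

   and any statements needed to compute that operand are appended to *PRE_P
   (and, since POST_P is NULL here, any post side-effects as well).  */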
6936enum gimplify_status
726a989a
RB
6937gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6938 bool (*gimple_test_f) (tree), fallback_t fallback)
6de9cd9a
DN
6939{
6940 tree tmp;
726a989a
RB
6941 gimple_seq internal_pre = NULL;
6942 gimple_seq internal_post = NULL;
6de9cd9a 6943 tree save_expr;
726a989a 6944 bool is_statement;
6de9cd9a
DN
6945 location_t saved_location;
6946 enum gimplify_status ret;
726a989a 6947 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6de9cd9a
DN
6948
6949 save_expr = *expr_p;
6950 if (save_expr == NULL_TREE)
6951 return GS_ALL_DONE;
6952
726a989a
RB
6953 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6954 is_statement = gimple_test_f == is_gimple_stmt;
6955 if (is_statement)
6956 gcc_assert (pre_p);
6957
6958 /* Consistency checks. */
6959 if (gimple_test_f == is_gimple_reg)
6960 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6961 else if (gimple_test_f == is_gimple_val
726a989a
RB
6962 || gimple_test_f == is_gimple_call_addr
6963 || gimple_test_f == is_gimple_condexpr
6964 || gimple_test_f == is_gimple_mem_rhs
ba4d8f9d 6965 || gimple_test_f == is_gimple_mem_rhs_or_call
726a989a 6966 || gimple_test_f == is_gimple_reg_rhs
ba4d8f9d 6967 || gimple_test_f == is_gimple_reg_rhs_or_call
70f34814
RG
6968 || gimple_test_f == is_gimple_asm_val
6969 || gimple_test_f == is_gimple_mem_ref_addr)
726a989a
RB
6970 gcc_assert (fallback & fb_rvalue);
6971 else if (gimple_test_f == is_gimple_min_lval
6972 || gimple_test_f == is_gimple_lvalue)
6973 gcc_assert (fallback & fb_lvalue);
6974 else if (gimple_test_f == is_gimple_addressable)
6975 gcc_assert (fallback & fb_either);
6976 else if (gimple_test_f == is_gimple_stmt)
6977 gcc_assert (fallback == fb_none);
6978 else
6979 {
6980 /* We should have recognized the GIMPLE_TEST_F predicate to
6981 know what kind of fallback to use in case a temporary is
6982 needed to hold the value or address of *EXPR_P. */
6983 gcc_unreachable ();
6984 }
6985
6de9cd9a
DN
6986 /* We used to check the predicate here and return immediately if it
6987 succeeds. This is wrong; the design is for gimplification to be
6988 idempotent, and for the predicates to only test for valid forms, not
6989 whether they are fully simplified. */
6de9cd9a
DN
6990 if (pre_p == NULL)
6991 pre_p = &internal_pre;
726a989a 6992
6de9cd9a
DN
6993 if (post_p == NULL)
6994 post_p = &internal_post;
6995
726a989a
RB
6996 /* Remember the last statements added to PRE_P and POST_P. Every
6997 new statement added by the gimplification helpers needs to be
6998 annotated with location information. To centralize the
6999 responsibility, we remember the last statement that had been
7000 added to both queues before gimplifying *EXPR_P. If
7001 gimplification produces new statements in PRE_P and POST_P, those
7002 statements will be annotated with the same location information
7003 as *EXPR_P. */
7004 pre_last_gsi = gsi_last (*pre_p);
7005 post_last_gsi = gsi_last (*post_p);
7006
6de9cd9a 7007 saved_location = input_location;
7008 if (save_expr != error_mark_node
7009 && EXPR_HAS_LOCATION (*expr_p))
7010 input_location = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
7011
7012 /* Loop over the specific gimplifiers until the toplevel node
7013 remains the same. */
7014 do
7015 {
73d6ddef
RK
7016 /* Strip away as many useless type conversions as possible
7017 at the toplevel. */
7018 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6de9cd9a
DN
7019
7020 /* Remember the expr. */
7021 save_expr = *expr_p;
7022
7023 /* Die, die, die, my darling. */
7024 if (save_expr == error_mark_node
726a989a 7025 || (TREE_TYPE (save_expr)
65355d53 7026 && TREE_TYPE (save_expr) == error_mark_node))
6de9cd9a
DN
7027 {
7028 ret = GS_ERROR;
7029 break;
7030 }
7031
7032 /* Do any language-specific gimplification. */
32e8bb8e
ILT
7033 ret = ((enum gimplify_status)
7034 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6de9cd9a
DN
7035 if (ret == GS_OK)
7036 {
7037 if (*expr_p == NULL_TREE)
7038 break;
7039 if (*expr_p != save_expr)
7040 continue;
7041 }
7042 else if (ret != GS_UNHANDLED)
7043 break;
7044
941f78d1
JM
7045 /* Make sure that all the cases set 'ret' appropriately. */
7046 ret = GS_UNHANDLED;
6de9cd9a
DN
7047 switch (TREE_CODE (*expr_p))
7048 {
7049 /* First deal with the special cases. */
7050
7051 case POSTINCREMENT_EXPR:
7052 case POSTDECREMENT_EXPR:
7053 case PREINCREMENT_EXPR:
7054 case PREDECREMENT_EXPR:
7055 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7056 fallback != fb_none);
7057 break;
7058
7059 case ARRAY_REF:
44de5aeb
RK
7060 case ARRAY_RANGE_REF:
7061 case REALPART_EXPR:
7062 case IMAGPART_EXPR:
6de9cd9a 7063 case COMPONENT_REF:
9e51aaf5 7064 case VIEW_CONVERT_EXPR:
6de9cd9a 7065 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
90051e16 7066 fallback ? fallback : fb_rvalue);
6de9cd9a
DN
7067 break;
7068
7069 case COND_EXPR:
dae7ec87 7070 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
726a989a 7071
0223e4f5
JM
7072 /* C99 code may assign to an array in a structure value of a
7073 conditional expression, and this has undefined behavior
7074 only on execution, so create a temporary if an lvalue is
7075 required. */
7076 if (fallback == fb_lvalue)
7077 {
7078 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7079 mark_addressable (*expr_p);
941f78d1 7080 ret = GS_OK;
0223e4f5 7081 }
6de9cd9a
DN
7082 break;
7083
7084 case CALL_EXPR:
90051e16 7085 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
726a989a 7086
0223e4f5
JM
7087 /* C99 code may assign to an array in a structure returned
7088 from a function, and this has undefined behavior only on
7089 execution, so create a temporary if an lvalue is
7090 required. */
7091 if (fallback == fb_lvalue)
7092 {
7093 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7094 mark_addressable (*expr_p);
941f78d1 7095 ret = GS_OK;
0223e4f5 7096 }
6de9cd9a
DN
7097 break;
7098
7099 case TREE_LIST:
282899df 7100 gcc_unreachable ();
6de9cd9a
DN
7101
7102 case COMPOUND_EXPR:
7103 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7104 break;
7105
2ec5deb5 7106 case COMPOUND_LITERAL_EXPR:
a845a7f5 7107 ret = gimplify_compound_literal_expr (expr_p, pre_p, fallback);
2ec5deb5
PB
7108 break;
7109
6de9cd9a
DN
7110 case MODIFY_EXPR:
7111 case INIT_EXPR:
ebad5233
JM
7112 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7113 fallback != fb_none);
6de9cd9a
DN
7114 break;
7115
7116 case TRUTH_ANDIF_EXPR:
7117 case TRUTH_ORIF_EXPR:
1d15f620
KT
7118 {
7119 /* Preserve the original type of the expression and the
7120 source location of the outer expression. */
7121 tree org_type = TREE_TYPE (*expr_p);
7122 *expr_p = gimple_boolify (*expr_p);
4b4455e5 7123 *expr_p = build3_loc (input_location, COND_EXPR,
1d15f620
KT
7124 org_type, *expr_p,
7125 fold_convert_loc
4b4455e5 7126 (input_location,
1d15f620
KT
7127 org_type, boolean_true_node),
7128 fold_convert_loc
4b4455e5 7129 (input_location,
1d15f620
KT
7130 org_type, boolean_false_node));
7131 ret = GS_OK;
7132 break;
7133 }
6de9cd9a
DN
7134
7135 case TRUTH_NOT_EXPR:
3c6cbf7a 7136 {
53020648
RG
7137 tree type = TREE_TYPE (*expr_p);
7138 /* The parsers are careful to generate TRUTH_NOT_EXPR
7139 only with operands that are always zero or one.
7140 We do not fold here but handle the only interesting case
7141 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
3c6cbf7a 7142 *expr_p = gimple_boolify (*expr_p);
53020648
RG
7143 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7144 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7145 TREE_TYPE (*expr_p),
7146 TREE_OPERAND (*expr_p, 0));
7147 else
7148 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7149 TREE_TYPE (*expr_p),
7150 TREE_OPERAND (*expr_p, 0),
7151 build_int_cst (TREE_TYPE (*expr_p), 1));
7152 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7153 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7154 ret = GS_OK;
bd5d002e 7155 break;
3c6cbf7a 7156 }
67339062 7157
6de9cd9a
DN
7158 case ADDR_EXPR:
7159 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7160 break;
7161
7162 case VA_ARG_EXPR:
cd3ce9b4 7163 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6de9cd9a
DN
7164 break;
7165
1043771b 7166 CASE_CONVERT:
6de9cd9a
DN
7167 if (IS_EMPTY_STMT (*expr_p))
7168 {
7169 ret = GS_ALL_DONE;
7170 break;
7171 }
7172
7173 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7174 || fallback == fb_none)
7175 {
7176 /* Just strip a conversion to void (or in void context) and
7177 try again. */
7178 *expr_p = TREE_OPERAND (*expr_p, 0);
941f78d1 7179 ret = GS_OK;
6de9cd9a
DN
7180 break;
7181 }
7182
7183 ret = gimplify_conversion (expr_p);
7184 if (ret == GS_ERROR)
7185 break;
7186 if (*expr_p != save_expr)
7187 break;
7188 /* FALLTHRU */
7189
7190 case FIX_TRUNC_EXPR:
6de9cd9a
DN
7191 /* unary_expr: ... | '(' cast ')' val | ... */
7192 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7193 is_gimple_val, fb_rvalue);
7194 recalculate_side_effects (*expr_p);
7195 break;
7196
6a720599 7197 case INDIRECT_REF:
70f34814
RG
7198 {
7199 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
3748f5c9 7200 bool notrap = TREE_THIS_NOTRAP (*expr_p);
70f34814
RG
7201 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7202
7203 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7204 if (*expr_p != save_expr)
7205 {
7206 ret = GS_OK;
7207 break;
7208 }
7209
7210 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7211 is_gimple_reg, fb_rvalue);
dca26746
RG
7212 if (ret == GS_ERROR)
7213 break;
70f34814 7214
dca26746 7215 recalculate_side_effects (*expr_p);
70f34814
RG
7216 *expr_p = fold_build2_loc (input_location, MEM_REF,
7217 TREE_TYPE (*expr_p),
7218 TREE_OPERAND (*expr_p, 0),
7219 build_int_cst (saved_ptr_type, 0));
7220 TREE_THIS_VOLATILE (*expr_p) = volatilep;
3748f5c9 7221 TREE_THIS_NOTRAP (*expr_p) = notrap;
70f34814
RG
7222 ret = GS_OK;
7223 break;
7224 }
7225
7226 /* We arrive here through the various re-gimplification paths. */
7227 case MEM_REF:
7228 /* First try re-folding the whole thing. */
7229 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7230 TREE_OPERAND (*expr_p, 0),
7231 TREE_OPERAND (*expr_p, 1));
7232 if (tmp)
941f78d1 7233 {
70f34814
RG
7234 *expr_p = tmp;
7235 recalculate_side_effects (*expr_p);
941f78d1
JM
7236 ret = GS_OK;
7237 break;
7238 }
01718e96
RG
7239 /* Avoid re-gimplifying the address operand if it is already
7240 in suitable form. Re-gimplifying would mark the address
7241 operand addressable. Always gimplify when not in SSA form
7242 as we still may have to gimplify decls with value-exprs. */
7243 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7244 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7245 {
7246 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7247 is_gimple_mem_ref_addr, fb_rvalue);
7248 if (ret == GS_ERROR)
7249 break;
7250 }
6de9cd9a 7251 recalculate_side_effects (*expr_p);
70f34814 7252 ret = GS_ALL_DONE;
6de9cd9a
DN
7253 break;
7254
01718e96 7255 /* Constants need not be gimplified. */
6de9cd9a
DN
7256 case INTEGER_CST:
7257 case REAL_CST:
325217ed 7258 case FIXED_CST:
6de9cd9a
DN
7259 case STRING_CST:
7260 case COMPLEX_CST:
7261 case VECTOR_CST:
7262 ret = GS_ALL_DONE;
7263 break;
7264
7265 case CONST_DECL:
0534fa56 7266 /* If we require an lvalue, such as for ADDR_EXPR, retain the
2a7e31df 7267 CONST_DECL node. Otherwise the decl is replaceable by its
0534fa56
RH
7268 value. */
7269 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7270 if (fallback & fb_lvalue)
7271 ret = GS_ALL_DONE;
7272 else
941f78d1
JM
7273 {
7274 *expr_p = DECL_INITIAL (*expr_p);
7275 ret = GS_OK;
7276 }
6de9cd9a
DN
7277 break;
7278
350fae66 7279 case DECL_EXPR:
726a989a 7280 ret = gimplify_decl_expr (expr_p, pre_p);
350fae66
RK
7281 break;
7282
6de9cd9a 7283 case BIND_EXPR:
c6c7698d 7284 ret = gimplify_bind_expr (expr_p, pre_p);
6de9cd9a
DN
7285 break;
7286
7287 case LOOP_EXPR:
7288 ret = gimplify_loop_expr (expr_p, pre_p);
7289 break;
7290
7291 case SWITCH_EXPR:
7292 ret = gimplify_switch_expr (expr_p, pre_p);
7293 break;
7294
6de9cd9a
DN
7295 case EXIT_EXPR:
7296 ret = gimplify_exit_expr (expr_p);
7297 break;
7298
7299 case GOTO_EXPR:
7300 /* If the target is not LABEL, then it is a computed jump
7301 and the target needs to be gimplified. */
7302 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
8c50b495
JJ
7303 {
7304 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7305 NULL, is_gimple_val, fb_rvalue);
7306 if (ret == GS_ERROR)
7307 break;
7308 }
726a989a
RB
7309 gimplify_seq_add_stmt (pre_p,
7310 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
941f78d1 7311 ret = GS_ALL_DONE;
6de9cd9a
DN
7312 break;
7313
2e28e797 7314 case PREDICT_EXPR:
726a989a
RB
7315 gimplify_seq_add_stmt (pre_p,
7316 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7317 PREDICT_EXPR_OUTCOME (*expr_p)));
7318 ret = GS_ALL_DONE;
7319 break;
2e28e797 7320
6de9cd9a
DN
7321 case LABEL_EXPR:
7322 ret = GS_ALL_DONE;
282899df
NS
7323 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7324 == current_function_decl);
726a989a
RB
7325 gimplify_seq_add_stmt (pre_p,
7326 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6de9cd9a
DN
7327 break;
7328
7329 case CASE_LABEL_EXPR:
726a989a 7330 ret = gimplify_case_label_expr (expr_p, pre_p);
6de9cd9a
DN
7331 break;
7332
7333 case RETURN_EXPR:
7334 ret = gimplify_return_expr (*expr_p, pre_p);
7335 break;
7336
7337 case CONSTRUCTOR:
48eb4e53
RK
7338 /* Don't reduce this in place; let gimplify_init_constructor work its
7339 magic. But if we're just elaborating this for side effects, just
7340 gimplify any element that has side-effects. */
7341 if (fallback == fb_none)
7342 {
4038c495 7343 unsigned HOST_WIDE_INT ix;
ac47786e 7344 tree val;
08330ec2 7345 tree temp = NULL_TREE;
ac47786e
NF
7346 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7347 if (TREE_SIDE_EFFECTS (val))
7348 append_to_statement_list (val, &temp);
48eb4e53 7349
08330ec2 7350 *expr_p = temp;
941f78d1 7351 ret = temp ? GS_OK : GS_ALL_DONE;
48eb4e53 7352 }
ca0b7d18
AP
7353 /* C99 code may assign to an array in a constructed
7354 structure or union, and this has undefined behavior only
7355 on execution, so create a temporary if an lvalue is
7356 required. */
7357 else if (fallback == fb_lvalue)
7358 {
7359 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7360 mark_addressable (*expr_p);
941f78d1 7361 ret = GS_OK;
ca0b7d18 7362 }
08330ec2
AP
7363 else
7364 ret = GS_ALL_DONE;
6de9cd9a
DN
7365 break;
7366
7367 /* The following are special cases that are not handled by the
7368 original GIMPLE grammar. */
7369
7370 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7371 eliminated. */
7372 case SAVE_EXPR:
7373 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7374 break;
7375
7376 case BIT_FIELD_REF:
7377 {
7378 enum gimplify_status r0, r1, r2;
7379
726a989a
RB
7380 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7381 post_p, is_gimple_lvalue, fb_either);
7382 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7383 post_p, is_gimple_val, fb_rvalue);
7384 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7385 post_p, is_gimple_val, fb_rvalue);
6de9cd9a
DN
7386 recalculate_side_effects (*expr_p);
7387
7388 ret = MIN (r0, MIN (r1, r2));
7389 }
7390 break;
7391
150e3929
RG
7392 case TARGET_MEM_REF:
7393 {
7394 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7395
23a534a1 7396 if (TMR_BASE (*expr_p))
150e3929 7397 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
4d948885 7398 post_p, is_gimple_mem_ref_addr, fb_either);
150e3929
RG
7399 if (TMR_INDEX (*expr_p))
7400 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7401 post_p, is_gimple_val, fb_rvalue);
4d948885
RG
7402 if (TMR_INDEX2 (*expr_p))
7403 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7404 post_p, is_gimple_val, fb_rvalue);
150e3929
RG
7405 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7406 ret = MIN (r0, r1);
7407 }
7408 break;
7409
6de9cd9a
DN
7410 case NON_LVALUE_EXPR:
7411 /* This should have been stripped above. */
282899df 7412 gcc_unreachable ();
6de9cd9a
DN
7413
7414 case ASM_EXPR:
7415 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7416 break;
7417
7418 case TRY_FINALLY_EXPR:
7419 case TRY_CATCH_EXPR:
726a989a
RB
7420 {
7421 gimple_seq eval, cleanup;
7422 gimple try_;
7423
7424 eval = cleanup = NULL;
7425 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7426 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
cc8b343d
JJ
7427 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7428 if (gimple_seq_empty_p (cleanup))
7429 {
7430 gimple_seq_add_seq (pre_p, eval);
7431 ret = GS_ALL_DONE;
7432 break;
7433 }
726a989a
RB
7434 try_ = gimple_build_try (eval, cleanup,
7435 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7436 ? GIMPLE_TRY_FINALLY
7437 : GIMPLE_TRY_CATCH);
7438 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7439 gimple_try_set_catch_is_cleanup (try_,
7440 TRY_CATCH_IS_CLEANUP (*expr_p));
7441 gimplify_seq_add_stmt (pre_p, try_);
7442 ret = GS_ALL_DONE;
7443 break;
7444 }
6de9cd9a
DN
7445
7446 case CLEANUP_POINT_EXPR:
7447 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7448 break;
7449
7450 case TARGET_EXPR:
7451 ret = gimplify_target_expr (expr_p, pre_p, post_p);
7452 break;
7453
7454 case CATCH_EXPR:
726a989a
RB
7455 {
7456 gimple c;
7457 gimple_seq handler = NULL;
7458 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7459 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7460 gimplify_seq_add_stmt (pre_p, c);
7461 ret = GS_ALL_DONE;
7462 break;
7463 }
6de9cd9a
DN
7464
7465 case EH_FILTER_EXPR:
726a989a
RB
7466 {
7467 gimple ehf;
7468 gimple_seq failure = NULL;
7469
7470 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
7471 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
d665b6e5 7472 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
726a989a
RB
7473 gimplify_seq_add_stmt (pre_p, ehf);
7474 ret = GS_ALL_DONE;
7475 break;
7476 }
6de9cd9a 7477
0f59171d
RH
7478 case OBJ_TYPE_REF:
7479 {
7480 enum gimplify_status r0, r1;
726a989a
RB
7481 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
7482 post_p, is_gimple_val, fb_rvalue);
7483 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
7484 post_p, is_gimple_val, fb_rvalue);
0f3a057a 7485 TREE_SIDE_EFFECTS (*expr_p) = 0;
0f59171d
RH
7486 ret = MIN (r0, r1);
7487 }
6de9cd9a
DN
7488 break;
7489
6de9cd9a
DN
7490 case LABEL_DECL:
7491 /* We get here when taking the address of a label. We mark
7492 the label as "forced"; meaning it can never be removed and
7493 it is a potential target for any computed goto. */
7494 FORCED_LABEL (*expr_p) = 1;
7495 ret = GS_ALL_DONE;
7496 break;
7497
7498 case STATEMENT_LIST:
c6c7698d 7499 ret = gimplify_statement_list (expr_p, pre_p);
6de9cd9a
DN
7500 break;
7501
d25cee4d
RH
7502 case WITH_SIZE_EXPR:
7503 {
70e2829d
KH
7504 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7505 post_p == &internal_post ? NULL : post_p,
7506 gimple_test_f, fallback);
7507 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7508 is_gimple_val, fb_rvalue);
941f78d1 7509 ret = GS_ALL_DONE;
d25cee4d
RH
7510 }
7511 break;
7512
6de9cd9a 7513 case VAR_DECL:
4744afba 7514 case PARM_DECL:
a9f7c570 7515 ret = gimplify_var_or_parm_decl (expr_p);
6de9cd9a
DN
7516 break;
7517
077b0dfb
JJ
7518 case RESULT_DECL:
7519 /* When within an OpenMP context, notice uses of variables. */
7520 if (gimplify_omp_ctxp)
7521 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7522 ret = GS_ALL_DONE;
7523 break;
7524
71956db3
RH
7525 case SSA_NAME:
7526 /* Allow callbacks into the gimplifier during optimization. */
7527 ret = GS_ALL_DONE;
7528 break;
7529
953ff289 7530 case OMP_PARALLEL:
726a989a
RB
7531 gimplify_omp_parallel (expr_p, pre_p);
7532 ret = GS_ALL_DONE;
953ff289
DN
7533 break;
7534
a68ab351 7535 case OMP_TASK:
726a989a
RB
7536 gimplify_omp_task (expr_p, pre_p);
7537 ret = GS_ALL_DONE;
a68ab351
JJ
7538 break;
7539
953ff289
DN
7540 case OMP_FOR:
7541 ret = gimplify_omp_for (expr_p, pre_p);
7542 break;
7543
7544 case OMP_SECTIONS:
7545 case OMP_SINGLE:
726a989a
RB
7546 gimplify_omp_workshare (expr_p, pre_p);
7547 ret = GS_ALL_DONE;
953ff289
DN
7548 break;
7549
7550 case OMP_SECTION:
7551 case OMP_MASTER:
7552 case OMP_ORDERED:
7553 case OMP_CRITICAL:
726a989a
RB
7554 {
7555 gimple_seq body = NULL;
7556 gimple g;
7557
7558 gimplify_and_add (OMP_BODY (*expr_p), &body);
7559 switch (TREE_CODE (*expr_p))
7560 {
7561 case OMP_SECTION:
7562 g = gimple_build_omp_section (body);
7563 break;
7564 case OMP_MASTER:
7565 g = gimple_build_omp_master (body);
7566 break;
7567 case OMP_ORDERED:
7568 g = gimple_build_omp_ordered (body);
7569 break;
7570 case OMP_CRITICAL:
7571 g = gimple_build_omp_critical (body,
7572 OMP_CRITICAL_NAME (*expr_p));
7573 break;
7574 default:
7575 gcc_unreachable ();
7576 }
7577 gimplify_seq_add_stmt (pre_p, g);
7578 ret = GS_ALL_DONE;
7579 break;
7580 }
953ff289
DN
7581
7582 case OMP_ATOMIC:
20906c66
JJ
7583 case OMP_ATOMIC_READ:
7584 case OMP_ATOMIC_CAPTURE_OLD:
7585 case OMP_ATOMIC_CAPTURE_NEW:
953ff289
DN
7586 ret = gimplify_omp_atomic (expr_p, pre_p);
7587 break;
7588
0a35513e
AH
7589 case TRANSACTION_EXPR:
7590 ret = gimplify_transaction (expr_p, pre_p);
7591 break;
7592
16949072
RG
7593 case TRUTH_AND_EXPR:
7594 case TRUTH_OR_EXPR:
7595 case TRUTH_XOR_EXPR:
1d15f620 7596 {
bd5d002e 7597 tree orig_type = TREE_TYPE (*expr_p);
fc1f4caf 7598 tree new_type, xop0, xop1;
1d15f620 7599 *expr_p = gimple_boolify (*expr_p);
fc1f4caf
KT
7600 new_type = TREE_TYPE (*expr_p);
7601 if (!useless_type_conversion_p (orig_type, new_type))
1d15f620 7602 {
4b4455e5 7603 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
1d15f620
KT
7604 ret = GS_OK;
7605 break;
7606 }
da5fb469 7607
bd5d002e
RG
7608 /* Boolified binary truth expressions are semantically equivalent
7609 to bitwise binary expressions. Canonicalize them to the
7610 bitwise variant. */
7611 switch (TREE_CODE (*expr_p))
7612 {
7613 case TRUTH_AND_EXPR:
7614 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
7615 break;
7616 case TRUTH_OR_EXPR:
7617 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
7618 break;
7619 case TRUTH_XOR_EXPR:
7620 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
7621 break;
7622 default:
7623 break;
7624 }
fc1f4caf
KT
7625 /* Now make sure that operands have compatible type to
7626 expression's new_type. */
7627 xop0 = TREE_OPERAND (*expr_p, 0);
7628 xop1 = TREE_OPERAND (*expr_p, 1);
7629 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
7630 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
7631 new_type,
7632 xop0);
7633 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
7634 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
7635 new_type,
7636 xop1);
bd5d002e
RG
7637 /* Continue classified as tcc_binary. */
7638 goto expr_2;
da5fb469 7639 }
16949072
RG
7640
7641 case FMA_EXPR:
2205ed25 7642 case VEC_PERM_EXPR:
16949072
RG
7643 /* Classified as tcc_expression. */
7644 goto expr_3;
7645
5be014d5 7646 case POINTER_PLUS_EXPR:
315f5f1b
RG
7647 {
7648 enum gimplify_status r0, r1;
7649 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7650 post_p, is_gimple_val, fb_rvalue);
7651 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7652 post_p, is_gimple_val, fb_rvalue);
7653 recalculate_side_effects (*expr_p);
7654 ret = MIN (r0, r1);
7655 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
7656 after gimplifying operands - this is similar to how
7657 it would be folding all gimplified stmts on creation
7658 to have them canonicalized, which is what we eventually
7659 should do anyway. */
7660 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7661 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
7662 {
7663 *expr_p = build_fold_addr_expr_with_type_loc
7664 (input_location,
7665 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
7666 TREE_OPERAND (*expr_p, 0),
7667 fold_convert (ptr_type_node,
7668 TREE_OPERAND (*expr_p, 1))),
7669 TREE_TYPE (*expr_p));
7670 ret = MIN (ret, GS_OK);
7671 }
7672 break;
7673 }
726a989a 7674
6de9cd9a 7675 default:
282899df 7676 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6de9cd9a 7677 {
6615c446 7678 case tcc_comparison:
61c25908
OH
7679 /* Handle comparison of objects of non scalar mode aggregates
7680 with a call to memcmp. It would be nice to only have to do
7681 this for variable-sized objects, but then we'd have to allow
7682 the same nest of reference nodes we allow for MODIFY_EXPR and
7683 that's too complex.
7684
7685 Compare scalar mode aggregates as scalar mode values. Using
7686 memcmp for them would be very inefficient at best, and is
7687 plain wrong if bitfields are involved. */
7688 {
7689 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
61c25908 7690
544d960a
AS
7691 /* Vector comparisons need no boolification. */
7692 if (TREE_CODE (type) == VECTOR_TYPE)
7693 goto expr_2;
7694 else if (!AGGREGATE_TYPE_P (type))
7f3ff782
KT
7695 {
7696 tree org_type = TREE_TYPE (*expr_p);
7697 *expr_p = gimple_boolify (*expr_p);
7698 if (!useless_type_conversion_p (org_type,
7699 TREE_TYPE (*expr_p)))
7700 {
7701 *expr_p = fold_convert_loc (input_location,
7702 org_type, *expr_p);
7703 ret = GS_OK;
7704 }
7705 else
7706 goto expr_2;
7707 }
726a989a
RB
7708 else if (TYPE_MODE (type) != BLKmode)
7709 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7710 else
7711 ret = gimplify_variable_sized_compare (expr_p);
61c25908 7712
726a989a 7713 break;
61c25908 7714 }
d3147f64 7715
282899df
NS
7716 /* If *EXPR_P does not need to be special-cased, handle it
7717 according to its class. */
6615c446 7718 case tcc_unary:
282899df
NS
7719 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7720 post_p, is_gimple_val, fb_rvalue);
7721 break;
6de9cd9a 7722
6615c446 7723 case tcc_binary:
282899df
NS
7724 expr_2:
7725 {
7726 enum gimplify_status r0, r1;
d3147f64 7727
282899df 7728 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
726a989a 7729 post_p, is_gimple_val, fb_rvalue);
282899df
NS
7730 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7731 post_p, is_gimple_val, fb_rvalue);
d3147f64 7732
282899df
NS
7733 ret = MIN (r0, r1);
7734 break;
7735 }
d3147f64 7736
16949072
RG
7737 expr_3:
7738 {
7739 enum gimplify_status r0, r1, r2;
7740
7741 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7742 post_p, is_gimple_val, fb_rvalue);
7743 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7744 post_p, is_gimple_val, fb_rvalue);
7745 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7746 post_p, is_gimple_val, fb_rvalue);
7747
7748 ret = MIN (MIN (r0, r1), r2);
7749 break;
7750 }
7751
6615c446
JO
7752 case tcc_declaration:
7753 case tcc_constant:
6de9cd9a 7754 ret = GS_ALL_DONE;
282899df 7755 goto dont_recalculate;
d3147f64 7756
282899df 7757 default:
16949072 7758 gcc_unreachable ();
6de9cd9a 7759 }
6de9cd9a
DN
7760
7761 recalculate_side_effects (*expr_p);
726a989a 7762
282899df 7763 dont_recalculate:
6de9cd9a
DN
7764 break;
7765 }
d3147f64 7766
941f78d1 7767 gcc_assert (*expr_p || ret != GS_OK);
6de9cd9a
DN
7768 }
7769 while (ret == GS_OK);
7770
7771 /* If we encountered an error_mark somewhere nested inside, either
7772 stub out the statement or propagate the error back out. */
7773 if (ret == GS_ERROR)
7774 {
7775 if (is_statement)
65355d53 7776 *expr_p = NULL;
6de9cd9a
DN
7777 goto out;
7778 }
7779
6de9cd9a
DN
7780 /* This was only valid as a return value from the langhook, which
7781 we handled. Make sure it doesn't escape from any other context. */
282899df 7782 gcc_assert (ret != GS_UNHANDLED);
6de9cd9a 7783
65355d53 7784 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
6de9cd9a
DN
7785 {
7786 /* We aren't looking for a value, and we don't have a valid
7787 statement. If it doesn't have side-effects, throw it away. */
7788 if (!TREE_SIDE_EFFECTS (*expr_p))
65355d53 7789 *expr_p = NULL;
6de9cd9a 7790 else if (!TREE_THIS_VOLATILE (*expr_p))
44de5aeb
RK
7791 {
7792 /* This is probably a _REF that contains something nested that
7793 has side effects. Recurse through the operands to find it. */
7794 enum tree_code code = TREE_CODE (*expr_p);
7795
282899df 7796 switch (code)
44de5aeb 7797 {
282899df 7798 case COMPONENT_REF:
02a5eac4
EB
7799 case REALPART_EXPR:
7800 case IMAGPART_EXPR:
7801 case VIEW_CONVERT_EXPR:
282899df
NS
7802 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7803 gimple_test_f, fallback);
7804 break;
7805
a9e64c63
EB
7806 case ARRAY_REF:
7807 case ARRAY_RANGE_REF:
44de5aeb
RK
7808 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7809 gimple_test_f, fallback);
7810 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
282899df
NS
7811 gimple_test_f, fallback);
7812 break;
7813
7814 default:
7815 /* Anything else with side-effects must be converted to
a9e64c63 7816 a valid statement before we get here. */
282899df 7817 gcc_unreachable ();
44de5aeb 7818 }
44de5aeb 7819
65355d53 7820 *expr_p = NULL;
44de5aeb 7821 }
a9e64c63
EB
7822 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7823 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6de9cd9a 7824 {
a9e64c63
EB
7825 /* Historically, the compiler has treated a bare reference
7826 to a non-BLKmode volatile lvalue as forcing a load. */
af62f6f9 7827 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
726a989a 7828
c22b1771 7829 /* Normally, we do not want to create a temporary for a
a38578e1
MM
7830 TREE_ADDRESSABLE type because such a type should not be
7831 copied by bitwise-assignment. However, we make an
7832 exception here, as all we are doing here is ensuring that
7833 we read the bytes that make up the type. We use
7834 create_tmp_var_raw because create_tmp_var will abort when
57b51d4d 7835 given a TREE_ADDRESSABLE type. */
a38578e1
MM
7836 tree tmp = create_tmp_var_raw (type, "vol");
7837 gimple_add_tmp_var (tmp);
726a989a
RB
7838 gimplify_assign (tmp, *expr_p, pre_p);
7839 *expr_p = NULL;
6de9cd9a
DN
7840 }
7841 else
7842 /* We can't do anything useful with a volatile reference to
a9e64c63
EB
7843 an incomplete type, so just throw it away. Likewise for
7844 a BLKmode type, since any implicit inner load should
7845 already have been turned into an explicit one by the
7846 gimplification process. */
65355d53 7847 *expr_p = NULL;
6de9cd9a
DN
7848 }
7849
7850 /* If we are gimplifying at the statement level, we're done. Tack
726a989a 7851 everything together and return. */
325c3691 7852 if (fallback == fb_none || is_statement)
6de9cd9a 7853 {
726a989a
RB
7854 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7855 it out for GC to reclaim it. */
7856 *expr_p = NULL_TREE;
7857
7858 if (!gimple_seq_empty_p (internal_pre)
7859 || !gimple_seq_empty_p (internal_post))
be00f578 7860 {
726a989a
RB
7861 gimplify_seq_add_seq (&internal_pre, internal_post);
7862 gimplify_seq_add_seq (pre_p, internal_pre);
be00f578 7863 }
726a989a
RB
7864
7865 /* The result of gimplifying *EXPR_P is going to be the last few
7866 statements in *PRE_P and *POST_P. Add location information
7867 to all the statements that were added by the gimplification
7868 helpers. */
7869 if (!gimple_seq_empty_p (*pre_p))
7870 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7871
7872 if (!gimple_seq_empty_p (*post_p))
7873 annotate_all_with_location_after (*post_p, post_last_gsi,
7874 input_location);
7875
6de9cd9a
DN
7876 goto out;
7877 }
7878
726a989a
RB
7879#ifdef ENABLE_GIMPLE_CHECKING
7880 if (*expr_p)
7881 {
7882 enum tree_code code = TREE_CODE (*expr_p);
7883 /* These expressions should already be in gimple IR form. */
7884 gcc_assert (code != MODIFY_EXPR
7885 && code != ASM_EXPR
7886 && code != BIND_EXPR
7887 && code != CATCH_EXPR
6fc4fb06 7888 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
726a989a
RB
7889 && code != EH_FILTER_EXPR
7890 && code != GOTO_EXPR
7891 && code != LABEL_EXPR
7892 && code != LOOP_EXPR
726a989a
RB
7893 && code != SWITCH_EXPR
7894 && code != TRY_FINALLY_EXPR
7895 && code != OMP_CRITICAL
7896 && code != OMP_FOR
7897 && code != OMP_MASTER
7898 && code != OMP_ORDERED
7899 && code != OMP_PARALLEL
7900 && code != OMP_SECTIONS
7901 && code != OMP_SECTION
7902 && code != OMP_SINGLE);
7903 }
7904#endif
6de9cd9a 7905
726a989a
RB
7906 /* Otherwise we're gimplifying a subexpression, so the resulting
7907 value is interesting. If it's a valid operand that matches
7908 GIMPLE_TEST_F, we're done. Unless we are handling some
7909 post-effects internally; if that's the case, we need to copy into
7910 a temporary before adding the post-effects to POST_P. */
7911 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
6de9cd9a
DN
7912 goto out;
7913
7914 /* Otherwise, we need to create a new temporary for the gimplified
7915 expression. */
7916
7917 /* We can't return an lvalue if we have an internal postqueue. The
7918 object the lvalue refers to would (probably) be modified by the
7919 postqueue; we need to copy the value out first, which means an
7920 rvalue. */
726a989a
RB
7921 if ((fallback & fb_lvalue)
7922 && gimple_seq_empty_p (internal_post)
e847cc68 7923 && is_gimple_addressable (*expr_p))
6de9cd9a
DN
7924 {
7925 /* An lvalue will do. Take the address of the expression, store it
7926 in a temporary, and replace the expression with an INDIRECT_REF of
7927 that temporary. */
db3927fb 7928 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
6de9cd9a 7929 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7f5ad6d7 7930 *expr_p = build_simple_mem_ref (tmp);
6de9cd9a 7931 }
ba4d8f9d 7932 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
6de9cd9a 7933 {
726a989a
RB
7934 /* An rvalue will do. Assign the gimplified expression into a
7935 new temporary TMP and replace the original expression with
7936 TMP. First, make sure that the expression has a type so that
7937 it can be assigned into a temporary. */
282899df 7938 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
6de9cd9a 7939
726a989a 7940 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
6de9cd9a
DN
7941 /* The postqueue might change the value of the expression between
7942 the initialization and use of the temporary, so we can't use a
7943 formal temp. FIXME do we care? */
c685de4a
RG
7944 {
7945 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7946 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7947 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7948 DECL_GIMPLE_REG_P (*expr_p) = 1;
7949 }
6de9cd9a
DN
7950 else
7951 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7952 }
282899df 7953 else
6de9cd9a 7954 {
726a989a 7955#ifdef ENABLE_GIMPLE_CHECKING
282899df
NS
7956 if (!(fallback & fb_mayfail))
7957 {
7958 fprintf (stderr, "gimplification failed:\n");
7959 print_generic_expr (stderr, *expr_p, 0);
7960 debug_tree (*expr_p);
7961 internal_error ("gimplification failed");
7962 }
7963#endif
7964 gcc_assert (fallback & fb_mayfail);
726a989a 7965
282899df 7966 /* If this is an asm statement, and the user asked for the
535a42b1 7967 impossible, don't die. Fail and let gimplify_asm_expr
282899df 7968 issue an error. */
6de9cd9a
DN
7969 ret = GS_ERROR;
7970 goto out;
7971 }
6de9cd9a 7972
6de9cd9a 7973 /* Make sure the temporary matches our predicate. */
282899df 7974 gcc_assert ((*gimple_test_f) (*expr_p));
6de9cd9a 7975
726a989a 7976 if (!gimple_seq_empty_p (internal_post))
6de9cd9a 7977 {
726a989a
RB
7978 annotate_all_with_location (internal_post, input_location);
7979 gimplify_seq_add_seq (pre_p, internal_post);
6de9cd9a
DN
7980 }
7981
7982 out:
7983 input_location = saved_location;
7984 return ret;
7985}
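/* Illustrative sketch, not part of the original source: when the lvalue
   fallback above is taken for an addressable expression such as a.b.c,
   the net effect is roughly

       tmp.1 = &a.b.c;
       ... *tmp.1 used in place of the original expression ...

   i.e. the address is evaluated into a temporary and the expression is
   replaced by a simple memory reference to it.  The temporary name shown
   is hypothetical.  */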
7986
44de5aeb 7987/* Look through TYPE for variable-sized objects and gimplify each such
65355d53 7988 size that we find. Add to LIST_P any statements generated. */
44de5aeb 7989
65355d53 7990void
726a989a 7991gimplify_type_sizes (tree type, gimple_seq *list_p)
44de5aeb 7992{
ad50bc8d
RH
7993 tree field, t;
7994
19dbbf36 7995 if (type == NULL || type == error_mark_node)
8e0a600b 7996 return;
ad50bc8d 7997
6c6cfbfd 7998 /* We first do the main variant, then copy into any other variants. */
ad50bc8d 7999 type = TYPE_MAIN_VARIANT (type);
44de5aeb 8000
8e0a600b 8001 /* Avoid infinite recursion. */
19dbbf36 8002 if (TYPE_SIZES_GIMPLIFIED (type))
8e0a600b
JJ
8003 return;
8004
8005 TYPE_SIZES_GIMPLIFIED (type) = 1;
8006
44de5aeb
RK
8007 switch (TREE_CODE (type))
8008 {
44de5aeb
RK
8009 case INTEGER_TYPE:
8010 case ENUMERAL_TYPE:
8011 case BOOLEAN_TYPE:
44de5aeb 8012 case REAL_TYPE:
325217ed 8013 case FIXED_POINT_TYPE:
65355d53
RH
8014 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8015 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
ad50bc8d
RH
8016
8017 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8018 {
8019 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8020 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
ad50bc8d 8021 }
44de5aeb
RK
8022 break;
8023
8024 case ARRAY_TYPE:
ad50bc8d 8025 /* These types may not have declarations, so handle them here. */
8e0a600b
JJ
8026 gimplify_type_sizes (TREE_TYPE (type), list_p);
8027 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
2e957792
JJ
8028      /* Ensure VLA bounds aren't removed: at -O0 they should be variables
8029	 with assigned stack slots, and at -O1+ with -g they should be tracked
8030	 by VTA.  */
08d78391
EB
8031 if (!(TYPE_NAME (type)
8032 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8033 && DECL_IGNORED_P (TYPE_NAME (type)))
8034 && TYPE_DOMAIN (type)
802e9f8e
JJ
8035 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8036 {
8037 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8038 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8039 DECL_IGNORED_P (t) = 0;
8040 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8041 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8042 DECL_IGNORED_P (t) = 0;
8043 }
44de5aeb
RK
8044 break;
8045
8046 case RECORD_TYPE:
8047 case UNION_TYPE:
8048 case QUAL_UNION_TYPE:
910ad8de 8049 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
44de5aeb 8050 if (TREE_CODE (field) == FIELD_DECL)
8e0a600b
JJ
8051 {
8052 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
9a9ba8d9
JJ
8053 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8054 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8e0a600b
JJ
8055 gimplify_type_sizes (TREE_TYPE (field), list_p);
8056 }
8057 break;
8058
8059 case POINTER_TYPE:
8060 case REFERENCE_TYPE:
706c4bb7
OH
8061 /* We used to recurse on the pointed-to type here, which turned out to
8062 be incorrect because its definition might refer to variables not
8063 yet initialized at this point if a forward declaration is involved.
8064
8065 It was actually useful for anonymous pointed-to types to ensure
8066 that the sizes evaluation dominates every possible later use of the
8067 values. Restricting to such types here would be safe since there
f63645be
KH
8068 is no possible forward declaration around, but would introduce an
8069 undesirable middle-end semantic to anonymity. We then defer to
8070 front-ends the responsibility of ensuring that the sizes are
8071 evaluated both early and late enough, e.g. by attaching artificial
706c4bb7 8072 type declarations to the tree. */
44de5aeb
RK
8073 break;
8074
8075 default:
8076 break;
8077 }
8078
65355d53
RH
8079 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8080 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
44de5aeb 8081
ad50bc8d 8082 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
b4830636 8083 {
ad50bc8d
RH
8084 TYPE_SIZE (t) = TYPE_SIZE (type);
8085 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8086 TYPE_SIZES_GIMPLIFIED (t) = 1;
b4830636 8087 }
b4830636
RH
8088}
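/* Illustrative sketch, not part of the original source: for a C99 VLA such
   as

       void f (int n) { char buf[n + 1]; ... }

   the ARRAY_TYPE of buf has a variable TYPE_DOMAIN bound and variable
   TYPE_SIZE/TYPE_SIZE_UNIT, so the walk above gimplifies those expressions
   into temporaries appended to LIST_P, and every later use of the type
   shares the same, already-evaluated sizes.  */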
8089
8090/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8091 a size or position, has had all of its SAVE_EXPRs evaluated.
726a989a 8092 We add any required statements to *STMT_P. */
44de5aeb
RK
8093
8094void
726a989a 8095gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
44de5aeb 8096{
a9c5ddf9
RH
8097 tree type, expr = *expr_p;
8098
44de5aeb 8099 /* We don't do anything if the value isn't there, is constant, or contains
1e748a2b 8100    a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
aabcd309 8101 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
1e748a2b
RK
8102 will want to replace it with a new variable, but that will cause problems
8103 if this type is from outside the function. It's OK to have that here. */
a9c5ddf9
RH
8104 if (expr == NULL_TREE || TREE_CONSTANT (expr)
8105 || TREE_CODE (expr) == VAR_DECL
8106 || CONTAINS_PLACEHOLDER_P (expr))
44de5aeb
RK
8107 return;
8108
a9c5ddf9
RH
8109 type = TREE_TYPE (expr);
8110 *expr_p = unshare_expr (expr);
8111
ad50bc8d 8112 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
a9c5ddf9
RH
8113 expr = *expr_p;
8114
8115     /* Verify that we have an exact type match with the original expression.
8116 In particular, we do not wish to drop a "sizetype" in favour of a
8117 type of similar dimensions. We don't want to pollute the generic
8118 type-stripping code with this knowledge because it doesn't matter
8119 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
8120 and friends retain their "sizetype-ness". */
7fd6694b
RH
8121 if (TREE_TYPE (expr) != type
8122 && TREE_CODE (type) == INTEGER_TYPE
8123 && TYPE_IS_SIZETYPE (type))
a9c5ddf9
RH
8124 {
8125 tree tmp;
726a989a 8126 gimple stmt;
a9c5ddf9
RH
8127
8128 *expr_p = create_tmp_var (type, NULL);
8129 tmp = build1 (NOP_EXPR, type, expr);
726a989a 8130 stmt = gimplify_assign (*expr_p, tmp, stmt_p);
ec52b111 8131 gimple_set_location (stmt, EXPR_LOC_OR_HERE (expr));
a9c5ddf9 8132 }
44de5aeb 8133}
6de9cd9a 8134
3ad065ef
EB
8135/* Gimplify the body of FNDECL and return a GIMPLE_BIND node
8136 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
8137 is true, also gimplify the parameters. */
726a989a
RB
8138
8139gimple
3ad065ef 8140gimplify_body (tree fndecl, bool do_parms)
6de9cd9a
DN
8141{
8142 location_t saved_location = input_location;
726a989a
RB
8143 gimple_seq parm_stmts, seq;
8144 gimple outer_bind;
d406b663 8145 struct gimplify_ctx gctx;
9f9ebcdf 8146 struct cgraph_node *cgn;
6de9cd9a
DN
8147
8148 timevar_push (TV_TREE_GIMPLIFY);
953ff289 8149
f66d6761
SB
8150 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
8151 gimplification. */
8152 default_rtl_profile ();
8153
953ff289 8154 gcc_assert (gimplify_ctxp == NULL);
d406b663 8155 push_gimplify_context (&gctx);
6de9cd9a 8156
44de5aeb
RK
8157 /* Unshare most shared trees in the body and in that of any nested functions.
8158 It would seem we don't have to do this for nested functions because
8159 they are supposed to be output and then the outer function gimplified
8160 first, but the g++ front end doesn't always do it that way. */
3ad065ef
EB
8161 unshare_body (fndecl);
8162 unvisit_body (fndecl);
6de9cd9a 8163
9f9ebcdf
MJ
8164 cgn = cgraph_get_node (fndecl);
8165 if (cgn && cgn->origin)
77f2a970
JJ
8166 nonlocal_vlas = pointer_set_create ();
8167
fa10beec 8168 /* Make sure input_location isn't set to something weird. */
6de9cd9a
DN
8169 input_location = DECL_SOURCE_LOCATION (fndecl);
8170
4744afba
RH
8171 /* Resolve callee-copies. This has to be done before processing
8172 the body so that DECL_VALUE_EXPR gets processed correctly. */
3ad065ef 8173 parm_stmts = do_parms ? gimplify_parameters () : NULL;
4744afba 8174
6de9cd9a 8175 /* Gimplify the function's body. */
726a989a 8176 seq = NULL;
3ad065ef 8177 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
726a989a
RB
8178 outer_bind = gimple_seq_first_stmt (seq);
8179 if (!outer_bind)
6de9cd9a 8180 {
726a989a
RB
8181 outer_bind = gimple_build_nop ();
8182 gimplify_seq_add_stmt (&seq, outer_bind);
6de9cd9a 8183 }
44de5aeb 8184
726a989a
RB
8185 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
8186 not the case, wrap everything in a GIMPLE_BIND to make it so. */
8187 if (gimple_code (outer_bind) == GIMPLE_BIND
8188 && gimple_seq_first (seq) == gimple_seq_last (seq))
8189 ;
8190 else
8191 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8192
3ad065ef 8193 DECL_SAVED_TREE (fndecl) = NULL_TREE;
4744afba
RH
8194
8195 /* If we had callee-copies statements, insert them at the beginning
f0c10f0f 8196 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
726a989a 8197 if (!gimple_seq_empty_p (parm_stmts))
4744afba 8198 {
f0c10f0f
RG
8199 tree parm;
8200
726a989a
RB
8201 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8202 gimple_bind_set_body (outer_bind, parm_stmts);
f0c10f0f
RG
8203
8204 for (parm = DECL_ARGUMENTS (current_function_decl);
910ad8de 8205 parm; parm = DECL_CHAIN (parm))
f0c10f0f
RG
8206 if (DECL_HAS_VALUE_EXPR_P (parm))
8207 {
8208 DECL_HAS_VALUE_EXPR_P (parm) = 0;
8209 DECL_IGNORED_P (parm) = 0;
8210 }
4744afba
RH
8211 }
8212
77f2a970
JJ
8213 if (nonlocal_vlas)
8214 {
8215 pointer_set_destroy (nonlocal_vlas);
8216 nonlocal_vlas = NULL;
8217 }
8218
726a989a 8219 pop_gimplify_context (outer_bind);
953ff289 8220 gcc_assert (gimplify_ctxp == NULL);
6de9cd9a 8221
1da2ed5f 8222 if (!seen_error ())
34019e28 8223 verify_gimple_in_seq (gimple_bind_body (outer_bind));
6de9cd9a
DN
8224
8225 timevar_pop (TV_TREE_GIMPLIFY);
8226 input_location = saved_location;
726a989a
RB
8227
8228 return outer_bind;
6de9cd9a
DN
8229}
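/* Illustrative sketch, not part of the original source: after gimplify_body
   the tree body is gone (DECL_SAVED_TREE is cleared) and the result is a
   single GIMPLE_BIND, e.g. for

       int f (void) { return g () + 1; }

   roughly

       gimple_bind <
         D.1 = g ();
         D.2 = D.1 + 1;
         return D.2;
       >

   Temporary names and the exact statement split are hypothetical.  */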
8230
6a1f6c9c
JM
8231typedef char *char_p; /* For DEF_VEC_P. */
8232DEF_VEC_P(char_p);
8233DEF_VEC_ALLOC_P(char_p,heap);
8234
8235/* Return whether we should exclude FNDECL from instrumentation. */
8236
8237static bool
8238flag_instrument_functions_exclude_p (tree fndecl)
8239{
8240 VEC(char_p,heap) *vec;
8241
8242 vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_functions;
8243 if (VEC_length (char_p, vec) > 0)
8244 {
8245 const char *name;
8246 int i;
8247 char *s;
8248
8249 name = lang_hooks.decl_printable_name (fndecl, 0);
8250 FOR_EACH_VEC_ELT (char_p, vec, i, s)
8251 if (strstr (name, s) != NULL)
8252 return true;
8253 }
8254
8255 vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_files;
8256 if (VEC_length (char_p, vec) > 0)
8257 {
8258 const char *name;
8259 int i;
8260 char *s;
8261
8262 name = DECL_SOURCE_FILE (fndecl);
8263 FOR_EACH_VEC_ELT (char_p, vec, i, s)
8264 if (strstr (name, s) != NULL)
8265 return true;
8266 }
8267
8268 return false;
8269}
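/* Illustrative note, not part of the original source: the two vectors
   above are populated from -finstrument-functions-exclude-function-list=
   and -finstrument-functions-exclude-file-list=, and matching is by plain
   substring (strstr) against the printable function name or the source
   file name.  For example

       -finstrument-functions-exclude-file-list=include/c++,tmpl

   would exclude any function defined in a file whose path contains either
   substring.  */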
8270
6de9cd9a 8271/* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
726a989a 8272 node for the function we want to gimplify.
b8698a0f 8273
ad19c4be 8274   The resulting sequence of GIMPLE statements corresponding to the body
726a989a 8275   of FNDECL is stored in FNDECL's GIMPLE body via gimple_set_body.  */
6de9cd9a
DN
8276
8277void
8278gimplify_function_tree (tree fndecl)
8279{
e41d82f5 8280 tree oldfn, parm, ret;
726a989a
RB
8281 gimple_seq seq;
8282 gimple bind;
6de9cd9a 8283
a406865a
RG
8284 gcc_assert (!gimple_body (fndecl));
8285
6de9cd9a
DN
8286 oldfn = current_function_decl;
8287 current_function_decl = fndecl;
db2960f4
SL
8288 if (DECL_STRUCT_FUNCTION (fndecl))
8289 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8290 else
8291 push_struct_function (fndecl);
6de9cd9a 8292
910ad8de 8293 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
e41d82f5
RH
8294 {
8295 /* Preliminarily mark non-addressed complex variables as eligible
8296 for promotion to gimple registers. We'll transform their uses
8297 as we find them. */
0890b981
AP
8298 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8299 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
e41d82f5
RH
8300 && !TREE_THIS_VOLATILE (parm)
8301 && !needs_to_live_in_memory (parm))
0890b981 8302 DECL_GIMPLE_REG_P (parm) = 1;
e41d82f5
RH
8303 }
8304
8305 ret = DECL_RESULT (fndecl);
0890b981 8306 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7b7e6ecd 8307 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
e41d82f5 8308 && !needs_to_live_in_memory (ret))
0890b981 8309 DECL_GIMPLE_REG_P (ret) = 1;
e41d82f5 8310
3ad065ef 8311 bind = gimplify_body (fndecl, true);
726a989a
RB
8312
8313 /* The tree body of the function is no longer needed, replace it
8314 with the new GIMPLE body. */
355a7673 8315 seq = NULL;
726a989a
RB
8316 gimple_seq_add_stmt (&seq, bind);
8317 gimple_set_body (fndecl, seq);
6de9cd9a
DN
8318
8319 /* If we're instrumenting function entry/exit, then prepend the call to
8320 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8321 catch the exit hook. */
8322 /* ??? Add some way to ignore exceptions for this TFE. */
8323 if (flag_instrument_function_entry_exit
8d5a7d1f
ILT
8324 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8325 && !flag_instrument_functions_exclude_p (fndecl))
6de9cd9a 8326 {
726a989a
RB
8327 tree x;
8328 gimple new_bind;
8329 gimple tf;
8330 gimple_seq cleanup = NULL, body = NULL;
b01890ff
JH
8331 tree tmp_var;
8332 gimple call;
8333
e79983f4 8334 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 8335 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
8336 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8337 gimple_call_set_lhs (call, tmp_var);
8338 gimplify_seq_add_stmt (&cleanup, call);
e79983f4 8339 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
b01890ff
JH
8340 call = gimple_build_call (x, 2,
8341 build_fold_addr_expr (current_function_decl),
8342 tmp_var);
8343 gimplify_seq_add_stmt (&cleanup, call);
726a989a 8344 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
6de9cd9a 8345
e79983f4 8346 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 8347 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
8348 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8349 gimple_call_set_lhs (call, tmp_var);
8350 gimplify_seq_add_stmt (&body, call);
e79983f4 8351 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
b01890ff
JH
8352 call = gimple_build_call (x, 2,
8353 build_fold_addr_expr (current_function_decl),
8354 tmp_var);
8355 gimplify_seq_add_stmt (&body, call);
726a989a 8356 gimplify_seq_add_stmt (&body, tf);
32001f69 8357 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
726a989a
RB
8358 /* Clear the block for BIND, since it is no longer directly inside
8359 the function, but within a try block. */
32001f69 8360 gimple_bind_set_block (bind, NULL);
6de9cd9a 8361
726a989a
RB
8362 /* Replace the current function body with the body
8363 wrapped in the try/finally TF. */
355a7673 8364 seq = NULL;
726a989a
RB
8365 gimple_seq_add_stmt (&seq, new_bind);
8366 gimple_set_body (fndecl, seq);
6de9cd9a
DN
8367 }
8368
726a989a 8369 DECL_SAVED_TREE (fndecl) = NULL_TREE;
a406865a 8370 cfun->curr_properties = PROP_gimple_any;
726a989a 8371
6de9cd9a 8372 current_function_decl = oldfn;
db2960f4 8373 pop_cfun ();
6de9cd9a 8374}
726a989a 8375
726a989a
RB
8376/* Some transformations like inlining may invalidate the GIMPLE form
8377 for operands. This function traverses all the operands in STMT and
8378 gimplifies anything that is not a valid gimple operand. Any new
8379 GIMPLE statements are inserted before *GSI_P. */
8380
8381void
8382gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
8383{
8384 size_t i, num_ops;
8385 tree orig_lhs = NULL_TREE, lhs, t;
8386 gimple_seq pre = NULL;
8387 gimple post_stmt = NULL;
8388 struct gimplify_ctx gctx;
8389
8390 push_gimplify_context (&gctx);
8391 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8392
8393 switch (gimple_code (stmt))
8394 {
8395 case GIMPLE_COND:
8396 gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
8397 is_gimple_val, fb_rvalue);
8398 gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
8399 is_gimple_val, fb_rvalue);
8400 break;
e8789588
JJ
8401 case GIMPLE_SWITCH:
8402 gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
8403 is_gimple_val, fb_rvalue);
8404 break;
726a989a
RB
8405 case GIMPLE_OMP_ATOMIC_LOAD:
8406 gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
8407 is_gimple_val, fb_rvalue);
8408 break;
8409 case GIMPLE_ASM:
8410 {
8411 size_t i, noutputs = gimple_asm_noutputs (stmt);
8412 const char *constraint, **oconstraints;
8413 bool allows_mem, allows_reg, is_inout;
8414
8415 oconstraints
8416 = (const char **) alloca ((noutputs) * sizeof (const char *));
8417 for (i = 0; i < noutputs; i++)
8418 {
8419 tree op = gimple_asm_output_op (stmt, i);
8420 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8421 oconstraints[i] = constraint;
8422 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
8423 &allows_reg, &is_inout);
8424 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8425 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
8426 fb_lvalue | fb_mayfail);
8427 }
8428 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
8429 {
8430 tree op = gimple_asm_input_op (stmt, i);
8431 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8432 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
8433 oconstraints, &allows_mem, &allows_reg);
8434 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
8435 allows_reg = 0;
8436 if (!allows_reg && allows_mem)
8437 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8438 is_gimple_lvalue, fb_lvalue | fb_mayfail);
8439 else
8440 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8441 is_gimple_asm_val, fb_rvalue);
8442 }
8443 }
8444 break;
8445 default:
8446 /* NOTE: We start gimplifying operands from last to first to
8447 make sure that side-effects on the RHS of calls, assignments
8448 and ASMs are executed before the LHS. The ordering is not
8449 important for other statements. */
8450 num_ops = gimple_num_ops (stmt);
8451 orig_lhs = gimple_get_lhs (stmt);
8452 for (i = num_ops; i > 0; i--)
8453 {
8454 tree op = gimple_op (stmt, i - 1);
8455 if (op == NULL_TREE)
8456 continue;
8457 if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
8458 gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
8459 else if (i == 2
8460 && is_gimple_assign (stmt)
8461 && num_ops == 2
8462 && get_gimple_rhs_class (gimple_expr_code (stmt))
8463 == GIMPLE_SINGLE_RHS)
8464 gimplify_expr (&op, &pre, NULL,
8465 rhs_predicate_for (gimple_assign_lhs (stmt)),
8466 fb_rvalue);
8467 else if (i == 2 && is_gimple_call (stmt))
8468 {
8469 if (TREE_CODE (op) == FUNCTION_DECL)
8470 continue;
8471 gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
8472 }
8473 else
8474 gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
8475 gimple_set_op (stmt, i - 1, op);
8476 }
8477
8478 lhs = gimple_get_lhs (stmt);
bdec4dc7
RG
8479	  /* If the LHS changed in a way that requires a simple RHS,
8480	     create a temporary.  */
ba4d8f9d 8481 if (lhs && !is_gimple_reg (lhs))
726a989a
RB
8482 {
8483 bool need_temp = false;
8484
8485 if (is_gimple_assign (stmt)
8486 && num_ops == 2
8487 && get_gimple_rhs_class (gimple_expr_code (stmt))
8488 == GIMPLE_SINGLE_RHS)
8489 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
8490 rhs_predicate_for (gimple_assign_lhs (stmt)),
8491 fb_rvalue);
8492 else if (is_gimple_reg (lhs))
8493 {
8494 if (is_gimple_reg_type (TREE_TYPE (lhs)))
8495 {
8496 if (is_gimple_call (stmt))
8497 {
8498 i = gimple_call_flags (stmt);
8499 if ((i & ECF_LOOPING_CONST_OR_PURE)
8500 || !(i & (ECF_CONST | ECF_PURE)))
8501 need_temp = true;
8502 }
8503 if (stmt_can_throw_internal (stmt))
8504 need_temp = true;
8505 }
8506 }
8507 else
8508 {
8509 if (is_gimple_reg_type (TREE_TYPE (lhs)))
8510 need_temp = true;
8511 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
8512 {
8513 if (is_gimple_call (stmt))
8514 {
8515 tree fndecl = gimple_call_fndecl (stmt);
8516
8517 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
8518 && !(fndecl && DECL_RESULT (fndecl)
8519 && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
8520 need_temp = true;
8521 }
8522 else
8523 need_temp = true;
8524 }
8525 }
8526 if (need_temp)
8527 {
acd63801 8528 tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
726a989a 8529
726a989a
RB
8530 if (TREE_CODE (orig_lhs) == SSA_NAME)
8531 orig_lhs = SSA_NAME_VAR (orig_lhs);
726a989a
RB
8532
8533 if (gimple_in_ssa_p (cfun))
8534 temp = make_ssa_name (temp, NULL);
8535 gimple_set_lhs (stmt, temp);
8536 post_stmt = gimple_build_assign (lhs, temp);
8537 if (TREE_CODE (lhs) == SSA_NAME)
8538 SSA_NAME_DEF_STMT (lhs) = post_stmt;
8539 }
8540 }
8541 break;
8542 }
8543
f93bc6f5
JJ
8544 if (gimple_referenced_vars (cfun))
8545 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
8546 add_referenced_var (t);
8547
726a989a
RB
8548 if (!gimple_seq_empty_p (pre))
8549 {
8550 if (gimple_in_ssa_p (cfun))
8551 {
8552 gimple_stmt_iterator i;
8553
8554 for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
8555 mark_symbols_for_renaming (gsi_stmt (i));
8556 }
8557 gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
8558 }
8559 if (post_stmt)
8560 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
8561
726a989a
RB
8562 pop_gimplify_context (NULL);
8563}
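/* Illustrative usage sketch, not part of the original source: a pass that
   substitutes possibly non-GIMPLE trees into a statement's operands can
   restore GIMPLE form in place.  The wrapper below and its name are
   hypothetical; only gsi_stmt and gimple_regimplify_operands are existing
   interfaces.  */

static void
example_regimplify_at (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  /* ... some transformation has rewritten the operands of STMT ... */

  /* Re-gimplify the operands; any new statements go before *GSI.  */
  gimple_regimplify_operands (stmt, gsi);
}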
8564
ad19c4be 8565/* Expand EXPR into a list of gimple statements STMTS.  GIMPLE_TEST_F specifies
bcf71673 8566 the predicate that will hold for the result. If VAR is not NULL, make the
8b11a64c
ZD
8567 base variable of the final destination be VAR if suitable. */
8568
8569tree
bcf71673
RG
8570force_gimple_operand_1 (tree expr, gimple_seq *stmts,
8571 gimple_predicate gimple_test_f, tree var)
8b11a64c
ZD
8572{
8573 tree t;
8574 enum gimplify_status ret;
d406b663 8575 struct gimplify_ctx gctx;
8b11a64c 8576
726a989a 8577 *stmts = NULL;
8b11a64c 8578
844d5fca
RG
8579   /* gimple_test_f might be more strict than is_gimple_val; make
8580 sure we pass both. Just checking gimple_test_f doesn't work
8581 because most gimple predicates do not work recursively. */
8582 if (is_gimple_val (expr)
8583 && (*gimple_test_f) (expr))
8b11a64c
ZD
8584 return expr;
8585
d406b663 8586 push_gimplify_context (&gctx);
5cd4ec7f 8587 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
aea74440 8588 gimplify_ctxp->allow_rhs_cond_expr = true;
8b11a64c
ZD
8589
8590 if (var)
726a989a 8591 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
8b11a64c 8592
726a989a 8593 if (TREE_CODE (expr) != MODIFY_EXPR
917948d3
ZD
8594 && TREE_TYPE (expr) == void_type_node)
8595 {
8596 gimplify_and_add (expr, stmts);
8597 expr = NULL_TREE;
8598 }
8599 else
8600 {
726a989a 8601 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
917948d3
ZD
8602 gcc_assert (ret != GS_ERROR);
8603 }
8b11a64c 8604
5cd4ec7f 8605 if (gimple_referenced_vars (cfun))
910ad8de 8606 for (t = gimplify_ctxp->temps; t ; t = DECL_CHAIN (t))
726a989a 8607 add_referenced_var (t);
8b11a64c
ZD
8608
8609 pop_gimplify_context (NULL);
8610
8611 return expr;
8612}
8613
ad19c4be 8614/* Expand EXPR into a list of gimple statements STMTS.  If SIMPLE is true,
bcf71673
RG
8615 force the result to be either ssa_name or an invariant, otherwise
8616 just force it to be a rhs expression. If VAR is not NULL, make the
8617 base variable of the final destination be VAR if suitable. */
9885da8e
ZD
8618
8619tree
bcf71673
RG
8620force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
8621{
8622 return force_gimple_operand_1 (expr, stmts,
8623 simple ? is_gimple_val : is_gimple_reg_rhs,
8624 var);
8625}
8626
ad19c4be 8627/* Invoke force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F
bcf71673
RG
8628   and VAR.  If some statements are produced, emit them at GSI.
8629   If BEFORE is true, the statements are appended before GSI, otherwise
8630 they are appended after it. M specifies the way GSI moves after
8631 insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values). */
8632
8633tree
8634force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr,
8635 gimple_predicate gimple_test_f,
8636 tree var, bool before,
8637 enum gsi_iterator_update m)
9885da8e 8638{
726a989a 8639 gimple_seq stmts;
9885da8e 8640
bcf71673 8641 expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var);
726a989a
RB
8642
8643 if (!gimple_seq_empty_p (stmts))
c6540bde 8644 {
928bc34f
EB
8645 if (gimple_in_ssa_p (cfun))
8646 {
726a989a 8647 gimple_stmt_iterator i;
928bc34f 8648
726a989a
RB
8649 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
8650 mark_symbols_for_renaming (gsi_stmt (i));
928bc34f
EB
8651 }
8652
c6540bde 8653 if (before)
726a989a 8654 gsi_insert_seq_before (gsi, stmts, m);
c6540bde 8655 else
726a989a 8656 gsi_insert_seq_after (gsi, stmts, m);
c6540bde 8657 }
9885da8e
ZD
8658
8659 return expr;
8660}
8661
ad19c4be 8662/* Invoke force_gimple_operand_1 for EXPR with parameter VAR.
bcf71673
RG
8663 If SIMPLE is true, force the result to be either ssa_name or an invariant,
8664 otherwise just force it to be a rhs expression. If some statements are
8665 produced, emits them at GSI. If BEFORE is true, the statements are
8666 appended before GSI, otherwise they are appended after it. M specifies
8667 the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING
8668 are the usual values). */
8669
8670tree
8671force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
8672 bool simple_p, tree var, bool before,
8673 enum gsi_iterator_update m)
8674{
8675 return force_gimple_operand_gsi_1 (gsi, expr,
8676 simple_p
8677 ? is_gimple_val : is_gimple_reg_rhs,
8678 var, before, m);
8679}
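/* Illustrative usage sketch, not part of the original source: force an
   arbitrary tree expression to be a GIMPLE value, inserting any statements
   it needs immediately before the statement at GSI.  The helper name and
   SIZE_EXPR are hypothetical; the call matches the interface defined
   above.  */

static tree
example_force_before (gimple_stmt_iterator *gsi, tree size_expr)
{
  return force_gimple_operand_gsi (gsi, size_expr, true, NULL_TREE,
                                   true, GSI_SAME_STMT);
}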
8680
8681
6de9cd9a 8682#include "gt-gimplify.h"