gcc/gimple-expr.c
/* Gimple decl, type, and expression support functions.

   Copyright (C) 2007-2019 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "stringpool.h"
#include "gimple-ssa.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "demangle.h"
#include "hash-set.h"
#include "rtl.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "attribs.h"

/* ----- Type related ----- */

/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
   useless type conversion, otherwise return false.

   This function implicitly defines the middle-end type system.  With
   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
   the following invariants shall be fulfilled:

     1) useless_type_conversion_p is transitive.
        If a < b and b < c then a < c.

     2) useless_type_conversion_p is not symmetric.
        From a < b it does not follow that a > b.

     3) Types define the available set of operations applicable to values.
        A type conversion is useless if the operations for the target type
        are a subset of the operations for the source type.  For example
        casts to void* are useless, casts from void* are not (void* can't
        be dereferenced or offsetted, but copied, hence its set of operations
        is a strict subset of that of all other data pointer types).  Casts
        to const T* are useless (can't be written to), casts from const T*
        to T* are not.  */
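
/* For example, a conversion from 'int' to 'const int' is useless because
   qualifiers on value types are ignored below, whereas a conversion from
   'unsigned int' to 'int' is not, because it changes the signedness.  */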

bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts between pointers to different address spaces.  */
      if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
          != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
        return false;
      /* Do not lose casts to function pointer types.  */
      if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
           || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
          && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
               || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
        return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* Changes in machine mode are never useless conversions because the RTL
     middle-end expects explicit conversions between modes.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
          || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
        return false;

      /* Preserve conversions to/from BOOLEAN_TYPE if types are not
         of precision one.  */
      if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
           != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
          && TYPE_PRECISION (outer_type) != 1)
        return false;

      /* We don't need to preserve changes in the types' minimum or
         maximum values in general, as these do not generate code
         unless the types' precisions are different.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
           && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
           && FIXED_POINT_TYPE_P (outer_type))
    return TYPE_SATURATING (inner_type) == TYPE_SATURATING (outer_type);

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
           && POINTER_TYPE_P (outer_type))
    {
      /* We do not care for const qualification of the pointed-to types
         as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent.  */
      return true;
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
           && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
           && TREE_CODE (outer_type) == VECTOR_TYPE
           && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
           && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve various attributes.  */
      if (TYPE_REVERSE_STORAGE_ORDER (inner_type)
          != TYPE_REVERSE_STORAGE_ORDER (outer_type))
        return false;
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
        return false;

      /* Conversions from array types with unknown extent to
         array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type) && TYPE_DOMAIN (outer_type))
        return false;

      /* Nor are conversions from array types with non-constant size to
         array types with constant size or to different size.  */
      if (TYPE_SIZE (outer_type)
          && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
          && (!TYPE_SIZE (inner_type)
              || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
              || !tree_int_cst_equal (TYPE_SIZE (outer_type),
                                      TYPE_SIZE (inner_type))))
        return false;

      /* Check conversions between arrays with partially known extents.
         If the array min/max values are constant they have to match.
         Otherwise allow conversions to unknown and variable extents.
         In particular this declares conversions that may change the
         mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
          && TYPE_DOMAIN (outer_type)
          && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
        {
          tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
          tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
          tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
          tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

          /* After gimplification a variable min/max value carries no
             additional information compared to a NULL value.  All that
             matters has been lowered to be part of the IL.  */
          if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
            inner_min = NULL_TREE;
          if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
            outer_min = NULL_TREE;
          if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
            inner_max = NULL_TREE;
          if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
            outer_max = NULL_TREE;

          /* Conversions NULL / variable <- cst are useless, but not
             the other way around.  */
          if (outer_min
              && (!inner_min
                  || !tree_int_cst_equal (inner_min, outer_min)))
            return false;
          if (outer_max
              && (!inner_max
                  || !tree_int_cst_equal (inner_max, outer_max)))
            return false;
        }

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
                                        TREE_TYPE (inner_type));
    }

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
            || TREE_CODE (inner_type) == METHOD_TYPE)
           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type)))
        return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
          && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
                                         TYPE_METHOD_BASETYPE (inner_type)))
        return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!prototype_p (outer_type))
        return true;

      /* If the unqualified argument types are compatible the conversion
         is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
        return true;

      for (outer_parm = TYPE_ARG_TYPES (outer_type),
             inner_parm = TYPE_ARG_TYPES (inner_type);
           outer_parm && inner_parm;
           outer_parm = TREE_CHAIN (outer_parm),
             inner_parm = TREE_CHAIN (inner_parm))
        if (!useless_type_conversion_p
              (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
               TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
          return false;

      /* If there is a mismatch in the number of arguments the functions
         are not compatible.  */
      if (outer_parm || inner_parm)
        return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
        return comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions for types that would otherwise have to be
     compared structurally.  */
  else if (AGGREGATE_TYPE_P (inner_type)
           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return TYPE_CANONICAL (inner_type)
           && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type);

  else if (TREE_CODE (inner_type) == OFFSET_TYPE
           && TREE_CODE (outer_type) == OFFSET_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type))
           && useless_type_conversion_p
                (TYPE_OFFSET_BASETYPE (outer_type),
                 TYPE_OFFSET_BASETYPE (inner_type));

  return false;
}


/* ----- Decl related ----- */

/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl)
          || (fn && fn->cfg && !(fn->curr_properties & PROP_rtl)));
}

/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (HAS_DECL_ASSEMBLER_NAME_P (decl) && DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
        {
          dmgl_opts = DMGL_VERBOSE
                      | DMGL_ANSI
                      | DMGL_GNU_V3
                      | DMGL_RET_POSTFIX;
          if (TREE_CODE (decl) == FUNCTION_DECL)
            dmgl_opts |= DMGL_PARAMS;
        }

      const char *mangled_str
        = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
      const char *str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return str ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}


/* Create a new VAR_DECL and copy information from VAR to it.  */

tree
copy_var_decl (tree var, tree name, tree type)
{
  tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);

  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  DECL_CONTEXT (copy) = DECL_CONTEXT (var);
  TREE_NO_WARNING (copy) = TREE_NO_WARNING (var);
  TREE_USED (copy) = 1;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
  DECL_ATTRIBUTES (copy) = DECL_ATTRIBUTES (var);
  if (DECL_USER_ALIGN (var))
    {
      SET_DECL_ALIGN (copy, DECL_ALIGN (var));
      DECL_USER_ALIGN (copy) = 1;
    }

  return copy;
}

/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending of a period followed by
   up to four characters.  (like ".cpp".)  */
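/* For instance, a NAME of "foo.cpp" is truncated in place to "foo".  */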

static inline void
remove_suffix (char *name, int len)
{
  int i;

  for (i = 2; i < 7 && len > i; i++)
    {
      if (name[len - i] == '.')
        {
          name[len - i] = '\0';
          break;
        }
    }
}

/* Create a new temporary name with PREFIX.  Return an identifier.  */

static GTY(()) unsigned int tmp_var_id_num;

tree
create_tmp_var_name (const char *prefix)
{
  char *tmp_name;

  if (prefix)
    {
      char *preftmp = ASTRDUP (prefix);

      remove_suffix (preftmp, strlen (preftmp));
      clean_symbol_name (preftmp);

      prefix = preftmp;
    }

  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
  return get_identifier (tmp_name);
}
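
/* On typical targets ASM_FORMAT_PRIVATE_NAME appends ".<number>" to the
   prefix, so a PREFIX of "iftmp" would yield identifiers such as "iftmp.3";
   the exact spelling is target-defined.  */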

/* Create a new temporary variable declaration of type TYPE.
   Do NOT push it into the current binding.  */

tree
create_tmp_var_raw (tree type, const char *prefix)
{
  tree tmp_var;

  tmp_var = build_decl (input_location,
                        VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
                        type);

  /* The variable was declared by the compiler.  */
  DECL_ARTIFICIAL (tmp_var) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (tmp_var) = 1;
  /* And we don't want even the fancy names of those printed in
     -fdump-final-insns= dumps.  */
  DECL_NAMELESS (tmp_var) = 1;

  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  DECL_EXTERNAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  TREE_USED (tmp_var) = 1;

  return tmp_var;
}

/* Create a new temporary variable declaration of type TYPE.  DO push the
   variable into the current binding.  Further, assume that this is called
   only from gimplification or optimization, at which point the creation of
   certain types is a bug.  */

tree
create_tmp_var (tree type, const char *prefix)
{
  tree tmp_var;

  /* We don't allow types that are addressable (meaning we can't make copies),
     or incomplete.  We also used to reject all variable-size objects here,
     but now support those for which a constant upper bound can be obtained.
     The processing for variable sizes is performed in gimple_add_tmp_var,
     the point at which it really matters and which may be reached via paths
     not going through this function, e.g. after direct calls to
     create_tmp_var_raw.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  tmp_var = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var (tmp_var);
  return tmp_var;
}

/* Create a new temporary variable declaration of type TYPE by calling
   create_tmp_var and if TYPE is a vector or a complex number, mark the new
   temporary as gimple register.  */

tree
create_tmp_reg (tree type, const char *prefix)
{
  tree tmp;

  tmp = create_tmp_var (type, prefix);
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;

  return tmp;
}

/* Create a new temporary variable declaration of type TYPE in function FN
   by calling create_tmp_var_raw and gimple_add_tmp_var_fn, and if TYPE is a
   vector or a complex number, mark the new temporary as a gimple register.  */

tree
create_tmp_reg_fn (struct function *fn, tree type, const char *prefix)
{
  tree tmp;

  tmp = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var_fn (fn, tmp);
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;

  return tmp;
}


/* ----- Expression related ----- */

/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
                       tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
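
/* For example, a PLUS_EXPR <a, b> is a GIMPLE_BINARY_RHS and yields subcode
   PLUS_EXPR with *OP1_P = a, *OP2_P = b and *OP3_P = NULL_TREE, while a bare
   SSA name or constant is a GIMPLE_SINGLE_RHS and is returned unchanged in
   *OP1_P.  */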

/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (COMPARISON_CLASS_P (cond)
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}
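
/* So a condition '!x' comes back as 'x == 0' (EQ_EXPR) and a bare value 'x'
   as 'x != 0' (NE_EXPR), while an ordinary comparison such as 'a < b' is
   passed through unchanged.  */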

/* Return true if T is a valid LHS for a GIMPLE assignment expression.  */

bool
is_gimple_lvalue (tree t)
{
  return (is_gimple_addressable (t)
          || TREE_CODE (t) == WITH_SIZE_EXPR
          /* These are complex lvalues, but don't have addresses, so they
             go here.  */
          || TREE_CODE (t) == BIT_FIELD_REF);
}

/* Return true if T is a GIMPLE condition.  */

bool
is_gimple_condexpr (tree t)
{
  return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
                                && !tree_could_throw_p (t)
                                && is_gimple_val (TREE_OPERAND (t, 0))
                                && is_gimple_val (TREE_OPERAND (t, 1))));
}

/* Return true if T is a gimple address.  */

bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
           || TREE_CODE (op) == ARRAY_RANGE_REF)
          && !is_gimple_val (TREE_OPERAND (op, 1)))
        return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op)
      || TREE_CODE (op) == TARGET_MEM_REF
      || TREE_CODE (op) == MEM_REF)
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}

/* Return true if T is a gimple invariant address.  */

bool
is_gimple_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
}

/* Return true if T is a gimple invariant address at IPA level
   (so addresses of variables on stack are not allowed).  */

bool
is_gimple_ip_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
}
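
/* For example, the address of a local automatic variable is invariant
   within its function but not at the IPA level, whereas the address of an
   ordinary global variable is invariant at both levels.  */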

/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
   form of function invariant.  */

bool
is_gimple_min_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
   form of gimple minimal invariant.  */

bool
is_gimple_ip_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_ip_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a non-aggregate register variable.  */

bool
is_gimple_reg (tree t)
{
  if (virtual_operand_p (t))
    return false;

  if (TREE_CODE (t) == SSA_NAME)
    return true;

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss the variable changes they might imply.  All
     around, it seems safest not to do too much optimization with these
     at the tree level at all.  We'll have to rely on the rtl optimizers
     to clean this up, as there we've got all the appropriate bits
     exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}
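
/* For example, a non-volatile local 'int' variable whose address is never
   taken satisfies this predicate, while a volatile, global, or address-taken
   variable does not, since those must live in memory.  */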


/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */

bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}

/* Similarly, but accept hard registers as inputs to asm statements.  */

bool
is_gimple_asm_val (tree t)
{
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return true;

  return is_gimple_val (t);
}

/* Return true if T is a GIMPLE minimal lvalue.  */

bool
is_gimple_min_lval (tree t)
{
  if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
    return false;
  return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid function operand of a CALL_EXPR.  */

bool
is_gimple_call_addr (tree t)
{
  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
}

/* Return true if T is a valid address operand of a MEM_REF.  */

bool
is_gimple_mem_ref_addr (tree t)
{
  return (is_gimple_reg (t)
          || TREE_CODE (t) == INTEGER_CST
          || (TREE_CODE (t) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
                  || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
}

/* Hold trees marked addressable during expand.  */

static hash_set<tree> *mark_addressable_queue;

/* Mark X as addressable or queue it up if called during expand.  We
   don't want to apply it immediately during expand because decls are
   made addressable at that point due to RTL-only concerns, such as
   uses of memcpy for block moves, and TREE_ADDRESSABLE changes
   is_gimple_reg, which might make it seem like a variable that used
   to be a gimple_reg shouldn't have been an SSA name.  So we queue up
   this flag setting and only apply it when we're done with GIMPLE and
   only RTL issues matter.  */

static void
mark_addressable_1 (tree x)
{
  if (!currently_expanding_to_rtl)
    {
      TREE_ADDRESSABLE (x) = 1;
      return;
    }

  if (!mark_addressable_queue)
    mark_addressable_queue = new hash_set<tree>();
  mark_addressable_queue->add (x);
}

/* Adaptor for mark_addressable_1 for use in hash_set traversal.  */

bool
mark_addressable_2 (tree const &x, void * ATTRIBUTE_UNUSED = NULL)
{
  mark_addressable_1 (x);
  return false;
}

/* Mark all queued trees as addressable, and empty the queue.  To be
   called right after clearing CURRENTLY_EXPANDING_TO_RTL.  */

void
flush_mark_addressable_queue ()
{
  gcc_assert (!currently_expanding_to_rtl);
  if (mark_addressable_queue)
    {
      mark_addressable_queue->traverse<void*, mark_addressable_2> (NULL);
      delete mark_addressable_queue;
      mark_addressable_queue = NULL;
    }
}

/* Mark X addressable.  Unlike the langhook we expect X to be in gimple
   form and we don't do any syntax checking.  */

void
mark_addressable (tree x)
{
  while (handled_component_p (x))
    x = TREE_OPERAND (x, 0);
  if (TREE_CODE (x) == MEM_REF
      && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
    x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
  if (!VAR_P (x)
      && TREE_CODE (x) != PARM_DECL
      && TREE_CODE (x) != RESULT_DECL)
    return;
  mark_addressable_1 (x);

  /* Also mark the artificial SSA_NAME that points to the partition of X.  */
  if (TREE_CODE (x) == VAR_DECL
      && !DECL_EXTERNAL (x)
      && !TREE_STATIC (x)
      && cfun->gimple_df != NULL
      && cfun->gimple_df->decls_to_pointers != NULL)
    {
      tree *namep = cfun->gimple_df->decls_to_pointers->get (x);
      if (namep)
        mark_addressable_1 (*namep);
    }
}

/* Returns true iff T is a valid RHS for an assignment to a renamed
   user -- or front-end generated artificial -- variable.  */

bool
is_gimple_reg_rhs (tree t)
{
  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
}

#include "gt-gimple-expr.h"