/* C-compiler utilities for types and variables storage layout
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"
#include "regs.h"

/* Set to one when set_sizetype has been called.  */
static int sizetype_set;

/* List of types created before set_sizetype has been called.  We do not
   make this a GGC root since we want these nodes to be reclaimed.  */
static tree early_type_list;

/* Data type for the expressions representing sizes of data types.
   It is the first integer type laid out.  */
tree sizetype_tab[(int) TYPE_KIND_LAST];

/* If nonzero, this is an upper limit on alignment of structure fields.
   The value is measured in bits.  */
unsigned int maximum_field_alignment;

/* If nonzero, the alignment of a bitstring or (power-)set value, in bits.
   May be overridden by front-ends.  */
unsigned int set_alignment = 0;

/* Nonzero if all REFERENCE_TYPEs are internal and hence should be
   allocated in Pmode, not ptr_mode.  Set only by internal_reference_types
   called only by a front end.  */
static int reference_types_internal = 0;

static void finalize_record_size (record_layout_info);
static void finalize_type_size (tree);
static void place_union_field (record_layout_info, tree);
#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
                             HOST_WIDE_INT, tree);
#endif
extern void debug_rli (record_layout_info);
\f
/* SAVE_EXPRs for sizes of types and decls, waiting to be expanded.  */

static GTY(()) tree pending_sizes;

/* Show that REFERENCE_TYPES are internal and should be Pmode.  Called only
   by front end.  */

void
internal_reference_types (void)
{
  reference_types_internal = 1;
}

/* Get a list of all the objects put on the pending sizes list.  */

tree
get_pending_sizes (void)
{
  tree chain = pending_sizes;

  pending_sizes = 0;
  return chain;
}

/* Add EXPR to the pending sizes list.  */

void
put_pending_size (tree expr)
{
  /* Strip any simple arithmetic from EXPR to see if it has an underlying
     SAVE_EXPR.  */
  expr = skip_simple_arithmetic (expr);

  if (TREE_CODE (expr) == SAVE_EXPR)
    pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
}

/* Put a chain of objects into the pending sizes list, which must be
   empty.  */

void
put_pending_sizes (tree chain)
{
  if (pending_sizes)
    abort ();

  pending_sizes = chain;
}

/* Given a size SIZE that may not be a constant, return a SAVE_EXPR
   to serve as the actual size-expression for a type or decl.  */

tree
variable_size (tree size)
{
  tree save;

  /* If the language-processor is to take responsibility for variable-sized
     items (e.g., languages which have elaboration procedures like Ada),
     just return SIZE unchanged.  Likewise for self-referential sizes and
     constant sizes.  */
  if (TREE_CONSTANT (size)
      || lang_hooks.decls.global_bindings_p () < 0
      || CONTAINS_PLACEHOLDER_P (size))
    return size;

  size = save_expr (size);

  /* If an array with a variable number of elements is declared, and
     the elements require destruction, we will emit a cleanup for the
     array.  That cleanup is run both on normal exit from the block
     and in the exception-handler for the block.  Normally, when code
     is used in both ordinary code and in an exception handler it is
     `unsaved', i.e., all SAVE_EXPRs are recalculated.  However, we do
     not wish to do that here; the array-size is the same in both
     places.  */
  save = skip_simple_arithmetic (size);

  if (cfun && cfun->x_dont_save_pending_sizes_p)
    /* The front-end doesn't want us to keep a list of the expressions
       that determine sizes for variable size objects.  Trust it.  */
    return size;

  if (lang_hooks.decls.global_bindings_p ())
    {
      if (TREE_CONSTANT (size))
        error ("type size can't be explicitly evaluated");
      else
        error ("variable-size type declared outside of any function");

      return size_one_node;
    }

  put_pending_size (save);

  return size;
}
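/* For illustration: for a C99 variable-length array such as `int a[n];',
   the byte-size expression (roughly `n * sizeof (int)') is not constant,
   so it is wrapped in a SAVE_EXPR above and, unless the front end asked
   us not to, queued with put_pending_size so it is evaluated exactly once
   at a safe point.  */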
\f
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

/* Return the machine mode to use for a nonscalar of SIZE bits.  The
   mode must be in class CLASS, and have exactly that many value bits;
   it may have padding as well.  If LIMIT is nonzero, modes of wider
   than MAX_FIXED_MODE_SIZE will not be used.  */

enum machine_mode
mode_for_size (unsigned int size, enum mode_class class, int limit)
{
  enum machine_mode mode;

  if (limit && size > MAX_FIXED_MODE_SIZE)
    return BLKmode;

  /* Get the first mode which has this size, in the specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_PRECISION (mode) == size)
      return mode;

  return BLKmode;
}
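/* For example, on a typical 32-bit target mode_for_size (32, MODE_INT, 0)
   would return SImode and mode_for_size (16, MODE_INT, 0) HImode, while a
   size with no exactly matching mode (say 24 bits) yields BLKmode;
   smallest_mode_for_size below instead rounds such a request up to the
   next wider mode.  */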

/* Similar, except passed a tree node.  */

enum machine_mode
mode_for_size_tree (tree size, enum mode_class class, int limit)
{
  if (TREE_CODE (size) != INTEGER_CST
      || TREE_OVERFLOW (size)
      /* What we really want to say here is that the size can fit in a
         host integer, but we know there's no way we'd find a mode for
         this many bits, so there's no point in doing the precise test.  */
      || compare_tree_int (size, 1000) > 0)
    return BLKmode;
  else
    return mode_for_size (tree_low_cst (size, 1), class, limit);
}

/* Similar, but never return BLKmode; return the narrowest mode that
   contains at least the requested number of value bits.  */

enum machine_mode
smallest_mode_for_size (unsigned int size, enum mode_class class)
{
  enum machine_mode mode;

  /* Get the first mode which has at least this size, in the
     specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_PRECISION (mode) >= size)
      return mode;

  abort ();
}

/* Find an integer mode of the exact same size, or BLKmode on failure.  */

enum machine_mode
int_mode_for_mode (enum machine_mode mode)
{
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_INT:
    case MODE_PARTIAL_INT:
      break;

    case MODE_COMPLEX_INT:
    case MODE_COMPLEX_FLOAT:
    case MODE_FLOAT:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
      mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
      break;

    case MODE_RANDOM:
      if (mode == BLKmode)
        break;

      /* ... fall through ...  */

    case MODE_CC:
    default:
      abort ();
    }

  return mode;
}

/* Return the alignment of MODE.  This will be bounded by 1 and
   BIGGEST_ALIGNMENT.  */

unsigned int
get_mode_alignment (enum machine_mode mode)
{
  return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree t;

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      t = size_int_type (divisor - 1, TREE_TYPE (value));
      value = size_binop (PLUS_EXPR, value, t);
      t = size_int_type (-divisor, TREE_TYPE (value));
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      t = size_int_type (divisor, TREE_TYPE (value));
      value = size_binop (CEIL_DIV_EXPR, value, t);
      value = size_binop (MULT_EXPR, value, t);
    }

  return value;
}

/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree t;

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      t = size_int_type (-divisor, TREE_TYPE (value));
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      t = size_int_type (divisor, TREE_TYPE (value));
      value = size_binop (FLOOR_DIV_EXPR, value, t);
      value = size_binop (MULT_EXPR, value, t);
    }

  return value;
}
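/* For illustration: with a power-of-two divisor the two helpers above
   reduce to bit masking, e.g. round_up (value, 8) builds
   (value + 7) & -8 and round_down (value, 8) builds value & -8; for a
   divisor that is not a power of two they fall back to an explicit
   divide-and-multiply.  */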
\f
/* Subroutine of layout_decl: Force alignment required for the data type.
   But if the decl itself wants greater alignment, don't override that.  */

static inline void
do_type_align (tree type, tree decl)
{
  if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
    {
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      if (TREE_CODE (decl) == FIELD_DECL)
        DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
    }
}

/* Set the size, mode and alignment of a ..._DECL node.
   TYPE_DECL does need this for C++.
   Note that LABEL_DECL and CONST_DECL nodes do not need this,
   and FUNCTION_DECL nodes have them set up in a special (and simple) way.
   Don't call layout_decl for them.

   KNOWN_ALIGN is the amount of alignment we can assume this
   decl has with no special effort.  It is relevant only for FIELD_DECLs
   and depends on the previous fields.
   All that matters about KNOWN_ALIGN is which powers of 2 divide it.
   If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
   the record will be aligned to suit.  */

void
layout_decl (tree decl, unsigned int known_align)
{
  tree type = TREE_TYPE (decl);
  enum tree_code code = TREE_CODE (decl);
  rtx rtl = NULL_RTX;

  if (code == CONST_DECL)
    return;
  else if (code != VAR_DECL && code != PARM_DECL && code != RESULT_DECL
           && code != TYPE_DECL && code != FIELD_DECL)
    abort ();

  rtl = DECL_RTL_IF_SET (decl);

  if (type == error_mark_node)
    type = void_type_node;

  /* Usually the size and mode come from the data type without change,
     however, the front-end may set the explicit width of the field, so its
     size may not be the same as the size of its type.  This happens with
     bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
     also happens with other fields.  For example, the C++ front-end creates
     zero-sized fields corresponding to empty base classes, and depends on
     layout_type setting DECL_FIELD_BITPOS correctly for the field.  Set the
     size in bytes from the size in bits.  If we have already set the mode,
     don't set it again since we can be called twice for FIELD_DECLs.  */

  DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
  if (DECL_MODE (decl) == VOIDmode)
    DECL_MODE (decl) = TYPE_MODE (type);

  if (DECL_SIZE (decl) == 0)
    {
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
    }
  else if (DECL_SIZE_UNIT (decl) == 0)
    DECL_SIZE_UNIT (decl)
      = convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
                                       bitsize_unit_node));

  if (code != FIELD_DECL)
    /* For non-fields, update the alignment from the type.  */
    do_type_align (type, decl);
  else
    /* For fields, it's a bit more complicated...  */
    {
      bool old_user_align = DECL_USER_ALIGN (decl);

      if (DECL_BIT_FIELD (decl))
        {
          DECL_BIT_FIELD_TYPE (decl) = type;

          /* A zero-length bit-field affects the alignment of the next
             field.  */
          if (integer_zerop (DECL_SIZE (decl))
              && ! DECL_PACKED (decl)
              && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
            {
#ifdef PCC_BITFIELD_TYPE_MATTERS
              if (PCC_BITFIELD_TYPE_MATTERS)
                do_type_align (type, decl);
              else
#endif
                {
#ifdef EMPTY_FIELD_BOUNDARY
                  if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
                    {
                      DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY;
                      DECL_USER_ALIGN (decl) = 0;
                    }
#endif
                }
            }

          /* See if we can use an ordinary integer mode for a bit-field.
             Conditions are: a fixed size that is correct for another mode
             and occupying a complete byte or bytes on proper boundary.  */
          if (TYPE_SIZE (type) != 0
              && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
              && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
            {
              enum machine_mode xmode
                = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);

              if (xmode != BLKmode
                  && (known_align == 0
                      || known_align >= GET_MODE_ALIGNMENT (xmode)))
                {
                  DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
                                           DECL_ALIGN (decl));
                  DECL_MODE (decl) = xmode;
                  DECL_BIT_FIELD (decl) = 0;
                }
            }

          /* Turn off DECL_BIT_FIELD if we won't need it set.  */
          if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
              && known_align >= TYPE_ALIGN (type)
              && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
            DECL_BIT_FIELD (decl) = 0;
        }
      else if (DECL_PACKED (decl) && DECL_USER_ALIGN (decl))
        /* Don't touch DECL_ALIGN.  For other packed fields, go ahead and
           round up; we'll reduce it again below.  We want packing to
           supersede USER_ALIGN inherited from the type, but defer to
           alignment explicitly specified on the field decl.  */;
      else
        do_type_align (type, decl);

      /* If the field is of variable size, we can't misalign it since we
         have no way to make a temporary to align the result.  But this
         isn't an issue if the decl is not addressable.  Likewise if it
         is of unknown size.

         Note that do_type_align may set DECL_USER_ALIGN, so we need to
         check old_user_align instead.  */
      if (DECL_PACKED (decl)
          && !old_user_align
          && (DECL_NONADDRESSABLE_P (decl)
              || DECL_SIZE_UNIT (decl) == 0
              || TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST))
        DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);

      if (! DECL_USER_ALIGN (decl) && ! DECL_PACKED (decl))
        {
          /* Some targets (i.e. i386, VMS) limit struct field alignment
             to a lower boundary than alignment of variables unless
             it was overridden by attribute aligned.  */
#ifdef BIGGEST_FIELD_ALIGNMENT
          DECL_ALIGN (decl)
            = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT);
#endif
#ifdef ADJUST_FIELD_ALIGN
          DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl));
#endif
        }

      /* Should this be controlled by DECL_USER_ALIGN, too?  */
      if (maximum_field_alignment != 0)
        DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), maximum_field_alignment);
    }

  /* Evaluate nonconstant size only once, either now or as soon as safe.  */
  if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
  if (DECL_SIZE_UNIT (decl) != 0
      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
    DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));

  /* If requested, warn about definitions of large data objects.  */
  if (warn_larger_than
      && (code == VAR_DECL || code == PARM_DECL)
      && ! DECL_EXTERNAL (decl))
    {
      tree size = DECL_SIZE_UNIT (decl);

      if (size != 0 && TREE_CODE (size) == INTEGER_CST
          && compare_tree_int (size, larger_than_size) > 0)
        {
          int size_as_int = TREE_INT_CST_LOW (size);

          if (compare_tree_int (size, size_as_int) == 0)
            warning ("%Jsize of '%D' is %d bytes", decl, decl, size_as_int);
          else
            warning ("%Jsize of '%D' is larger than %d bytes",
                     decl, decl, larger_than_size);
        }
    }

  /* If the RTL was already set, update its mode and mem attributes.  */
  if (rtl)
    {
      PUT_MODE (rtl, DECL_MODE (decl));
      SET_DECL_RTL (decl, 0);
      set_mem_attributes (rtl, decl, 1);
      SET_DECL_RTL (decl, rtl);
    }
}
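/* For illustration: for a field declared `int f : 3;' layout_decl records
   the 3-bit DECL_SIZE chosen by the front end rather than the type's size,
   and, if the field happens to land on a suitable boundary, the code above
   may turn DECL_BIT_FIELD off and give it an ordinary integer mode so that
   later accesses can use a simple load or store.  */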

/* Given a VAR_DECL, PARM_DECL or RESULT_DECL, clears the results of
   a previous call to layout_decl and calls it again.  */

void
relayout_decl (tree decl)
{
  DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
  DECL_MODE (decl) = VOIDmode;
  DECL_ALIGN (decl) = 0;
  SET_DECL_RTL (decl, 0);

  layout_decl (decl, 0);
}
\f
/* Hook for a front-end function that can modify the record layout as needed
   immediately before it is finalized.  */

void (*lang_adjust_rli) (record_layout_info) = 0;

void
set_lang_adjust_rli (void (*f) (record_layout_info))
{
  lang_adjust_rli = f;
}

/* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
   QUAL_UNION_TYPE.  Return a pointer to a struct record_layout_info which
   is to be passed to all other layout functions for this record.  It is the
   responsibility of the caller to call `free' for the storage returned.
   Note that garbage collection is not permitted until we finish laying
   out the record.  */

record_layout_info
start_record_layout (tree t)
{
  record_layout_info rli = xmalloc (sizeof (struct record_layout_info_s));

  rli->t = t;

  /* If the type has a minimum specified alignment (via an attribute
     declaration, for example) use it -- otherwise, start with a
     one-byte alignment.  */
  rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
  rli->unpacked_align = rli->record_align;
  rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);

#ifdef STRUCTURE_SIZE_BOUNDARY
  /* Packed structures don't need to have minimum size.  */
  if (! TYPE_PACKED (t))
    rli->record_align = MAX (rli->record_align, (unsigned) STRUCTURE_SIZE_BOUNDARY);
#endif

  rli->offset = size_zero_node;
  rli->bitpos = bitsize_zero_node;
  rli->prev_field = 0;
  rli->pending_statics = 0;
  rli->packed_maybe_necessary = 0;

  return rli;
}

/* These four routines perform computations that convert between
   the offset/bitpos forms and byte and bit offsets.  */

tree
bit_from_pos (tree offset, tree bitpos)
{
  return size_binop (PLUS_EXPR, bitpos,
                     size_binop (MULT_EXPR, convert (bitsizetype, offset),
                                 bitsize_unit_node));
}

tree
byte_from_pos (tree offset, tree bitpos)
{
  return size_binop (PLUS_EXPR, offset,
                     convert (sizetype,
                              size_binop (TRUNC_DIV_EXPR, bitpos,
                                          bitsize_unit_node)));
}

void
pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
              tree pos)
{
  *poffset = size_binop (MULT_EXPR,
                         convert (sizetype,
                                  size_binop (FLOOR_DIV_EXPR, pos,
                                              bitsize_int (off_align))),
                         size_int (off_align / BITS_PER_UNIT));
  *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
}

/* Given a pointer to bit and byte offsets and an offset alignment,
   normalize the offsets so they are within the alignment.  */

void
normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
{
  /* If the bit position is now larger than it should be, adjust it
     downwards.  */
  if (compare_tree_int (*pbitpos, off_align) >= 0)
    {
      tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
                                      bitsize_int (off_align));

      *poffset
        = size_binop (PLUS_EXPR, *poffset,
                      size_binop (MULT_EXPR, convert (sizetype, extra_aligns),
                                  size_int (off_align / BITS_PER_UNIT)));

      *pbitpos
        = size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
    }
}
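/* For illustration: a position is kept as a byte OFFSET plus a bit BITPOS
   that should stay below OFF_ALIGN.  With off_align == 32, a raw bit
   position of 70 would normalize to offset 8 bytes, bitpos 6 bits, and
   bit_from_pos/byte_from_pos recover 70 bits and 8 bytes from that split
   form.  */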

/* Print debugging information about the information in RLI.  */

void
debug_rli (record_layout_info rli)
{
  print_node_brief (stderr, "type", rli->t, 0);
  print_node_brief (stderr, "\noffset", rli->offset, 0);
  print_node_brief (stderr, " bitpos", rli->bitpos, 0);

  fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
           rli->record_align, rli->unpacked_align,
           rli->offset_align);
  if (rli->packed_maybe_necessary)
    fprintf (stderr, "packed may be necessary\n");

  if (rli->pending_statics)
    {
      fprintf (stderr, "pending statics:\n");
      debug_tree (rli->pending_statics);
    }
}

/* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
   BITPOS if necessary to keep BITPOS below OFFSET_ALIGN.  */

void
normalize_rli (record_layout_info rli)
{
  normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
}

/* Returns the size in bytes allocated so far.  */

tree
rli_size_unit_so_far (record_layout_info rli)
{
  return byte_from_pos (rli->offset, rli->bitpos);
}

/* Returns the size in bits allocated so far.  */

tree
rli_size_so_far (record_layout_info rli)
{
  return bit_from_pos (rli->offset, rli->bitpos);
}

/* FIELD is about to be added to RLI->T.  The alignment (in bits) of
   the next available location is given by KNOWN_ALIGN.  Update the
   variable alignment fields in RLI, and return the alignment to give
   the FIELD.  */

unsigned int
update_alignment_for_field (record_layout_info rli, tree field,
                            unsigned int known_align)
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);
  /* True if the field was explicitly aligned by the user.  */
  bool user_align;
  bool is_bitfield;

  /* Lay out the field so we know what alignment it needs.  */
  layout_decl (field, known_align);
  desired_align = DECL_ALIGN (field);
  user_align = DECL_USER_ALIGN (field);

  is_bitfield = (type != error_mark_node
                 && DECL_BIT_FIELD_TYPE (field)
                 && ! integer_zerop (TYPE_SIZE (type)));

  /* Record must have at least as much alignment as any field.
     Otherwise, the alignment of the field within the record is
     meaningless.  */
  if (is_bitfield && targetm.ms_bitfield_layout_p (rli->t))
    {
      /* Here, the alignment of the underlying type of a bitfield can
         affect the alignment of a record; even a zero-sized field
         can do this.  The alignment should be to the alignment of
         the type, except that for zero-size bitfields this only
         applies if there was an immediately prior, nonzero-size
         bitfield.  (That's the way it is, experimentally.) */
      if (! integer_zerop (DECL_SIZE (field))
          ? ! DECL_PACKED (field)
          : (rli->prev_field
             && DECL_BIT_FIELD_TYPE (rli->prev_field)
             && ! integer_zerop (DECL_SIZE (rli->prev_field))))
        {
          unsigned int type_align = TYPE_ALIGN (type);
          type_align = MAX (type_align, desired_align);
          if (maximum_field_alignment != 0)
            type_align = MIN (type_align, maximum_field_alignment);
          rli->record_align = MAX (rli->record_align, type_align);
          rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
        }
    }
#ifdef PCC_BITFIELD_TYPE_MATTERS
  else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
    {
      /* Named bit-fields cause the entire structure to have the
         alignment implied by their type.  Some targets also apply the same
         rules to unnamed bitfields.  */
      if (DECL_NAME (field) != 0
          || targetm.align_anon_bitfield ())
        {
          unsigned int type_align = TYPE_ALIGN (type);

#ifdef ADJUST_FIELD_ALIGN
          if (! TYPE_USER_ALIGN (type))
            type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

          if (maximum_field_alignment != 0)
            type_align = MIN (type_align, maximum_field_alignment);
          else if (DECL_PACKED (field))
            type_align = MIN (type_align, BITS_PER_UNIT);

          /* The alignment of the record is increased to the maximum
             of the current alignment, the alignment indicated on the
             field (i.e., the alignment specified by an __aligned__
             attribute), and the alignment indicated by the type of
             the field.  */
          rli->record_align = MAX (rli->record_align, desired_align);
          rli->record_align = MAX (rli->record_align, type_align);

          if (warn_packed)
            rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
          user_align |= TYPE_USER_ALIGN (type);
        }
    }
#endif
  else
    {
      rli->record_align = MAX (rli->record_align, desired_align);
      rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
    }

  TYPE_USER_ALIGN (rli->t) |= user_align;

  return desired_align;
}

/* Called from place_field to handle unions.  */

static void
place_union_field (record_layout_info rli, tree field)
{
  update_alignment_for_field (rli, field, /*known_align=*/0);

  DECL_FIELD_OFFSET (field) = size_zero_node;
  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
  SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);

  /* We assume the union's size will be a multiple of a byte so we don't
     bother with BITPOS.  */
  if (TREE_CODE (rli->t) == UNION_TYPE)
    rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
  else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
    rli->offset = fold (build3 (COND_EXPR, sizetype,
                                DECL_QUALIFIER (field),
                                DECL_SIZE_UNIT (field), rli->offset));
}

#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
/* A bitfield of SIZE with a required access alignment of ALIGN is allocated
   at BYTE_OFFSET / BIT_OFFSET.  Return nonzero if the field would span more
   units of alignment than the underlying TYPE.  */
static int
excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
                  HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
{
  /* Note that the calculation of OFFSET might overflow; we calculate it so
     that we still get the right result as long as ALIGN is a power of two.  */
  unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;

  offset = offset % align;
  return ((offset + size + align - 1) / align
          > ((unsigned HOST_WIDE_INT) tree_low_cst (TYPE_SIZE (type), 1)
             / align));
}
#endif
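/* For illustration: with a 32-bit int type and type_align == 32, a 12-bit
   bitfield that would start at bit offset 24 runs through bit 35 and so
   crosses a 32-bit unit boundary; excess_unit_span returns nonzero and the
   callers below bump the bit position to the next boundary first.  */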

/* RLI contains information about the layout of a RECORD_TYPE.  FIELD
   is a FIELD_DECL to be added after those fields already present in
   T.  (FIELD is not actually added to the TYPE_FIELDS list here;
   callers that desire that behavior must manually perform that step.)  */

void
place_field (record_layout_info rli, tree field)
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The alignment FIELD would have if we just dropped it into the
     record as it presently stands.  */
  unsigned int known_align;
  unsigned int actual_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);

  if (TREE_CODE (field) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    return;

  /* If FIELD is static, then treat it like a separate variable, not
     really like a structure field.  If it is a FUNCTION_DECL, it's a
     method.  In both cases, all we do is lay out the decl, and we do
     it *after* the record is laid out.  */
  if (TREE_CODE (field) == VAR_DECL)
    {
      rli->pending_statics = tree_cons (NULL_TREE, field,
                                        rli->pending_statics);
      return;
    }

  /* Enumerators and enum types which are local to this class need not
     be laid out.  Likewise for initialized constant fields.  */
  else if (TREE_CODE (field) != FIELD_DECL)
    return;

  /* Unions are laid out very differently than records, so split
     that code off to another function.  */
  else if (TREE_CODE (rli->t) != RECORD_TYPE)
    {
      place_union_field (rli, field);
      return;
    }

  /* Work out the known alignment so far.  Note that A & (-A) is the
     value of the least-significant bit in A that is one.  */
  if (! integer_zerop (rli->bitpos))
    known_align = (tree_low_cst (rli->bitpos, 1)
                   & - tree_low_cst (rli->bitpos, 1));
  else if (integer_zerop (rli->offset))
    known_align = BIGGEST_ALIGNMENT;
  else if (host_integerp (rli->offset, 1))
    known_align = (BITS_PER_UNIT
                   * (tree_low_cst (rli->offset, 1)
                      & - tree_low_cst (rli->offset, 1)));
  else
    known_align = rli->offset_align;

  desired_align = update_alignment_for_field (rli, field, known_align);

  if (warn_packed && DECL_PACKED (field))
    {
      if (known_align >= TYPE_ALIGN (type))
        {
          if (TYPE_ALIGN (type) > desired_align)
            {
              if (STRICT_ALIGNMENT)
                warning ("%Jpacked attribute causes inefficient alignment "
                         "for '%D'", field, field);
              else
                warning ("%Jpacked attribute is unnecessary for '%D'",
                         field, field);
            }
        }
      else
        rli->packed_maybe_necessary = 1;
    }

  /* Does this field automatically have alignment it needs by virtue
     of the fields that precede it and the record's own alignment?  */
  if (known_align < desired_align)
    {
      /* No, we need to skip space before this field.
         Bump the cumulative size to multiple of field alignment.  */

      if (warn_padded)
        warning ("%Jpadding struct to align '%D'", field, field);

      /* If the alignment is still within offset_align, just align
         the bit position.  */
      if (desired_align < rli->offset_align)
        rli->bitpos = round_up (rli->bitpos, desired_align);
      else
        {
          /* First adjust OFFSET by the partial bits, then align.  */
          rli->offset
            = size_binop (PLUS_EXPR, rli->offset,
                          convert (sizetype,
                                   size_binop (CEIL_DIV_EXPR, rli->bitpos,
                                               bitsize_unit_node)));
          rli->bitpos = bitsize_zero_node;

          rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
        }

      if (! TREE_CONSTANT (rli->offset))
        rli->offset_align = desired_align;

    }

  /* Handle compatibility with PCC.  Note that if the record has any
     variable-sized fields, we need not worry about compatibility.  */
#ifdef PCC_BITFIELD_TYPE_MATTERS
  if (PCC_BITFIELD_TYPE_MATTERS
      && ! targetm.ms_bitfield_layout_p (rli->t)
      && TREE_CODE (field) == FIELD_DECL
      && type != error_mark_node
      && DECL_BIT_FIELD (field)
      && ! DECL_PACKED (field)
      && maximum_field_alignment == 0
      && ! integer_zerop (DECL_SIZE (field))
      && host_integerp (DECL_SIZE (field), 1)
      && host_integerp (rli->offset, 1)
      && host_integerp (TYPE_SIZE (type), 1))
    {
      unsigned int type_align = TYPE_ALIGN (type);
      tree dsize = DECL_SIZE (field);
      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);

#ifdef ADJUST_FIELD_ALIGN
      if (! TYPE_USER_ALIGN (type))
        type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

      /* A bit field may not span more units of alignment of its type
         than its type itself.  Advance to next boundary if necessary.  */
      if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
        rli->bitpos = round_up (rli->bitpos, type_align);

      TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
    }
#endif

#ifdef BITFIELD_NBYTES_LIMITED
  if (BITFIELD_NBYTES_LIMITED
      && ! targetm.ms_bitfield_layout_p (rli->t)
      && TREE_CODE (field) == FIELD_DECL
      && type != error_mark_node
      && DECL_BIT_FIELD_TYPE (field)
      && ! DECL_PACKED (field)
      && ! integer_zerop (DECL_SIZE (field))
      && host_integerp (DECL_SIZE (field), 1)
      && host_integerp (rli->offset, 1)
      && host_integerp (TYPE_SIZE (type), 1))
    {
      unsigned int type_align = TYPE_ALIGN (type);
      tree dsize = DECL_SIZE (field);
      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);

#ifdef ADJUST_FIELD_ALIGN
      if (! TYPE_USER_ALIGN (type))
        type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

      if (maximum_field_alignment != 0)
        type_align = MIN (type_align, maximum_field_alignment);
      /* ??? This test is opposite the test in the containing if
         statement, so this code is unreachable currently.  */
      else if (DECL_PACKED (field))
        type_align = MIN (type_align, BITS_PER_UNIT);

      /* A bit field may not span the unit of alignment of its type.
         Advance to next boundary if necessary.  */
      if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
        rli->bitpos = round_up (rli->bitpos, type_align);

      TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
    }
#endif

  /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
     A subtlety:
        When a bit field is inserted into a packed record, the whole
        size of the underlying type is used by one or more same-size
        adjacent bitfields.  (That is, if its long:3, 32 bits is
        used in the record, and any additional adjacent long bitfields are
        packed into the same chunk of 32 bits.  However, if the size
        changes, a new field of that size is allocated.)  In an unpacked
        record, this is the same as using alignment, but not equivalent
        when packing.

     Note: for compatibility, we use the type size, not the type alignment
     to determine alignment, since that matches the documentation */

  if (targetm.ms_bitfield_layout_p (rli->t)
      && ((DECL_BIT_FIELD_TYPE (field) && ! DECL_PACKED (field))
          || (rli->prev_field && ! DECL_PACKED (rli->prev_field))))
    {
      /* At this point, either the prior or current are bitfields,
         (possibly both), and we're dealing with MS packing.  */
      tree prev_saved = rli->prev_field;

      /* Is the prior field a bitfield?  If so, handle "runs" of same
         type size fields.  */
      if (rli->prev_field /* necessarily a bitfield if it exists.  */)
        {
          /* If both are bitfields, nonzero, and the same size, this is
             the middle of a run.  Zero declared size fields are special
             and handled as "end of run".  (Note: it's nonzero declared
             size, but equal type sizes!)  (Since we know that both
             the current and previous fields are bitfields by the
             time we check it, DECL_SIZE must be present for both.)  */
          if (DECL_BIT_FIELD_TYPE (field)
              && !integer_zerop (DECL_SIZE (field))
              && !integer_zerop (DECL_SIZE (rli->prev_field))
              && host_integerp (DECL_SIZE (rli->prev_field), 0)
              && host_integerp (TYPE_SIZE (type), 0)
              && simple_cst_equal (TYPE_SIZE (type),
                                   TYPE_SIZE (TREE_TYPE (rli->prev_field))))
            {
              /* We're in the middle of a run of equal type size fields; make
                 sure we realign if we run out of bits.  (Not decl size,
                 type size!)  */
              HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 0);

              if (rli->remaining_in_alignment < bitsize)
                {
                  /* out of bits; bump up to next 'word'.  */
                  rli->offset = DECL_FIELD_OFFSET (rli->prev_field);
                  rli->bitpos
                    = size_binop (PLUS_EXPR, TYPE_SIZE (type),
                                  DECL_FIELD_BIT_OFFSET (rli->prev_field));
                  rli->prev_field = field;
                  rli->remaining_in_alignment
                    = tree_low_cst (TYPE_SIZE (type), 0);
                }

              rli->remaining_in_alignment -= bitsize;
            }
          else
            {
              /* End of a run: if leaving a run of bitfields of the same type
                 size, we have to "use up" the rest of the bits of the type
                 size.

                 Compute the new position as the sum of the size for the prior
                 type and where we first started working on that type.
                 Note: since the beginning of the field was aligned then
                 of course the end will be too.  No round needed.  */

              if (!integer_zerop (DECL_SIZE (rli->prev_field)))
                {
                  tree type_size = TYPE_SIZE (TREE_TYPE (rli->prev_field));

                  rli->bitpos
                    = size_binop (PLUS_EXPR, type_size,
                                  DECL_FIELD_BIT_OFFSET (rli->prev_field));
                }
              else
                /* We "use up" size zero fields; the code below should behave
                   as if the prior field was not a bitfield.  */
                prev_saved = NULL;

              /* Cause a new bitfield to be captured, either this time (if
                 currently a bitfield) or next time we see one.  */
              if (!DECL_BIT_FIELD_TYPE(field)
                  || integer_zerop (DECL_SIZE (field)))
                rli->prev_field = NULL;
            }

          normalize_rli (rli);
        }

      /* If we're starting a new run of same size type bitfields
         (or a run of non-bitfields), set up the "first of the run"
         fields.

         That is, if the current field is not a bitfield, or if there
         was a prior bitfield the type sizes differ, or if there wasn't
         a prior bitfield the size of the current field is nonzero.

         Note: we must be sure to test ONLY the type size if there was
         a prior bitfield and ONLY for the current field being zero if
         there wasn't.  */

      if (!DECL_BIT_FIELD_TYPE (field)
          || ( prev_saved != NULL
               ? !simple_cst_equal (TYPE_SIZE (type),
                                    TYPE_SIZE (TREE_TYPE (prev_saved)))
               : !integer_zerop (DECL_SIZE (field)) ))
        {
          /* Never smaller than a byte for compatibility.  */
          unsigned int type_align = BITS_PER_UNIT;

          /* (When not a bitfield), we could be seeing a flex array (with
             no DECL_SIZE).  Since we won't be using remaining_in_alignment
             until we see a bitfield (and come by here again) we just skip
             calculating it.  */
          if (DECL_SIZE (field) != NULL
              && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 0)
              && host_integerp (DECL_SIZE (field), 0))
            rli->remaining_in_alignment
              = tree_low_cst (TYPE_SIZE (TREE_TYPE(field)), 0)
                - tree_low_cst (DECL_SIZE (field), 0);

          /* Now align (conventionally) for the new type.  */
          if (!DECL_PACKED(field))
            type_align = MAX(TYPE_ALIGN (type), type_align);

          if (prev_saved
              && DECL_BIT_FIELD_TYPE (prev_saved)
              /* If the previous bit-field is zero-sized, we've already
                 accounted for its alignment needs (or ignored it, if
                 appropriate) while placing it.  */
              && ! integer_zerop (DECL_SIZE (prev_saved)))
            type_align = MAX (type_align,
                              TYPE_ALIGN (TREE_TYPE (prev_saved)));

          if (maximum_field_alignment != 0)
            type_align = MIN (type_align, maximum_field_alignment);

          rli->bitpos = round_up (rli->bitpos, type_align);

          /* If we really aligned, don't allow subsequent bitfields
             to undo that.  */
          rli->prev_field = NULL;
        }
    }

  /* Offset so far becomes the position of this field after normalizing.  */
  normalize_rli (rli);
  DECL_FIELD_OFFSET (field) = rli->offset;
  DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
  SET_DECL_OFFSET_ALIGN (field, rli->offset_align);

  /* If this field ended up more aligned than we thought it would be (we
     approximate this by seeing if its position changed), lay out the field
     again; perhaps we can use an integral mode for it now.  */
  if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
    actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                    & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
  else if (integer_zerop (DECL_FIELD_OFFSET (field)))
    actual_align = BIGGEST_ALIGNMENT;
  else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
    actual_align = (BITS_PER_UNIT
                    * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
                       & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
  else
    actual_align = DECL_OFFSET_ALIGN (field);

  if (known_align != actual_align)
    layout_decl (field, actual_align);

  /* Only the MS bitfields use this.  */
  if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE(field))
    rli->prev_field = field;

  /* Now add size of this field to the size of the record.  If the size is
     not constant, treat the field as being a multiple of bytes and just
     adjust the offset, resetting the bit position.  Otherwise, apportion the
     size amongst the bit position and offset.  First handle the case of an
     unspecified size, which can happen when we have an invalid nested struct
     definition, such as struct j { struct j { int i; } }.  The error message
     is printed in finish_struct.  */
  if (DECL_SIZE (field) == 0)
    /* Do nothing.  */;
  else if (TREE_CODE (DECL_SIZE_UNIT (field)) != INTEGER_CST
           || TREE_CONSTANT_OVERFLOW (DECL_SIZE_UNIT (field)))
    {
      rli->offset
        = size_binop (PLUS_EXPR, rli->offset,
                      convert (sizetype,
                               size_binop (CEIL_DIV_EXPR, rli->bitpos,
                                           bitsize_unit_node)));
      rli->offset
        = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
      rli->bitpos = bitsize_zero_node;
      rli->offset_align = MIN (rli->offset_align, desired_align);
    }
  else
    {
      rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
      normalize_rli (rli);
    }
}
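/* For illustration: when place_field lays out `struct { char c; int i; }'
   on a typical target with 4-byte int alignment, `c' lands at offset 0,
   known_align for `i' is then only 8 bits, so the code above pads the bit
   position up to 32 bits and `i' is placed at byte offset 4; the record
   then becomes 8 bytes once finalize_record_size rounds the total up.  */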

/* Assuming that all the fields have been laid out, this function uses
   RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
   indicated by RLI.  */

static void
finalize_record_size (record_layout_info rli)
{
  tree unpadded_size, unpadded_size_unit;

  /* Now we want just byte and bit offsets, so set the offset alignment
     to be a byte and then normalize.  */
  rli->offset_align = BITS_PER_UNIT;
  normalize_rli (rli);

  /* Determine the desired alignment.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
                                          rli->record_align);
#else
  TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
#endif

  /* Compute the size so far.  Be sure to allow for extra bits in the
     size in bytes.  We have guaranteed above that it will be no more
     than a single byte.  */
  unpadded_size = rli_size_so_far (rli);
  unpadded_size_unit = rli_size_unit_so_far (rli);
  if (! integer_zerop (rli->bitpos))
    unpadded_size_unit
      = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);

  /* Round the size up to be a multiple of the required alignment.  */
  TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t) = round_up (unpadded_size_unit,
                                      TYPE_ALIGN (rli->t) / BITS_PER_UNIT);

  if (warn_padded && TREE_CONSTANT (unpadded_size)
      && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
    warning ("padding struct size to alignment boundary");

  if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
      && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
      && TREE_CONSTANT (unpadded_size))
    {
      tree unpacked_size;

#ifdef ROUND_TYPE_ALIGN
      rli->unpacked_align
        = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
#else
      rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
#endif

      unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
      if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
        {
          TYPE_PACKED (rli->t) = 0;

          if (TYPE_NAME (rli->t))
            {
              const char *name;

              if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
                name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
              else
                name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));

              if (STRICT_ALIGNMENT)
                warning ("packed attribute causes inefficient alignment for `%s'", name);
              else
                warning ("packed attribute is unnecessary for `%s'", name);
            }
          else
            {
              if (STRICT_ALIGNMENT)
                warning ("packed attribute causes inefficient alignment");
              else
                warning ("packed attribute is unnecessary");
            }
        }
    }
}

/* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE).  */

void
compute_record_mode (tree type)
{
  tree field;
  enum machine_mode mode = VOIDmode;

  /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
     However, if possible, we use a mode that fits in a register
     instead, in order to allow for better optimization down the
     line.  */
  TYPE_MODE (type) = BLKmode;

  if (! host_integerp (TYPE_SIZE (type), 1))
    return;

  /* A record which has any BLKmode members must itself be
     BLKmode; it can't go in a register.  Unless the member is
     BLKmode only because it isn't aligned.  */
  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    {
      if (TREE_CODE (field) != FIELD_DECL)
        continue;

      if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
          || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
              && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
              && !(TYPE_SIZE (TREE_TYPE (field)) != 0
                   && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
          || ! host_integerp (bit_position (field), 1)
          || DECL_SIZE (field) == 0
          || ! host_integerp (DECL_SIZE (field), 1))
        return;

      /* If this field is the whole struct, remember its mode so
         that, say, we can put a double in a class into a DF
         register instead of forcing it to live in the stack.  */
      if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
        mode = DECL_MODE (field);

#ifdef MEMBER_TYPE_FORCES_BLK
      /* With some targets, eg. c4x, it is sub-optimal
         to access an aligned BLKmode structure as a scalar.  */

      if (MEMBER_TYPE_FORCES_BLK (field, mode))
        return;
#endif /* MEMBER_TYPE_FORCES_BLK  */
    }

  /* If we only have one real field; use its mode.  This only applies to
     RECORD_TYPE.  This does not apply to unions.  */
  if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode)
    TYPE_MODE (type) = mode;
  else
    TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);

  /* If structure's known alignment is less than what the scalar
     mode would need, and it matters, then stick with BLKmode.  */
  if (TYPE_MODE (type) != BLKmode
      && STRICT_ALIGNMENT
      && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
            || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
    {
      /* If this is the only reason this type is BLKmode, then
         don't force containing types to be BLKmode.  */
      TYPE_NO_FORCE_BLK (type) = 1;
      TYPE_MODE (type) = BLKmode;
    }
}
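/* For illustration: a record whose only real field is a double typically
   ends up with DFmode here, so objects of that class can live in a
   floating-point register; a record containing a BLKmode member, or one
   whose size matches no integer mode, keeps BLKmode instead.  */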
9328904c
MM
1372
1373/* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1374 out. */
1375
1376static void
46c5ad27 1377finalize_type_size (tree type)
9328904c
MM
1378{
1379 /* Normally, use the alignment corresponding to the mode chosen.
1380 However, where strict alignment is not required, avoid
1381 over-aligning structures, since most compilers do not do this
1382 alignment. */
1383
1384 if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
1385 && (STRICT_ALIGNMENT
1386 || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
1387 && TREE_CODE (type) != QUAL_UNION_TYPE
1388 && TREE_CODE (type) != ARRAY_TYPE)))
11cf4d18
JJ
1389 {
1390 TYPE_ALIGN (type) = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1391 TYPE_USER_ALIGN (type) = 0;
1392 }
9328904c
MM
1393
1394 /* Do machine-dependent extra alignment. */
1395#ifdef ROUND_TYPE_ALIGN
1396 TYPE_ALIGN (type)
1397 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
1398#endif
1399
9328904c 1400 /* If we failed to find a simple way to calculate the unit size
770ae6cc 1401 of the type, find it by division. */
9328904c
MM
1402 if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1403 /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
1404 result will fit in sizetype. We will get more efficient code using
1405 sizetype, so we force a conversion. */
1406 TYPE_SIZE_UNIT (type)
1407 = convert (sizetype,
1408 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
770ae6cc 1409 bitsize_unit_node));
9328904c 1410
770ae6cc
RK
1411 if (TYPE_SIZE (type) != 0)
1412 {
770ae6cc
RK
1413 TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
1414 TYPE_SIZE_UNIT (type)
1415 = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN (type) / BITS_PER_UNIT);
770ae6cc
RK
1416 }
1417
1418 /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
1419 if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1420 TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
9328904c
MM
1421 if (TYPE_SIZE_UNIT (type) != 0
1422 && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
1423 TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
1424
1425 /* Also layout any other variants of the type. */
1426 if (TYPE_NEXT_VARIANT (type)
1427 || type != TYPE_MAIN_VARIANT (type))
1428 {
1429 tree variant;
1430 /* Record layout info of this variant. */
1431 tree size = TYPE_SIZE (type);
1432 tree size_unit = TYPE_SIZE_UNIT (type);
1433 unsigned int align = TYPE_ALIGN (type);
11cf4d18 1434 unsigned int user_align = TYPE_USER_ALIGN (type);
9328904c
MM
1435 enum machine_mode mode = TYPE_MODE (type);
1436
1437 /* Copy it into all variants. */
1438 for (variant = TYPE_MAIN_VARIANT (type);
1439 variant != 0;
1440 variant = TYPE_NEXT_VARIANT (variant))
1441 {
1442 TYPE_SIZE (variant) = size;
1443 TYPE_SIZE_UNIT (variant) = size_unit;
1444 TYPE_ALIGN (variant) = align;
11cf4d18 1445 TYPE_USER_ALIGN (variant) = user_align;
9328904c
MM
1446 TYPE_MODE (variant) = mode;
1447 }
1448 }
1449}

/* Do all of the work required to lay out the type indicated by RLI,
   once the fields have been laid out.  This function will call `free'
   for RLI, unless FREE_P is false.  Passing false for FREE_P is bad
   practice; that option only exists to support the G++ 3.2 ABI.  */

void
finish_record_layout (record_layout_info rli, int free_p)
{
  /* Compute the final size.  */
  finalize_record_size (rli);

  /* Compute the TYPE_MODE for the record.  */
  compute_record_mode (rli->t);

  /* Perform any last tweaks to the TYPE_SIZE, etc.  */
  finalize_type_size (rli->t);

  /* Lay out any static members.  This is done now because their type
     may use the record's type.  */
  while (rli->pending_statics)
    {
      layout_decl (TREE_VALUE (rli->pending_statics), 0);
      rli->pending_statics = TREE_CHAIN (rli->pending_statics);
    }

  /* Clean up.  */
  if (free_p)
    free (rli);
}

/* Finish processing a builtin RECORD_TYPE type TYPE.  Its name is
   NAME, its fields are chained in reverse on FIELDS.

   If ALIGN_TYPE is non-null, it is given the same alignment as
   ALIGN_TYPE.  */

void
finish_builtin_struct (tree type, const char *name, tree fields,
                       tree align_type)
{
  tree tail, next;

  for (tail = NULL_TREE; fields; tail = fields, fields = next)
    {
      DECL_FIELD_CONTEXT (fields) = type;
      next = TREE_CHAIN (fields);
      TREE_CHAIN (fields) = tail;
    }
  TYPE_FIELDS (type) = tail;

  if (align_type)
    {
      TYPE_ALIGN (type) = TYPE_ALIGN (align_type);
      TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
    }

  layout_type (type);
#if 0 /* not yet, should get fixed properly later */
  TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
#else
  TYPE_NAME (type) = build_decl (TYPE_DECL, get_identifier (name), type);
#endif
  TYPE_STUB_DECL (type) = TYPE_NAME (type);
  layout_decl (TYPE_NAME (type), 0);
}
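
/* Purely illustrative, hypothetical usage sketch (not a real caller): a
   back end could build a two-field record along these lines, remembering
   that FIELDS is supplied in reverse order:

     tree t = make_node (RECORD_TYPE);
     tree f_ptr = build_decl (FIELD_DECL, get_identifier ("ptr"),
                              ptr_type_node);
     tree f_len = build_decl (FIELD_DECL, get_identifier ("len"), sizetype);
     TREE_CHAIN (f_len) = f_ptr;
     finish_builtin_struct (t, "__example_rec", f_len, NULL_TREE);

   Afterwards TYPE_FIELDS (t) lists "ptr" followed by "len", and the record
   has been laid out and given a TYPE_DECL name.  */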

/* Calculate the mode, size, and alignment for TYPE.
   For an array type, calculate the element separation as well.
   Record TYPE on the chain of permanent or temporary types
   so that dbxout will find out about it.

   TYPE_SIZE of a type is nonzero if the type has been laid out already.
   layout_type does nothing on such a type.

   If the type is incomplete, its TYPE_SIZE remains zero.  */

void
layout_type (tree type)
{
  if (type == 0)
    abort ();

  if (type == error_mark_node)
    return;

  /* Do nothing if type has been laid out before.  */
  if (TYPE_SIZE (type))
    return;

  switch (TREE_CODE (type))
    {
    case LANG_TYPE:
      /* This kind of type is the responsibility
         of the language-specific code.  */
      abort ();

    case BOOLEAN_TYPE:  /* Used for Java, Pascal, and Chill.  */
      if (TYPE_PRECISION (type) == 0)
        TYPE_PRECISION (type) = 1; /* default to one byte/boolean.  */

      /* ... fall through ...  */

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case CHAR_TYPE:
      if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
          && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
        TYPE_UNSIGNED (type) = 1;

      TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
                                                 MODE_INT);
      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;

    case REAL_TYPE:
      TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;

    case COMPLEX_TYPE:
      TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
      TYPE_MODE (type)
        = mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
                         (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE
                          ? MODE_COMPLEX_FLOAT : MODE_COMPLEX_INT),
                         0);
      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;

    case VECTOR_TYPE:
      {
        int nunits = TYPE_VECTOR_SUBPARTS (type);
        tree nunits_tree = build_int_2 (nunits, 0);
        tree innertype = TREE_TYPE (type);

        if (nunits & (nunits - 1))
          abort ();

        /* Find an appropriate mode for the vector type.  */
        if (TYPE_MODE (type) == VOIDmode)
          {
            enum machine_mode innermode = TYPE_MODE (innertype);
            enum machine_mode mode;

            /* First, look for a supported vector type.  */
            if (GET_MODE_CLASS (innermode) == MODE_FLOAT)
              mode = MIN_MODE_VECTOR_FLOAT;
            else
              mode = MIN_MODE_VECTOR_INT;

            for (; mode != VOIDmode ; mode = GET_MODE_WIDER_MODE (mode))
              if (GET_MODE_NUNITS (mode) == nunits
                  && GET_MODE_INNER (mode) == innermode
                  && VECTOR_MODE_SUPPORTED_P (mode))
                break;

            /* For integers, try mapping it to a same-sized scalar mode.  */
            if (mode == VOIDmode
                && GET_MODE_CLASS (innermode) == MODE_INT)
              mode = mode_for_size (nunits * GET_MODE_BITSIZE (innermode),
                                    MODE_INT, 0);

            if (mode == VOIDmode || !have_regs_of_mode[mode])
              TYPE_MODE (type) = BLKmode;
            else
              TYPE_MODE (type) = mode;
          }
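
        /* For example, a 4-element vector of SFmode floats is given
           V4SFmode when the target supports it; a 2-element SImode vector
           with no supported vector mode may fall back to the same-sized
           scalar DImode, or to BLKmode if that mode has no registers.  */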

        TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
        TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
                                                 TYPE_SIZE_UNIT (innertype),
                                                 nunits_tree, 0);
        TYPE_SIZE (type) = int_const_binop (MULT_EXPR, TYPE_SIZE (innertype),
                                            nunits_tree, 0);
        break;
      }

    case VOID_TYPE:
      /* This is an incomplete type and so doesn't have a size.  */
      TYPE_ALIGN (type) = 1;
      TYPE_USER_ALIGN (type) = 0;
      TYPE_MODE (type) = VOIDmode;
      break;

    case OFFSET_TYPE:
      TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
      TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
      /* A pointer might be MODE_PARTIAL_INT,
         but ptrdiff_t must be integral.  */
      TYPE_MODE (type) = mode_for_size (POINTER_SIZE, MODE_INT, 0);
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* It's hard to see what the mode and size of a function ought to
         be, but we do know the alignment is FUNCTION_BOUNDARY, so
         make it consistent with that.  */
      TYPE_MODE (type) = mode_for_size (FUNCTION_BOUNDARY, MODE_INT, 0);
      TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
      TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
        enum machine_mode mode = ((TREE_CODE (type) == REFERENCE_TYPE
                                   && reference_types_internal)
                                  ? Pmode : TYPE_MODE (type));

        int nbits = GET_MODE_BITSIZE (mode);

        TYPE_SIZE (type) = bitsize_int (nbits);
        TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
        TYPE_UNSIGNED (type) = 1;
        TYPE_PRECISION (type) = nbits;
      }
      break;

    case ARRAY_TYPE:
      {
        tree index = TYPE_DOMAIN (type);
        tree element = TREE_TYPE (type);

        build_pointer_type (element);

        /* We need to know both bounds in order to compute the size.  */
        if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
            && TYPE_SIZE (element))
          {
            tree ub = TYPE_MAX_VALUE (index);
            tree lb = TYPE_MIN_VALUE (index);
            tree length;
            tree element_size;

            /* The initial subtraction should happen in the original type so
               that (possibly) negative values are handled appropriately.  */
            length = size_binop (PLUS_EXPR, size_one_node,
                                 convert (sizetype,
                                          fold (build2 (MINUS_EXPR,
                                                        TREE_TYPE (lb),
                                                        ub, lb))));
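
            /* For instance, a C array `int a[5]' has DOMAIN bounds 0 and 4,
               so LENGTH here is (4 - 0) + 1 = 5; with a 32-bit int element,
               TYPE_SIZE below becomes 5 * 32 = 160 bits.  */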

            /* Special handling for arrays of bits (for Chill).  */
            element_size = TYPE_SIZE (element);
            if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
                && (integer_zerop (TYPE_MAX_VALUE (element))
                    || integer_onep (TYPE_MAX_VALUE (element)))
                && host_integerp (TYPE_MIN_VALUE (element), 1))
              {
                HOST_WIDE_INT maxvalue
                  = tree_low_cst (TYPE_MAX_VALUE (element), 1);
                HOST_WIDE_INT minvalue
                  = tree_low_cst (TYPE_MIN_VALUE (element), 1);

                if (maxvalue - minvalue == 1
                    && (maxvalue == 1 || maxvalue == 0))
                  element_size = integer_one_node;
              }

            /* If neither bound is a constant and sizetype is signed, make
               sure the size is never negative.  We should really do this
               if *either* bound is non-constant, but this is the best
               compromise between C and Ada.  */
            if (!TYPE_UNSIGNED (sizetype)
                && TREE_CODE (TYPE_MIN_VALUE (index)) != INTEGER_CST
                && TREE_CODE (TYPE_MAX_VALUE (index)) != INTEGER_CST)
              length = size_binop (MAX_EXPR, length, size_zero_node);

            TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
                                           convert (bitsizetype, length));

            /* If we know the size of the element, calculate the total
               size directly, rather than do some division thing below.
               This optimization helps Fortran assumed-size arrays
               (where the size of the array is determined at runtime)
               substantially.
               Note that we can't do this in the case where the size of
               the elements is one bit since TYPE_SIZE_UNIT cannot be
               set correctly in that case.  */
            if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
              TYPE_SIZE_UNIT (type)
                = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
          }

        /* Now round the alignment and size,
           using machine-dependent criteria if any.  */

#ifdef ROUND_TYPE_ALIGN
        TYPE_ALIGN (type)
          = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
#else
        TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
#endif
        TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
        TYPE_MODE (type) = BLKmode;
        if (TYPE_SIZE (type) != 0
#ifdef MEMBER_TYPE_FORCES_BLK
            && ! MEMBER_TYPE_FORCES_BLK (type, VOIDmode)
#endif
            /* BLKmode elements force BLKmode aggregate;
               else extract/store fields may lose.  */
            && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
                || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
          {
            /* One-element arrays get the component type's mode.  */
            if (simple_cst_equal (TYPE_SIZE (type),
                                  TYPE_SIZE (TREE_TYPE (type))))
              TYPE_MODE (type) = TYPE_MODE (TREE_TYPE (type));
            else
              TYPE_MODE (type)
                = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);

            if (TYPE_MODE (type) != BLKmode
                && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
                && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type))
                && TYPE_MODE (type) != BLKmode)
              {
                TYPE_NO_FORCE_BLK (type) = 1;
                TYPE_MODE (type) = BLKmode;
              }
          }
        break;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree field;
        record_layout_info rli;

        /* Initialize the layout information.  */
        rli = start_record_layout (type);

        /* If this is a QUAL_UNION_TYPE, we want to process the fields
           in the reverse order in building the COND_EXPR that denotes
           its size.  We reverse them again later.  */
        if (TREE_CODE (type) == QUAL_UNION_TYPE)
          TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

        /* Place all the fields.  */
        for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
          place_field (rli, field);

        if (TREE_CODE (type) == QUAL_UNION_TYPE)
          TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

        if (lang_adjust_rli)
          (*lang_adjust_rli) (rli);

        /* Finish laying out the record.  */
        finish_record_layout (rli, /*free_p=*/true);
      }
      break;

    case SET_TYPE:  /* Used by Chill and Pascal.  */
      if (TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST
          || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
        abort ();
      else
        {
#ifndef SET_WORD_SIZE
#define SET_WORD_SIZE BITS_PER_WORD
#endif
          unsigned int alignment
            = set_alignment ? set_alignment : SET_WORD_SIZE;
          HOST_WIDE_INT size_in_bits
            = (tree_low_cst (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), 0)
               - tree_low_cst (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), 0) + 1);
          HOST_WIDE_INT rounded_size
            = ((size_in_bits + alignment - 1) / alignment) * alignment;
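
          /* For example, a set over a 20-value domain with a 32-bit
             SET_WORD_SIZE gets SIZE_IN_BITS of 20 and ROUNDED_SIZE of 32,
             so below it receives a 32-bit integer mode, a 4-byte size, and
             TYPE_PRECISION of 20.  */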

          if (rounded_size > (int) alignment)
            TYPE_MODE (type) = BLKmode;
          else
            TYPE_MODE (type) = mode_for_size (alignment, MODE_INT, 1);

          TYPE_SIZE (type) = bitsize_int (rounded_size);
          TYPE_SIZE_UNIT (type) = size_int (rounded_size / BITS_PER_UNIT);
          TYPE_ALIGN (type) = alignment;
          TYPE_USER_ALIGN (type) = 0;
          TYPE_PRECISION (type) = size_in_bits;
        }
      break;

    case FILE_TYPE:
      /* The size may vary in different languages, so the language front end
         should fill in the size.  */
      TYPE_ALIGN (type) = BIGGEST_ALIGNMENT;
      TYPE_USER_ALIGN (type) = 0;
      TYPE_MODE (type) = BLKmode;
      break;

    default:
      abort ();
    }

  /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE.  For
     records and unions, finish_record_layout already called this
     function.  */
  if (TREE_CODE (type) != RECORD_TYPE
      && TREE_CODE (type) != UNION_TYPE
      && TREE_CODE (type) != QUAL_UNION_TYPE)
    finalize_type_size (type);

  /* If this type is created before sizetype has been permanently set,
     record it so set_sizetype can fix it up.  */
  if (! sizetype_set)
    early_type_list = tree_cons (NULL_TREE, type, early_type_list);

  /* If an alias set has been set for this aggregate when it was incomplete,
     force it into alias set 0.
     This is too conservative, but we cannot call record_component_aliases
     here because some frontends still change the aggregates after
     layout_type.  */
  if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
    TYPE_ALIAS_SET (type) = 0;
}

/* Create and return a type for signed integers of PRECISION bits.  */

tree
make_signed_type (int precision)
{
  tree type = make_node (INTEGER_TYPE);

  TYPE_PRECISION (type) = precision;

  fixup_signed_type (type);
  return type;
}

/* Create and return a type for unsigned integers of PRECISION bits.  */

tree
make_unsigned_type (int precision)
{
  tree type = make_node (INTEGER_TYPE);

  TYPE_PRECISION (type) = precision;

  fixup_unsigned_type (type);
  return type;
}

/* Initialize sizetype and bitsizetype to a reasonable and temporary
   value to enable integer types to be created.  */

void
initialize_sizetypes (void)
{
  tree t = make_node (INTEGER_TYPE);

  /* Set this so we do something reasonable for the build_int_2 calls
     below.  */
  integer_type_node = t;

  TYPE_MODE (t) = SImode;
  TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
  TYPE_USER_ALIGN (t) = 0;
  TYPE_SIZE (t) = build_int_2 (GET_MODE_BITSIZE (SImode), 0);
  TYPE_SIZE_UNIT (t) = build_int_2 (GET_MODE_SIZE (SImode), 0);
  TYPE_UNSIGNED (t) = 1;
  TYPE_PRECISION (t) = GET_MODE_BITSIZE (SImode);
  TYPE_MIN_VALUE (t) = build_int_2 (0, 0);
  TYPE_IS_SIZETYPE (t) = 1;

  /* 1000 avoids problems with possible overflow and is certainly
     larger than any size value we'd want to be storing.  */
  TYPE_MAX_VALUE (t) = build_int_2 (1000, 0);

  /* These two must be different nodes because of the caching done in
     size_int_wide.  */
  sizetype = t;
  bitsizetype = copy_node (t);
  integer_type_node = 0;
}

/* Set sizetype to TYPE, and initialize *sizetype accordingly.
   Also update the type of any standard type's sizes made so far.  */

void
set_sizetype (tree type)
{
  int oprecision = TYPE_PRECISION (type);
  /* The *bitsizetype types use a precision that avoids overflows when
     calculating signed sizes / offsets in bits.  However, when
     cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit
     precision.  */
  int precision = MIN (oprecision + BITS_PER_UNIT_LOG + 1,
                       2 * HOST_BITS_PER_WIDE_INT);
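  /* For example, with a 32-bit sizetype and BITS_PER_UNIT_LOG of 3, this
     requests 32 + 3 + 1 = 36 bits of precision for the bit-counting types
     (assuming 2 * HOST_BITS_PER_WIDE_INT permits it).  */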
  unsigned int i;
  tree t;

  if (sizetype_set)
    abort ();

  /* Make copies of nodes since we'll be setting TYPE_IS_SIZETYPE.  */
  sizetype = copy_node (type);
  TYPE_ORIG_SIZE_TYPE (sizetype) = type;
  TYPE_IS_SIZETYPE (sizetype) = 1;
  bitsizetype = make_node (INTEGER_TYPE);
  TYPE_NAME (bitsizetype) = TYPE_NAME (type);
  TYPE_PRECISION (bitsizetype) = precision;
  TYPE_IS_SIZETYPE (bitsizetype) = 1;

  if (TYPE_UNSIGNED (type))
    fixup_unsigned_type (bitsizetype);
  else
    fixup_signed_type (bitsizetype);

  layout_type (bitsizetype);

  if (TYPE_UNSIGNED (type))
    {
      usizetype = sizetype;
      ubitsizetype = bitsizetype;
      ssizetype = copy_node (make_signed_type (oprecision));
      sbitsizetype = copy_node (make_signed_type (precision));
    }
  else
    {
      ssizetype = sizetype;
      sbitsizetype = bitsizetype;
      usizetype = copy_node (make_unsigned_type (oprecision));
      ubitsizetype = copy_node (make_unsigned_type (precision));
    }

  TYPE_NAME (bitsizetype) = get_identifier ("bit_size_type");

  /* Show that each of these is a sizetype, is a main variant, and has no
     pointers to it.  */
  for (i = 0; i < ARRAY_SIZE (sizetype_tab); i++)
    {
      TYPE_IS_SIZETYPE (sizetype_tab[i]) = 1;
      TYPE_MAIN_VARIANT (sizetype_tab[i]) = sizetype_tab[i];
      TYPE_NEXT_VARIANT (sizetype_tab[i]) = 0;
      TYPE_POINTER_TO (sizetype_tab[i]) = 0;
      TYPE_REFERENCE_TO (sizetype_tab[i]) = 0;
    }

  /* Go down each of the types we already made and set the proper type
     for the sizes in them.  */
  for (t = early_type_list; t != 0; t = TREE_CHAIN (t))
    {
      if (TREE_CODE (TREE_VALUE (t)) != INTEGER_TYPE
          && TREE_CODE (TREE_VALUE (t)) != BOOLEAN_TYPE)
        abort ();

      TREE_TYPE (TYPE_SIZE (TREE_VALUE (t))) = bitsizetype;
      TREE_TYPE (TYPE_SIZE_UNIT (TREE_VALUE (t))) = sizetype;
    }

  early_type_list = 0;
  sizetype_set = 1;
}

/* TYPE is an integral type, i.e., an INTEGER_TYPE, ENUMERAL_TYPE,
   BOOLEAN_TYPE, or CHAR_TYPE.  Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
   for TYPE, based on the PRECISION and whether or not the TYPE
   IS_UNSIGNED.  PRECISION need not correspond to a width supported
   natively by the hardware; for example, on a machine with 8-bit,
   16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
   61.  */

void
set_min_and_max_values_for_integral_type (tree type,
                                          int precision,
                                          bool is_unsigned)
{
  tree min_value;
  tree max_value;

  if (is_unsigned)
    {
      min_value = build_int_2 (0, 0);
      max_value
        = build_int_2 (precision - HOST_BITS_PER_WIDE_INT >= 0
                       ? -1 : ((HOST_WIDE_INT) 1 << precision) - 1,
                       precision - HOST_BITS_PER_WIDE_INT > 0
                       ? ((unsigned HOST_WIDE_INT) ~0
                          >> (HOST_BITS_PER_WIDE_INT
                              - (precision - HOST_BITS_PER_WIDE_INT)))
                       : 0);
    }
  else
    {
      min_value
        = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
                        ? 0 : (HOST_WIDE_INT) (-1) << (precision - 1)),
                       (((HOST_WIDE_INT) (-1)
                         << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
                             ? precision - HOST_BITS_PER_WIDE_INT - 1
                             : 0))));
      max_value
        = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
                        ? -1 : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
                       (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
                        ? (((HOST_WIDE_INT) 1
                            << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
                        : 0));
    }

  TREE_TYPE (min_value) = type;
  TREE_TYPE (max_value) = type;
  TYPE_MIN_VALUE (type) = min_value;
  TYPE_MAX_VALUE (type) = max_value;
}
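
/* For instance, with PRECISION 8 the bounds set above are [0, 255] when
   IS_UNSIGNED, and [-128, 127] otherwise.  */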

/* Set the extreme values of TYPE based on its precision in bits,
   then lay it out.  Used when make_signed_type won't do
   because the tree code is not INTEGER_TYPE.
   E.g. for Pascal, when the -fsigned-char option is given.  */

void
fixup_signed_type (tree type)
{
  int precision = TYPE_PRECISION (type);

  /* We cannot properly represent constants wider than
     2 * HOST_BITS_PER_WIDE_INT bits, but we still need such types, since
     they are used by the i386 vector extensions and friends.  */
  if (precision > HOST_BITS_PER_WIDE_INT * 2)
    precision = HOST_BITS_PER_WIDE_INT * 2;

  set_min_and_max_values_for_integral_type (type, precision,
                                            /*is_unsigned=*/false);

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}

/* Set the extreme values of TYPE based on its precision in bits,
   then lay it out.  This is used both in `make_unsigned_type'
   and for enumeral types.  */

void
fixup_unsigned_type (tree type)
{
  int precision = TYPE_PRECISION (type);

  /* We cannot properly represent constants wider than
     2 * HOST_BITS_PER_WIDE_INT bits, but we still need such types, since
     they are used by the i386 vector extensions and friends.  */
  if (precision > HOST_BITS_PER_WIDE_INT * 2)
    precision = HOST_BITS_PER_WIDE_INT * 2;

  set_min_and_max_values_for_integral_type (type, precision,
                                            /*is_unsigned=*/true);

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}

/* Find the best machine mode to use when referencing a bit field of length
   BITSIZE bits starting at BITPOS.

   The underlying object is known to be aligned to a boundary of ALIGN bits.
   If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
   larger than LARGEST_MODE (usually SImode).

   If no mode meets all these conditions, we return VOIDmode.  Otherwise, if
   VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
   mode meeting these conditions.

   Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
   the largest mode (but a mode no wider than UNITS_PER_WORD) that meets
   all the conditions.  */

enum machine_mode
get_best_mode (int bitsize, int bitpos, unsigned int align,
               enum machine_mode largest_mode, int volatilep)
{
  enum machine_mode mode;
  unsigned int unit = 0;

  /* Find the narrowest integer mode that contains the bit field.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      unit = GET_MODE_BITSIZE (mode);
      if ((bitpos % unit) + bitsize <= unit)
        break;
    }

  if (mode == VOIDmode
      /* It is tempting to omit the following line
         if STRICT_ALIGNMENT is true.
         But that is incorrect, since if the bitfield uses part of 3 bytes
         and we use a 4-byte mode, we could get a spurious segv
         if the extra 4th byte is past the end of memory.
         (Though at least one Unix compiler ignores this problem:
         that on the Sequent 386 machine.)  */
      || MIN (unit, BIGGEST_ALIGNMENT) > align
      || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
    return VOIDmode;

  if (SLOW_BYTE_ACCESS && ! volatilep)
    {
      enum machine_mode wide_mode = VOIDmode, tmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
           tmode = GET_MODE_WIDER_MODE (tmode))
        {
          unit = GET_MODE_BITSIZE (tmode);
          if (bitpos / unit == (bitpos + bitsize - 1) / unit
              && unit <= BITS_PER_WORD
              && unit <= MIN (align, BIGGEST_ALIGNMENT)
              && (largest_mode == VOIDmode
                  || unit <= GET_MODE_BITSIZE (largest_mode)))
            wide_mode = tmode;
        }

      if (wide_mode != VOIDmode)
        return wide_mode;
    }

  return mode;
}
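
/* Worked example: with BITSIZE 10, BITPOS 3, ALIGN 32 and LARGEST_MODE
   VOIDmode, the first loop picks the 16-bit integer mode, since the field
   does not fit in an aligned 8-bit unit ((3 % 8) + 10 > 8) but does fit in
   a 16-bit one ((3 % 16) + 10 <= 16).  If SLOW_BYTE_ACCESS is set and
   VOLATILEP is false, the second loop widens the choice to the 32-bit
   integer mode, provided BITS_PER_WORD and BIGGEST_ALIGNMENT are at
   least 32.  */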

/* Gets minimal and maximal values for MODE (signed or unsigned depending on
   SIGN).  The returned constants are made to be usable in TARGET_MODE.  */

void
get_mode_bounds (enum machine_mode mode, int sign,
                 enum machine_mode target_mode,
                 rtx *mmin, rtx *mmax)
{
  unsigned size = GET_MODE_BITSIZE (mode);
  unsigned HOST_WIDE_INT min_val, max_val;

  if (size > HOST_BITS_PER_WIDE_INT)
    abort ();

  if (sign)
    {
      min_val = -((unsigned HOST_WIDE_INT) 1 << (size - 1));
      max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1;
    }
  else
    {
      min_val = 0;
      max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1;
    }

  *mmin = GEN_INT (trunc_int_for_mode (min_val, target_mode));
  *mmax = GEN_INT (trunc_int_for_mode (max_val, target_mode));
}
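
/* For QImode, for instance, this computes bounds of -128 and 127 when SIGN
   is nonzero, and 0 and 255 otherwise, before the values are truncated for
   TARGET_MODE.  */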

#include "gt-stor-layout.h"