/* C-compiler utilities for types and variables storage layout
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "varasm.h"
#include "print-tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "expr.h"
#include "diagnostic-core.h"
#include "target.h"
#include "langhooks.h"
#include "regs.h"
#include "params.h"
#include "cgraph.h"
#include "tree-inline.h"
#include "tree-dump.h"
#include "gimplify.h"

/* Data type for the expressions representing sizes of data types.
   It is the first integer type laid out.  */
tree sizetype_tab[(int) stk_type_kind_last];

/* If nonzero, this is an upper limit on alignment of structure fields.
   The value is measured in bits.  */
unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;

/* Nonzero if all REFERENCE_TYPEs are internal and hence should be allocated
   in the address spaces' address_mode, not pointer_mode.  Set only by
   internal_reference_types called only by a front end.  */
static int reference_types_internal = 0;

static tree self_referential_size (tree);
static void finalize_record_size (record_layout_info);
static void finalize_type_size (tree);
static void place_union_field (record_layout_info, tree);
#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
                             HOST_WIDE_INT, tree);
#endif
extern void debug_rli (record_layout_info);
\f
/* Show that REFERENCE_TYPES are internal and should use address_mode.
   Called only by front end.  */

void
internal_reference_types (void)
{
  reference_types_internal = 1;
}

/* Given a size SIZE that may not be a constant, return a SAVE_EXPR
   to serve as the actual size-expression for a type or decl.  */

tree
variable_size (tree size)
{
  /* Obviously.  */
  if (TREE_CONSTANT (size))
    return size;

  /* If the size is self-referential, we can't make a SAVE_EXPR (see
     save_expr for the rationale).  But we can do something else.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    return self_referential_size (size);

  /* If we are in the global binding level, we can't make a SAVE_EXPR
     since it may end up being shared across functions, so it is up
     to the front-end to deal with this case.  */
  if (lang_hooks.decls.global_bindings_p ())
    return size;

  return save_expr (size);
}
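
/* Illustrative sketch (not part of the original source; the tree-building
   calls below are a hypothetical front-end usage): for a variable-length
   size expression such as 'n * 4' where 'n' is a local variable, a front
   end hands the size tree to variable_size, which wraps it in a SAVE_EXPR
   so the expression is evaluated only once:

     tree size = build2 (MULT_EXPR, sizetype, n_decl, size_int (4));
     tree once = variable_size (size);   // SAVE_EXPR <n * 4>

   At global binding level the tree is returned unchanged and the front
   end must deal with it itself.  */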

/* An array of functions used for self-referential size computation.  */
static GTY(()) vec<tree, va_gc> *size_functions;

/* Similar to copy_tree_r but do not copy component references involving
   PLACEHOLDER_EXPRs.  These nodes are spotted in find_placeholder_in_expr
   and substituted in substitute_in_expr.  */

static tree
copy_self_referential_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  enum tree_code code = TREE_CODE (*tp);

  /* Stop at types, decls, constants like copy_tree_r.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* This is the pattern built in ada/make_aligning_type.  */
  else if (code == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (*tp, 0)) == PLACEHOLDER_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* Default case: the component reference.  */
  else if (code == COMPONENT_REF)
    {
      tree inner;
      for (inner = TREE_OPERAND (*tp, 0);
           REFERENCE_CLASS_P (inner);
           inner = TREE_OPERAND (inner, 0))
        ;

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* We're not supposed to have them in self-referential size trees
     because we wouldn't properly control when they are evaluated.
     However, not creating superfluous SAVE_EXPRs requires accurate
     tracking of readonly-ness all the way down to here, which we
     cannot always guarantee in practice.  So punt in this case.  */
  else if (code == SAVE_EXPR)
    return error_mark_node;

  else if (code == STATEMENT_LIST)
    gcc_unreachable ();

  return copy_tree_r (tp, walk_subtrees, data);
}

/* Given a SIZE expression that is self-referential, return an equivalent
   expression to serve as the actual size expression for a type.  */

static tree
self_referential_size (tree size)
{
  static unsigned HOST_WIDE_INT fnno = 0;
  vec<tree> self_refs = vNULL;
  tree param_type_list = NULL, param_decl_list = NULL;
  tree t, ref, return_type, fntype, fnname, fndecl;
  unsigned int i;
  char buf[128];
  vec<tree, va_gc> *args = NULL;

  /* Do not factor out simple operations.  */
  t = skip_simple_constant_arithmetic (size);
  if (TREE_CODE (t) == CALL_EXPR)
    return size;

  /* Collect the list of self-references in the expression.  */
  find_placeholder_in_expr (size, &self_refs);
  gcc_assert (self_refs.length () > 0);

  /* Obtain a private copy of the expression.  */
  t = size;
  if (walk_tree (&t, copy_self_referential_tree_r, NULL, NULL) != NULL_TREE)
    return size;
  size = t;

  /* Build the parameter and argument lists in parallel; also
     substitute the former for the latter in the expression.  */
  vec_alloc (args, self_refs.length ());
  FOR_EACH_VEC_ELT (self_refs, i, ref)
    {
      tree subst, param_name, param_type, param_decl;

      if (DECL_P (ref))
        {
          /* We shouldn't have true variables here.  */
          gcc_assert (TREE_READONLY (ref));
          subst = ref;
        }
      /* This is the pattern built in ada/make_aligning_type.  */
      else if (TREE_CODE (ref) == ADDR_EXPR)
        subst = ref;
      /* Default case: the component reference.  */
      else
        subst = TREE_OPERAND (ref, 1);

      sprintf (buf, "p%d", i);
      param_name = get_identifier (buf);
      param_type = TREE_TYPE (ref);
      param_decl
        = build_decl (input_location, PARM_DECL, param_name, param_type);
      DECL_ARG_TYPE (param_decl) = param_type;
      DECL_ARTIFICIAL (param_decl) = 1;
      TREE_READONLY (param_decl) = 1;

      size = substitute_in_expr (size, subst, param_decl);

      param_type_list = tree_cons (NULL_TREE, param_type, param_type_list);
      param_decl_list = chainon (param_decl, param_decl_list);
      args->quick_push (ref);
    }

  self_refs.release ();

  /* Append 'void' to indicate that the number of parameters is fixed.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The 3 lists have been created in reverse order.  */
  param_type_list = nreverse (param_type_list);
  param_decl_list = nreverse (param_decl_list);

  /* Build the function type.  */
  return_type = TREE_TYPE (size);
  fntype = build_function_type (return_type, param_type_list);

  /* Build the function declaration.  */
  sprintf (buf, "SZ" HOST_WIDE_INT_PRINT_UNSIGNED, fnno++);
  fnname = get_file_function_name (buf);
  fndecl = build_decl (input_location, FUNCTION_DECL, fnname, fntype);
  for (t = param_decl_list; t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = fndecl;
  DECL_ARGUMENTS (fndecl) = param_decl_list;
  DECL_RESULT (fndecl)
    = build_decl (input_location, RESULT_DECL, 0, return_type);
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The function has been created by the compiler and we don't
     want to emit debug info for it.  */
  DECL_ARTIFICIAL (fndecl) = 1;
  DECL_IGNORED_P (fndecl) = 1;

  /* It is supposed to be "const" and never throw.  */
  TREE_READONLY (fndecl) = 1;
  TREE_NOTHROW (fndecl) = 1;

  /* We want it to be inlined when this is deemed profitable, as
     well as discarded if every call has been integrated.  */
  DECL_DECLARED_INLINE_P (fndecl) = 1;

  /* It is made up of a unique return statement.  */
  DECL_INITIAL (fndecl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (fndecl)) = fndecl;
  t = build2 (MODIFY_EXPR, return_type, DECL_RESULT (fndecl), size);
  DECL_SAVED_TREE (fndecl) = build1 (RETURN_EXPR, void_type_node, t);
  TREE_STATIC (fndecl) = 1;

  /* Put it onto the list of size functions.  */
  vec_safe_push (size_functions, fndecl);

  /* Replace the original expression with a call to the size function.  */
  return build_call_expr_loc_vec (UNKNOWN_LOCATION, fndecl, args);
}

/* Take, queue and compile all the size functions.  It is essential that
   the size functions be gimplified at the very end of the compilation
   in order to guarantee transparent handling of self-referential sizes.
   Otherwise the GENERIC inliner would not be able to inline them back
   at each of their call sites, thus creating artificial non-constant
   size expressions which would trigger nasty problems later on.  */

void
finalize_size_functions (void)
{
  unsigned int i;
  tree fndecl;

  for (i = 0; size_functions && size_functions->iterate (i, &fndecl); i++)
    {
      allocate_struct_function (fndecl, false);
      set_cfun (NULL);
      dump_function (TDI_original, fndecl);
      gimplify_function_tree (fndecl);
      dump_function (TDI_generic, fndecl);
      cgraph_node::finalize_function (fndecl, false);
    }

  vec_free (size_functions);
}
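
/* Illustrative sketch (an assumption drawn from the Ada references above,
   not from this file): in an Ada-like discriminated record

     type Rec (Len : Natural) is record
       Data : String (1 .. Len);
     end record;

   the size of Rec refers to its own Len component via a PLACEHOLDER_EXPR.
   self_referential_size rewrites such a size into a compiler-generated
   "const" function -- roughly SZ0 (p0), where p0 stands for the Len
   field -- and finalize_size_functions gimplifies all the SZ* functions
   at the very end of compilation so the inliner can integrate them back
   at their call sites.  */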
\f
/* Return the machine mode to use for a nonscalar of SIZE bits.  The
   mode must be in class MCLASS, and have exactly that many value bits;
   it may have padding as well.  If LIMIT is nonzero, modes wider
   than MAX_FIXED_MODE_SIZE will not be used.  */

enum machine_mode
mode_for_size (unsigned int size, enum mode_class mclass, int limit)
{
  enum machine_mode mode;
  int i;

  if (limit && size > MAX_FIXED_MODE_SIZE)
    return BLKmode;

  /* Get the first mode which has this size, in the specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_PRECISION (mode) == size)
      return mode;

  if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
    for (i = 0; i < NUM_INT_N_ENTS; i ++)
      if (int_n_data[i].bitsize == size
          && int_n_enabled_p[i])
        return int_n_data[i].m;

  return BLKmode;
}
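
/* Illustrative sketch (target-dependent, not part of the original source):
   on a typical target where QImode/HImode/SImode/DImode are the
   8/16/32/64-bit integer modes,

     mode_for_size (32, MODE_INT, 0)  => SImode
     mode_for_size (17, MODE_INT, 0)  => BLKmode (no 17-bit integer mode)
     mode_for_size (64, MODE_INT, 1)  => BLKmode if MAX_FIXED_MODE_SIZE < 64

   The exact answers depend on the target's machine description.  */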

/* Similar, except passed a tree node.  */

enum machine_mode
mode_for_size_tree (const_tree size, enum mode_class mclass, int limit)
{
  unsigned HOST_WIDE_INT uhwi;
  unsigned int ui;

  if (!tree_fits_uhwi_p (size))
    return BLKmode;
  uhwi = tree_to_uhwi (size);
  ui = uhwi;
  if (uhwi != ui)
    return BLKmode;
  return mode_for_size (ui, mclass, limit);
}

/* Similar, but never return BLKmode; return the narrowest mode that
   contains at least the requested number of value bits.  */

enum machine_mode
smallest_mode_for_size (unsigned int size, enum mode_class mclass)
{
  enum machine_mode mode = VOIDmode;
  int i;

  /* Get the first mode which has at least this size, in the
     specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_PRECISION (mode) >= size)
      break;

  if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
    for (i = 0; i < NUM_INT_N_ENTS; i ++)
      if (int_n_data[i].bitsize >= size
          && int_n_data[i].bitsize < GET_MODE_PRECISION (mode)
          && int_n_enabled_p[i])
        mode = int_n_data[i].m;

  if (mode == VOIDmode)
    gcc_unreachable ();

  return mode;
}
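
/* Illustrative sketch (target-dependent): unlike mode_for_size, this
   rounds up instead of failing.  On the hypothetical target above,
   smallest_mode_for_size (17, MODE_INT) would return SImode, the
   narrowest integer mode with at least 17 value bits; asking for more
   bits than any mode provides aborts via gcc_unreachable.  */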

/* Find an integer mode of the exact same size, or BLKmode on failure.  */

enum machine_mode
int_mode_for_mode (enum machine_mode mode)
{
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_INT:
    case MODE_PARTIAL_INT:
      break;

    case MODE_COMPLEX_INT:
    case MODE_COMPLEX_FLOAT:
    case MODE_FLOAT:
    case MODE_DECIMAL_FLOAT:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_FRACT:
    case MODE_ACCUM:
    case MODE_UFRACT:
    case MODE_UACCUM:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_UACCUM:
      mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
      break;

    case MODE_RANDOM:
      if (mode == BLKmode)
        break;

      /* ... fall through ...  */

    case MODE_CC:
    default:
      gcc_unreachable ();
    }

  return mode;
}

/* Find a mode that can be used for efficient bitwise operations on MODE.
   Return BLKmode if no such mode exists.  */

enum machine_mode
bitwise_mode_for_mode (enum machine_mode mode)
{
  /* Quick exit if we already have a suitable mode.  */
  unsigned int bitsize = GET_MODE_BITSIZE (mode);
  if (SCALAR_INT_MODE_P (mode) && bitsize <= MAX_FIXED_MODE_SIZE)
    return mode;

  /* Reuse the sanity checks from int_mode_for_mode.  */
  gcc_checking_assert ((int_mode_for_mode (mode), true));

  /* Try to replace complex modes with complex modes.  In general we
     expect both components to be processed independently, so we only
     care whether there is a register for the inner mode.  */
  if (COMPLEX_MODE_P (mode))
    {
      enum machine_mode trial = mode;
      if (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT)
        trial = mode_for_size (bitsize, MODE_COMPLEX_INT, false);
      if (trial != BLKmode
          && have_regs_of_mode[GET_MODE_INNER (trial)])
        return trial;
    }

  /* Try to replace vector modes with vector modes.  Also try using vector
     modes if an integer mode would be too big.  */
  if (VECTOR_MODE_P (mode) || bitsize > MAX_FIXED_MODE_SIZE)
    {
      enum machine_mode trial = mode;
      if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
        trial = mode_for_size (bitsize, MODE_VECTOR_INT, 0);
      if (trial != BLKmode
          && have_regs_of_mode[trial]
          && targetm.vector_mode_supported_p (trial))
        return trial;
    }

  /* Otherwise fall back on integers while honoring MAX_FIXED_MODE_SIZE.  */
  return mode_for_size (bitsize, MODE_INT, true);
}

/* Find a type that can be used for efficient bitwise operations on MODE.
   Return null if no such mode exists.  */

tree
bitwise_type_for_mode (enum machine_mode mode)
{
  mode = bitwise_mode_for_mode (mode);
  if (mode == BLKmode)
    return NULL_TREE;

  unsigned int inner_size = GET_MODE_UNIT_BITSIZE (mode);
  tree inner_type = build_nonstandard_integer_type (inner_size, true);

  if (VECTOR_MODE_P (mode))
    return build_vector_type_for_mode (inner_type, mode);

  if (COMPLEX_MODE_P (mode))
    return build_complex_type (inner_type);

  gcc_checking_assert (GET_MODE_INNER (mode) == VOIDmode);
  return inner_type;
}
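
/* Illustrative sketch (target-dependent): bitwise_type_for_mode pairs
   each mode with an unsigned-integer-based type suitable for bitwise
   operations.  For example, a 32-bit scalar mode maps to a 32-bit
   unsigned integer type, a supported V4SImode maps to a vector of four
   such integers, and a complex mode maps to a complex integer type; a
   BLKmode result from bitwise_mode_for_mode yields NULL_TREE.  */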

/* Find a mode that is suitable for representing a vector with
   NUNITS elements of mode INNERMODE.  Returns BLKmode if there
   is no suitable mode.  */

enum machine_mode
mode_for_vector (enum machine_mode innermode, unsigned nunits)
{
  enum machine_mode mode;

  /* First, look for a supported vector type.  */
  if (SCALAR_FLOAT_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_FLOAT;
  else if (SCALAR_FRACT_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_FRACT;
  else if (SCALAR_UFRACT_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_UFRACT;
  else if (SCALAR_ACCUM_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_ACCUM;
  else if (SCALAR_UACCUM_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_UACCUM;
  else
    mode = MIN_MODE_VECTOR_INT;

  /* Do not check vector_mode_supported_p here.  We'll do that
     later in vector_type_mode.  */
  for (; mode != VOIDmode ; mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_NUNITS (mode) == nunits
        && GET_MODE_INNER (mode) == innermode)
      break;

  /* For integers, try mapping it to a same-sized scalar mode.  */
  if (mode == VOIDmode
      && GET_MODE_CLASS (innermode) == MODE_INT)
    mode = mode_for_size (nunits * GET_MODE_BITSIZE (innermode),
                          MODE_INT, 0);

  if (mode == VOIDmode
      || (GET_MODE_CLASS (mode) == MODE_INT
          && !have_regs_of_mode[mode]))
    return BLKmode;

  return mode;
}
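
/* Illustrative sketch (target-dependent): on a target that defines
   V4SImode and has registers for it, mode_for_vector (SImode, 4) would
   return V4SImode; without such a vector mode the request can fall back
   to a same-sized scalar integer mode (here 128 bits), or BLKmode if
   neither is available.  */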

/* Return the alignment of MODE.  This will be bounded by 1 and
   BIGGEST_ALIGNMENT.  */

unsigned int
get_mode_alignment (enum machine_mode mode)
{
  return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
}

/* Return the precision of the mode, or for a complex or vector mode the
   precision of the mode of its elements.  */

unsigned int
element_precision (enum machine_mode mode)
{
  if (COMPLEX_MODE_P (mode) || VECTOR_MODE_P (mode))
    mode = GET_MODE_INNER (mode);

  return GET_MODE_PRECISION (mode);
}

/* Return the natural mode of an array, given that it is SIZE bytes in
   total and has elements of type ELEM_TYPE.  */

static enum machine_mode
mode_for_array (tree elem_type, tree size)
{
  tree elem_size;
  unsigned HOST_WIDE_INT int_size, int_elem_size;
  bool limit_p;

  /* One-element arrays get the component type's mode.  */
  elem_size = TYPE_SIZE (elem_type);
  if (simple_cst_equal (size, elem_size))
    return TYPE_MODE (elem_type);

  limit_p = true;
  if (tree_fits_uhwi_p (size) && tree_fits_uhwi_p (elem_size))
    {
      int_size = tree_to_uhwi (size);
      int_elem_size = tree_to_uhwi (elem_size);
      if (int_elem_size > 0
          && int_size % int_elem_size == 0
          && targetm.array_mode_supported_p (TYPE_MODE (elem_type),
                                             int_size / int_elem_size))
        limit_p = false;
    }
  return mode_for_size_tree (size, MODE_INT, limit_p);
}
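
/* Illustrative sketch (target-dependent): for 'char a[4]' with a 32-bit
   SIZE, mode_for_array asks mode_for_size_tree for a 32-bit integer mode
   and would typically yield SImode, while a one-element array simply
   inherits its component's mode.  Targets can loosen the
   MAX_FIXED_MODE_SIZE limit here via targetm.array_mode_supported_p.  */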
\f
/* Subroutine of layout_decl: Force alignment required for the data type.
   But if the decl itself wants greater alignment, don't override that.  */

static inline void
do_type_align (tree type, tree decl)
{
  if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
    {
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      if (TREE_CODE (decl) == FIELD_DECL)
        DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
    }
}

/* Set the size, mode and alignment of a ..._DECL node.
   TYPE_DECL does need this for C++.
   Note that LABEL_DECL and CONST_DECL nodes do not need this,
   and FUNCTION_DECL nodes have them set up in a special (and simple) way.
   Don't call layout_decl for them.

   KNOWN_ALIGN is the amount of alignment we can assume this
   decl has with no special effort.  It is relevant only for FIELD_DECLs
   and depends on the previous fields.
   All that matters about KNOWN_ALIGN is which powers of 2 divide it.
   If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
   the record will be aligned to suit.  */

void
layout_decl (tree decl, unsigned int known_align)
{
  tree type = TREE_TYPE (decl);
  enum tree_code code = TREE_CODE (decl);
  rtx rtl = NULL_RTX;
  location_t loc = DECL_SOURCE_LOCATION (decl);

  if (code == CONST_DECL)
    return;

  gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
              || code == TYPE_DECL || code == FIELD_DECL);

  rtl = DECL_RTL_IF_SET (decl);

  if (type == error_mark_node)
    type = void_type_node;

  /* Usually the size and mode come from the data type without change;
     however, the front-end may set the explicit width of the field, so its
     size may not be the same as the size of its type.  This happens with
     bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
     also happens with other fields.  For example, the C++ front-end creates
     zero-sized fields corresponding to empty base classes, and depends on
     layout_type setting DECL_FIELD_BITPOS correctly for the field.  Set the
     size in bytes from the size in bits.  If we have already set the mode,
     don't set it again since we can be called twice for FIELD_DECLs.  */

  DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
  if (DECL_MODE (decl) == VOIDmode)
    DECL_MODE (decl) = TYPE_MODE (type);

  if (DECL_SIZE (decl) == 0)
    {
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
    }
  else if (DECL_SIZE_UNIT (decl) == 0)
    DECL_SIZE_UNIT (decl)
      = fold_convert_loc (loc, sizetype,
                          size_binop_loc (loc, CEIL_DIV_EXPR, DECL_SIZE (decl),
                                          bitsize_unit_node));

  if (code != FIELD_DECL)
    /* For non-fields, update the alignment from the type.  */
    do_type_align (type, decl);
  else
    /* For fields, it's a bit more complicated...  */
    {
      bool old_user_align = DECL_USER_ALIGN (decl);
      bool zero_bitfield = false;
      bool packed_p = DECL_PACKED (decl);
      unsigned int mfa;

      if (DECL_BIT_FIELD (decl))
        {
          DECL_BIT_FIELD_TYPE (decl) = type;

          /* A zero-length bit-field affects the alignment of the next
             field.  In essence such bit-fields are not influenced by
             any packing due to #pragma pack or attribute packed.  */
          if (integer_zerop (DECL_SIZE (decl))
              && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
            {
              zero_bitfield = true;
              packed_p = false;
#ifdef PCC_BITFIELD_TYPE_MATTERS
              if (PCC_BITFIELD_TYPE_MATTERS)
                do_type_align (type, decl);
              else
#endif
                {
#ifdef EMPTY_FIELD_BOUNDARY
                  if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
                    {
                      DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY;
                      DECL_USER_ALIGN (decl) = 0;
                    }
#endif
                }
            }

          /* See if we can use an ordinary integer mode for a bit-field.
             Conditions are: a fixed size that is correct for another mode,
             occupying a complete byte or bytes on proper boundary.  */
          if (TYPE_SIZE (type) != 0
              && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
              && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
            {
              enum machine_mode xmode
                = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
              unsigned int xalign = GET_MODE_ALIGNMENT (xmode);

              if (xmode != BLKmode
                  && !(xalign > BITS_PER_UNIT && DECL_PACKED (decl))
                  && (known_align == 0 || known_align >= xalign))
                {
                  DECL_ALIGN (decl) = MAX (xalign, DECL_ALIGN (decl));
                  DECL_MODE (decl) = xmode;
                  DECL_BIT_FIELD (decl) = 0;
                }
            }

          /* Turn off DECL_BIT_FIELD if we won't need it set.  */
          if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
              && known_align >= TYPE_ALIGN (type)
              && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
            DECL_BIT_FIELD (decl) = 0;
        }
      else if (packed_p && DECL_USER_ALIGN (decl))
        /* Don't touch DECL_ALIGN.  For other packed fields, go ahead and
           round up; we'll reduce it again below.  We want packing to
           supersede USER_ALIGN inherited from the type, but defer to
           alignment explicitly specified on the field decl.  */;
      else
        do_type_align (type, decl);

      /* If the field is packed and not explicitly aligned, give it the
         minimum alignment.  Note that do_type_align may set
         DECL_USER_ALIGN, so we need to check old_user_align instead.  */
      if (packed_p
          && !old_user_align)
        DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);

      if (! packed_p && ! DECL_USER_ALIGN (decl))
        {
          /* Some targets (i.e. i386, VMS) limit struct field alignment
             to a lower boundary than alignment of variables unless
             it was overridden by attribute aligned.  */
#ifdef BIGGEST_FIELD_ALIGNMENT
          DECL_ALIGN (decl)
            = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT);
#endif
#ifdef ADJUST_FIELD_ALIGN
          DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl));
#endif
        }

      if (zero_bitfield)
        mfa = initial_max_fld_align * BITS_PER_UNIT;
      else
        mfa = maximum_field_alignment;
      /* Should this be controlled by DECL_USER_ALIGN, too?  */
      if (mfa != 0)
        DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), mfa);
    }

  /* Evaluate nonconstant size only once, either now or as soon as safe.  */
  if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
  if (DECL_SIZE_UNIT (decl) != 0
      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
    DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));

  /* If requested, warn about definitions of large data objects.  */
  if (warn_larger_than
      && (code == VAR_DECL || code == PARM_DECL)
      && ! DECL_EXTERNAL (decl))
    {
      tree size = DECL_SIZE_UNIT (decl);

      if (size != 0 && TREE_CODE (size) == INTEGER_CST
          && compare_tree_int (size, larger_than_size) > 0)
        {
          int size_as_int = TREE_INT_CST_LOW (size);

          if (compare_tree_int (size, size_as_int) == 0)
            warning (OPT_Wlarger_than_, "size of %q+D is %d bytes", decl, size_as_int);
          else
            warning (OPT_Wlarger_than_, "size of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
        }
    }

  /* If the RTL was already set, update its mode and mem attributes.  */
  if (rtl)
    {
      PUT_MODE (rtl, DECL_MODE (decl));
      SET_DECL_RTL (decl, 0);
      set_mem_attributes (rtl, decl, 1);
      SET_DECL_RTL (decl, rtl);
    }
}
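
/* Illustrative sketch (an assumption about typical targets, not from the
   original source): given

     struct s { int f : 8; };

   layout_decl may discover that the 8-bit field occupies a complete,
   properly aligned byte, switch DECL_MODE to the byte-wide integer mode
   (QImode on typical targets) via mode_for_size_tree, and clear
   DECL_BIT_FIELD, so later accesses use ordinary byte loads and stores
   instead of bit-field extraction.  */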

/* Given a VAR_DECL, PARM_DECL or RESULT_DECL, clears the results of
   a previous call to layout_decl and calls it again.  */

void
relayout_decl (tree decl)
{
  DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
  DECL_MODE (decl) = VOIDmode;
  if (!DECL_USER_ALIGN (decl))
    DECL_ALIGN (decl) = 0;
  SET_DECL_RTL (decl, 0);

  layout_decl (decl, 0);
}
\f
/* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
   QUAL_UNION_TYPE.  Return a pointer to a struct record_layout_info which
   is to be passed to all other layout functions for this record.  It is the
   responsibility of the caller to call `free' for the storage returned.
   Note that garbage collection is not permitted until we finish laying
   out the record.  */

record_layout_info
start_record_layout (tree t)
{
  record_layout_info rli = XNEW (struct record_layout_info_s);

  rli->t = t;

  /* If the type has a minimum specified alignment (via an attribute
     declaration, for example) use it -- otherwise, start with a
     one-byte alignment.  */
  rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
  rli->unpacked_align = rli->record_align;
  rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);

#ifdef STRUCTURE_SIZE_BOUNDARY
  /* Packed structures don't need to have minimum size.  */
  if (! TYPE_PACKED (t))
    {
      unsigned tmp;

      /* #pragma pack overrides STRUCTURE_SIZE_BOUNDARY.  */
      tmp = (unsigned) STRUCTURE_SIZE_BOUNDARY;
      if (maximum_field_alignment != 0)
        tmp = MIN (tmp, maximum_field_alignment);
      rli->record_align = MAX (rli->record_align, tmp);
    }
#endif

  rli->offset = size_zero_node;
  rli->bitpos = bitsize_zero_node;
  rli->prev_field = 0;
  rli->pending_statics = 0;
  rli->packed_maybe_necessary = 0;
  rli->remaining_in_alignment = 0;

  return rli;
}

/* Return the combined bit position for the byte offset OFFSET and the
   bit position BITPOS.

   These functions operate on byte and bit positions present in FIELD_DECLs
   and assume that these expressions result in no (intermediate) overflow.
   This assumption is necessary to fold the expressions as much as possible,
   so as to avoid creating artificially variable-sized types in languages
   supporting variable-sized types like Ada.  */

tree
bit_from_pos (tree offset, tree bitpos)
{
  if (TREE_CODE (offset) == PLUS_EXPR)
    offset = size_binop (PLUS_EXPR,
                         fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
                         fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
  else
    offset = fold_convert (bitsizetype, offset);
  return size_binop (PLUS_EXPR, bitpos,
                     size_binop (MULT_EXPR, offset, bitsize_unit_node));
}

/* Return the combined truncated byte position for the byte offset OFFSET and
   the bit position BITPOS.  */

tree
byte_from_pos (tree offset, tree bitpos)
{
  tree bytepos;
  if (TREE_CODE (bitpos) == MULT_EXPR
      && tree_int_cst_equal (TREE_OPERAND (bitpos, 1), bitsize_unit_node))
    bytepos = TREE_OPERAND (bitpos, 0);
  else
    bytepos = size_binop (TRUNC_DIV_EXPR, bitpos, bitsize_unit_node);
  return size_binop (PLUS_EXPR, offset, fold_convert (sizetype, bytepos));
}

/* Split the bit position POS into a byte offset *POFFSET and a bit
   position *PBITPOS with the byte offset aligned to OFF_ALIGN bits.  */

void
pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
              tree pos)
{
  tree toff_align = bitsize_int (off_align);
  if (TREE_CODE (pos) == MULT_EXPR
      && tree_int_cst_equal (TREE_OPERAND (pos, 1), toff_align))
    {
      *poffset = size_binop (MULT_EXPR,
                             fold_convert (sizetype, TREE_OPERAND (pos, 0)),
                             size_int (off_align / BITS_PER_UNIT));
      *pbitpos = bitsize_zero_node;
    }
  else
    {
      *poffset = size_binop (MULT_EXPR,
                             fold_convert (sizetype,
                                           size_binop (FLOOR_DIV_EXPR, pos,
                                                       toff_align)),
                             size_int (off_align / BITS_PER_UNIT));
      *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, toff_align);
    }
}

/* Given a pointer to bit and byte offsets and an offset alignment,
   normalize the offsets so they are within the alignment.  */

void
normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
{
  /* If the bit position is now larger than it should be, adjust it
     downwards.  */
  if (compare_tree_int (*pbitpos, off_align) >= 0)
    {
      tree offset, bitpos;
      pos_from_bit (&offset, &bitpos, off_align, *pbitpos);
      *poffset = size_binop (PLUS_EXPR, *poffset, offset);
      *pbitpos = bitpos;
    }
}
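
/* Worked example (illustrative, assuming BITS_PER_UNIT == 8): a position
   of offset = 2 bytes and bitpos = 13 bits gives

     bit_from_pos  (2, 13)  => 2*8 + 13 = 29 bits
     byte_from_pos (2, 13)  => 2 + 13/8 = 3 bytes (truncated)

   and normalize_offset with off_align == 8 folds the excess bits into
   the byte offset, leaving offset = 3, bitpos = 5.  */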

/* Print debugging information about the information in RLI.  */

DEBUG_FUNCTION void
debug_rli (record_layout_info rli)
{
  print_node_brief (stderr, "type", rli->t, 0);
  print_node_brief (stderr, "\noffset", rli->offset, 0);
  print_node_brief (stderr, " bitpos", rli->bitpos, 0);

  fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
           rli->record_align, rli->unpacked_align,
           rli->offset_align);

  /* The ms_struct code is the only code that uses this.  */
  if (targetm.ms_bitfield_layout_p (rli->t))
    fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment);

  if (rli->packed_maybe_necessary)
    fprintf (stderr, "packed may be necessary\n");

  if (!vec_safe_is_empty (rli->pending_statics))
    {
      fprintf (stderr, "pending statics:\n");
      debug_vec_tree (rli->pending_statics);
    }
}

/* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
   BITPOS if necessary to keep BITPOS below OFFSET_ALIGN.  */

void
normalize_rli (record_layout_info rli)
{
  normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
}

/* Returns the size in bytes allocated so far.  */

tree
rli_size_unit_so_far (record_layout_info rli)
{
  return byte_from_pos (rli->offset, rli->bitpos);
}

/* Returns the size in bits allocated so far.  */

tree
rli_size_so_far (record_layout_info rli)
{
  return bit_from_pos (rli->offset, rli->bitpos);
}

/* FIELD is about to be added to RLI->T.  The alignment (in bits) of
   the next available location within the record is given by KNOWN_ALIGN.
   Update the variable alignment fields in RLI, and return the alignment
   to give the FIELD.  */

unsigned int
update_alignment_for_field (record_layout_info rli, tree field,
                            unsigned int known_align)
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);
  /* True if the field was explicitly aligned by the user.  */
  bool user_align;
  bool is_bitfield;

  /* Do not attempt to align an ERROR_MARK node.  */
  if (TREE_CODE (type) == ERROR_MARK)
    return 0;

  /* Lay out the field so we know what alignment it needs.  */
  layout_decl (field, known_align);
  desired_align = DECL_ALIGN (field);
  user_align = DECL_USER_ALIGN (field);

  is_bitfield = (type != error_mark_node
                 && DECL_BIT_FIELD_TYPE (field)
                 && ! integer_zerop (TYPE_SIZE (type)));

  /* Record must have at least as much alignment as any field.
     Otherwise, the alignment of the field within the record is
     meaningless.  */
  if (targetm.ms_bitfield_layout_p (rli->t))
    {
      /* Here, the alignment of the underlying type of a bitfield can
         affect the alignment of a record; even a zero-sized field
         can do this.  The alignment should be to the alignment of
         the type, except that for zero-size bitfields this only
         applies if there was an immediately prior, nonzero-size
         bitfield.  (That's the way it is, experimentally.) */
      if ((!is_bitfield && !DECL_PACKED (field))
          || ((DECL_SIZE (field) == NULL_TREE
               || !integer_zerop (DECL_SIZE (field)))
              ? !DECL_PACKED (field)
              : (rli->prev_field
                 && DECL_BIT_FIELD_TYPE (rli->prev_field)
                 && ! integer_zerop (DECL_SIZE (rli->prev_field)))))
        {
          unsigned int type_align = TYPE_ALIGN (type);
          type_align = MAX (type_align, desired_align);
          if (maximum_field_alignment != 0)
            type_align = MIN (type_align, maximum_field_alignment);
          rli->record_align = MAX (rli->record_align, type_align);
          rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
        }
    }
#ifdef PCC_BITFIELD_TYPE_MATTERS
  else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
    {
      /* Named bit-fields cause the entire structure to have the
         alignment implied by their type.  Some targets also apply the same
         rules to unnamed bitfields.  */
      if (DECL_NAME (field) != 0
          || targetm.align_anon_bitfield ())
        {
          unsigned int type_align = TYPE_ALIGN (type);

#ifdef ADJUST_FIELD_ALIGN
          if (! TYPE_USER_ALIGN (type))
            type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

          /* Targets might choose to handle unnamed and hence possibly
             zero-width bitfields.  Those are not influenced by #pragmas
             or packed attributes.  */
          if (integer_zerop (DECL_SIZE (field)))
            {
              if (initial_max_fld_align)
                type_align = MIN (type_align,
                                  initial_max_fld_align * BITS_PER_UNIT);
            }
          else if (maximum_field_alignment != 0)
            type_align = MIN (type_align, maximum_field_alignment);
          else if (DECL_PACKED (field))
            type_align = MIN (type_align, BITS_PER_UNIT);

          /* The alignment of the record is increased to the maximum
             of the current alignment, the alignment indicated on the
             field (i.e., the alignment specified by an __aligned__
             attribute), and the alignment indicated by the type of
             the field.  */
          rli->record_align = MAX (rli->record_align, desired_align);
          rli->record_align = MAX (rli->record_align, type_align);

          if (warn_packed)
            rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
          user_align |= TYPE_USER_ALIGN (type);
        }
    }
#endif
  else
    {
      rli->record_align = MAX (rli->record_align, desired_align);
      rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
    }

  TYPE_USER_ALIGN (rli->t) |= user_align;

  return desired_align;
}
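
/* Illustrative sketch (target-dependent): for

     struct s { char c; int i; };

   placing 'i' calls update_alignment_for_field, which raises
   rli->record_align to TYPE_ALIGN (int) -- typically 32 bits -- so the
   whole record inherits the strictest field alignment; with
   __attribute__((packed)) on the field, the desired alignment drops to
   BITS_PER_UNIT instead.  */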

/* Called from place_field to handle unions.  */

static void
place_union_field (record_layout_info rli, tree field)
{
  update_alignment_for_field (rli, field, /*known_align=*/0);

  DECL_FIELD_OFFSET (field) = size_zero_node;
  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
  SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);

  /* If this is an ERROR_MARK return *after* having set the
     field at the start of the union.  This helps when parsing
     invalid fields.  */
  if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK)
    return;

  /* We assume the union's size will be a multiple of a byte so we don't
     bother with BITPOS.  */
  if (TREE_CODE (rli->t) == UNION_TYPE)
    rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
  else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
    rli->offset = fold_build3 (COND_EXPR, sizetype, DECL_QUALIFIER (field),
                               DECL_SIZE_UNIT (field), rli->offset);
}

#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
/* A bitfield of SIZE with a required access alignment of ALIGN is allocated
   at BYTE_OFFSET / BIT_OFFSET.  Return nonzero if the field would span more
   units of alignment than the underlying TYPE.  */
static int
excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
                  HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
{
  /* Note that the calculation of OFFSET might overflow; we calculate it so
     that we still get the right result as long as ALIGN is a power of two.  */
  unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;

  offset = offset % align;
  return ((offset + size + align - 1) / align
          > tree_to_uhwi (TYPE_SIZE (type)) / align);
}
#endif
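
/* Worked example (illustrative, assuming BITS_PER_UNIT == 8): a 5-bit
   field at byte_offset = 3, bit_offset = 6 against an 8-bit type with
   align = 8 gives offset = (3*8 + 6) % 8 = 6, and
   (6 + 5 + 8 - 1) / 8 = 2 alignment units, which exceeds the single
   unit the 8-bit type itself spans, so excess_unit_span returns nonzero
   and the caller advances the field to the next boundary.  */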
4977bab6 1146
0645ba8f
MM
1147/* RLI contains information about the layout of a RECORD_TYPE. FIELD
1148 is a FIELD_DECL to be added after those fields already present in
1149 T. (FIELD is not actually added to the TYPE_FIELDS list here;
1150 callers that desire that behavior must manually perform that step.) */
1151
1152void
46c5ad27 1153place_field (record_layout_info rli, tree field)
0645ba8f
MM
1154{
1155 /* The alignment required for FIELD. */
1156 unsigned int desired_align;
1157 /* The alignment FIELD would have if we just dropped it into the
1158 record as it presently stands. */
1159 unsigned int known_align;
1160 unsigned int actual_align;
1161 /* The type of this field. */
1162 tree type = TREE_TYPE (field);
1163
dbe91deb 1164 gcc_assert (TREE_CODE (field) != ERROR_MARK);
0645ba8f
MM
1165
1166 /* If FIELD is static, then treat it like a separate variable, not
1167 really like a structure field. If it is a FUNCTION_DECL, it's a
1168 method. In both cases, all we do is lay out the decl, and we do
1169 it *after* the record is laid out. */
1170 if (TREE_CODE (field) == VAR_DECL)
1171 {
9771b263 1172 vec_safe_push (rli->pending_statics, field);
0645ba8f
MM
1173 return;
1174 }
1175
1176 /* Enumerators and enum types which are local to this class need not
1177 be laid out. Likewise for initialized constant fields. */
1178 else if (TREE_CODE (field) != FIELD_DECL)
1179 return;
1180
1181 /* Unions are laid out very differently than records, so split
1182 that code off to another function. */
1183 else if (TREE_CODE (rli->t) != RECORD_TYPE)
1184 {
1185 place_union_field (rli, field);
1186 return;
1187 }
1188
0ac11108 1189 else if (TREE_CODE (type) == ERROR_MARK)
9dfb66b9
CD
1190 {
1191 /* Place this field at the current allocation position, so we
1192 maintain monotonicity. */
1193 DECL_FIELD_OFFSET (field) = rli->offset;
1194 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1195 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1196 return;
1197 }
1198
0645ba8f
MM
1199 /* Work out the known alignment so far. Note that A & (-A) is the
1200 value of the least-significant bit in A that is one. */
1201 if (! integer_zerop (rli->bitpos))
ae7e9ddd
RS
1202 known_align = (tree_to_uhwi (rli->bitpos)
1203 & - tree_to_uhwi (rli->bitpos));
0645ba8f 1204 else if (integer_zerop (rli->offset))
cbbaf4ae 1205 known_align = 0;
cc269bb6 1206 else if (tree_fits_uhwi_p (rli->offset))
0645ba8f 1207 known_align = (BITS_PER_UNIT
ae7e9ddd
RS
1208 * (tree_to_uhwi (rli->offset)
1209 & - tree_to_uhwi (rli->offset)));
0645ba8f
MM
1210 else
1211 known_align = rli->offset_align;
46c5ad27 1212
0645ba8f 1213 desired_align = update_alignment_for_field (rli, field, known_align);
cbbaf4ae
R
1214 if (known_align == 0)
1215 known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
0645ba8f 1216
9328904c
MM
1217 if (warn_packed && DECL_PACKED (field))
1218 {
78d55cc8 1219 if (known_align >= TYPE_ALIGN (type))
3c12fcc2 1220 {
9328904c 1221 if (TYPE_ALIGN (type) > desired_align)
3c12fcc2 1222 {
9328904c 1223 if (STRICT_ALIGNMENT)
dee15844
JM
1224 warning (OPT_Wattributes, "packed attribute causes "
1225 "inefficient alignment for %q+D", field);
a3b20e90
JM
1226 /* Don't warn if DECL_PACKED was set by the type. */
1227 else if (!TYPE_PACKED (rli->t))
dee15844
JM
1228 warning (OPT_Wattributes, "packed attribute is "
1229 "unnecessary for %q+D", field);
3c12fcc2 1230 }
3c12fcc2 1231 }
9328904c
MM
1232 else
1233 rli->packed_maybe_necessary = 1;
1234 }
7306ed3f 1235
9328904c 1236 /* Does this field automatically have alignment it needs by virtue
9954e17f
KT
1237 of the fields that precede it and the record's own alignment? */
1238 if (known_align < desired_align)
9328904c
MM
1239 {
1240 /* No, we need to skip space before this field.
1241 Bump the cumulative size to multiple of field alignment. */
7306ed3f 1242
9954e17f
KT
1243 if (!targetm.ms_bitfield_layout_p (rli->t)
1244 && DECL_SOURCE_LOCATION (field) != BUILTINS_LOCATION)
4c0a0455 1245 warning (OPT_Wpadded, "padding struct to align %q+D", field);
3c12fcc2 1246
770ae6cc
RK
1247 /* If the alignment is still within offset_align, just align
1248 the bit position. */
1249 if (desired_align < rli->offset_align)
1250 rli->bitpos = round_up (rli->bitpos, desired_align);
9328904c
MM
1251 else
1252 {
770ae6cc
RK
1253 /* First adjust OFFSET by the partial bits, then align. */
1254 rli->offset
1255 = size_binop (PLUS_EXPR, rli->offset,
455f19cb
MM
1256 fold_convert (sizetype,
1257 size_binop (CEIL_DIV_EXPR, rli->bitpos,
1258 bitsize_unit_node)));
770ae6cc
RK
1259 rli->bitpos = bitsize_zero_node;
1260
1261 rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
7306ed3f 1262 }
770ae6cc 1263
b1254b72
RK
1264 if (! TREE_CONSTANT (rli->offset))
1265 rli->offset_align = desired_align;
9954e17f
KT
1266 if (targetm.ms_bitfield_layout_p (rli->t))
1267 rli->prev_field = NULL;
9328904c 1268 }
7306ed3f 1269
770ae6cc
RK
1270 /* Handle compatibility with PCC. Note that if the record has any
1271 variable-sized fields, we need not worry about compatibility. */
7306ed3f 1272#ifdef PCC_BITFIELD_TYPE_MATTERS
9328904c 1273 if (PCC_BITFIELD_TYPE_MATTERS
245f1bfa 1274 && ! targetm.ms_bitfield_layout_p (rli->t)
9328904c
MM
1275 && TREE_CODE (field) == FIELD_DECL
1276 && type != error_mark_node
770ae6cc 1277 && DECL_BIT_FIELD (field)
2cd36c22
AN
1278 && (! DECL_PACKED (field)
1279 /* Enter for these packed fields only to issue a warning. */
1280 || TYPE_ALIGN (type) <= BITS_PER_UNIT)
9328904c 1281 && maximum_field_alignment == 0
770ae6cc 1282 && ! integer_zerop (DECL_SIZE (field))
cc269bb6
RS
1283 && tree_fits_uhwi_p (DECL_SIZE (field))
1284 && tree_fits_uhwi_p (rli->offset)
1285 && tree_fits_uhwi_p (TYPE_SIZE (type)))
9328904c
MM
1286 {
1287 unsigned int type_align = TYPE_ALIGN (type);
770ae6cc 1288 tree dsize = DECL_SIZE (field);
ae7e9ddd 1289 HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
905b0e07 1290 HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
9439e9a1 1291 HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
9328904c 1292
ad9335eb
JJ
1293#ifdef ADJUST_FIELD_ALIGN
1294 if (! TYPE_USER_ALIGN (type))
1295 type_align = ADJUST_FIELD_ALIGN (field, type_align);
1296#endif
1297
9328904c
MM
1298 /* A bit field may not span more units of alignment of its type
1299 than its type itself. Advance to next boundary if necessary. */
4977bab6 1300 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
2cd36c22
AN
1301 {
1302 if (DECL_PACKED (field))
1303 {
bee6fa6d 1304 if (warn_packed_bitfield_compat == 1)
2cd36c22
AN
1305 inform
1306 (input_location,
d8a07487 1307 "offset of packed bit-field %qD has changed in GCC 4.4",
2cd36c22
AN
1308 field);
1309 }
1310 else
985c87c9 1311 rli->bitpos = round_up (rli->bitpos, type_align);
2cd36c22 1312 }
daf06049 1313
2cd36c22
AN
1314 if (! DECL_PACKED (field))
1315 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
9328904c 1316 }
7306ed3f
JW
1317#endif
1318
7306ed3f 1319#ifdef BITFIELD_NBYTES_LIMITED
9328904c 1320 if (BITFIELD_NBYTES_LIMITED
245f1bfa 1321 && ! targetm.ms_bitfield_layout_p (rli->t)
9328904c
MM
1322 && TREE_CODE (field) == FIELD_DECL
1323 && type != error_mark_node
1324 && DECL_BIT_FIELD_TYPE (field)
770ae6cc
RK
1325 && ! DECL_PACKED (field)
1326 && ! integer_zerop (DECL_SIZE (field))
cc269bb6
RS
1327 && tree_fits_uhwi_p (DECL_SIZE (field))
1328 && tree_fits_uhwi_p (rli->offset)
1329 && tree_fits_uhwi_p (TYPE_SIZE (type)))
9328904c
MM
1330 {
1331 unsigned int type_align = TYPE_ALIGN (type);
770ae6cc 1332 tree dsize = DECL_SIZE (field);
ae7e9ddd 1333 HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
905b0e07 1334 HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
9439e9a1 1335 HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
e2301a83 1336
ad9335eb
JJ
1337#ifdef ADJUST_FIELD_ALIGN
1338 if (! TYPE_USER_ALIGN (type))
1339 type_align = ADJUST_FIELD_ALIGN (field, type_align);
1340#endif
1341
9328904c
MM
1342 if (maximum_field_alignment != 0)
1343 type_align = MIN (type_align, maximum_field_alignment);
1344 /* ??? This test is opposite the test in the containing if
1345 statement, so this code is unreachable currently. */
1346 else if (DECL_PACKED (field))
1347 type_align = MIN (type_align, BITS_PER_UNIT);
1348
1349 /* A bit field may not span the unit of alignment of its type.
1350 Advance to next boundary if necessary. */
4977bab6 1351 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
770ae6cc 1352 rli->bitpos = round_up (rli->bitpos, type_align);
daf06049 1353
0645ba8f 1354 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
9328904c 1355 }
7306ed3f
JW
1356#endif
1357
e4850f36
DR
1358 /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
1359 A subtlety:
1360 When a bit field is inserted into a packed record, the whole
1361 size of the underlying type is used by one or more same-size
4977bab6 1362 adjacent bitfields. (That is, if its long:3, 32 bits is
e4850f36
DR
1363 used in the record, and any additional adjacent long bitfields are
1364 packed into the same chunk of 32 bits. However, if the size
1365 changes, a new field of that size is allocated.) In an unpacked
14b493d6 1366 record, this is the same as using alignment, but not equivalent
4977bab6 1367 when packing.
e4850f36 1368
14b493d6 1369 Note: for compatibility, we use the type size, not the type alignment
e4850f36
DR
1370 to determine alignment, since that matches the documentation */
1371
0ac11108 1372 if (targetm.ms_bitfield_layout_p (rli->t))
f913c102 1373 {
e4850f36 1374 tree prev_saved = rli->prev_field;
72aeff7c 1375 tree prev_type = prev_saved ? DECL_BIT_FIELD_TYPE (prev_saved) : NULL;
f913c102 1376
0ac11108
EC
1377 /* This is a bitfield if it exists. */
1378 if (rli->prev_field)
e4850f36
DR
1379 {
1380 /* If both are bitfields, nonzero, and the same size, this is
1381 the middle of a run. Zero declared size fields are special
1382 and handled as "end of run". (Note: it's nonzero declared
1383 size, but equal type sizes!) (Since we know that both
1384 the current and previous fields are bitfields by the
1385 time we check it, DECL_SIZE must be present for both.) */
1386 if (DECL_BIT_FIELD_TYPE (field)
1387 && !integer_zerop (DECL_SIZE (field))
1388 && !integer_zerop (DECL_SIZE (rli->prev_field))
9541ffee 1389 && tree_fits_shwi_p (DECL_SIZE (rli->prev_field))
905b0e07 1390 && tree_fits_uhwi_p (TYPE_SIZE (type))
72aeff7c 1391 && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)))
e4850f36
DR
1392 {
1393 /* We're in the middle of a run of equal type size fields; make
1394 sure we realign if we run out of bits. (Not decl size,
1395 type size!) */
ae7e9ddd 1396 HOST_WIDE_INT bitsize = tree_to_uhwi (DECL_SIZE (field));
e4850f36
DR
1397
1398 if (rli->remaining_in_alignment < bitsize)
1399 {
ae7e9ddd 1400 HOST_WIDE_INT typesize = tree_to_uhwi (TYPE_SIZE (type));
72aeff7c 1401
0ac11108 1402 /* out of bits; bump up to next 'word'. */
0ac11108 1403 rli->bitpos
72aeff7c
KK
1404 = size_binop (PLUS_EXPR, rli->bitpos,
1405 bitsize_int (rli->remaining_in_alignment));
0ac11108 1406 rli->prev_field = field;
72aeff7c
KK
1407 if (typesize < bitsize)
1408 rli->remaining_in_alignment = 0;
1409 else
1410 rli->remaining_in_alignment = typesize - bitsize;
e4850f36 1411 }
72aeff7c
KK
1412 else
1413 rli->remaining_in_alignment -= bitsize;
e4850f36
DR
1414 }
1415 else
1416 {
4977bab6
ZW
1417 /* End of a run: if leaving a run of bitfields of the same type
1418 size, we have to "use up" the rest of the bits of the type
e4850f36
DR
1419 size.
1420
1421 Compute the new position as the sum of the size for the prior
1422 type and where we first started working on that type.
1423 Note: since the beginning of the field was aligned then
1424 of course the end will be too. No round needed. */
1425
f7eb0dcd 1426 if (!integer_zerop (DECL_SIZE (rli->prev_field)))
e4850f36 1427 {
0ac11108
EC
1428 rli->bitpos
1429 = size_binop (PLUS_EXPR, rli->bitpos,
1430 bitsize_int (rli->remaining_in_alignment));
e4850f36
DR
1431 }
1432 else
0384674e
RK
1433 /* We "use up" size zero fields; the code below should behave
1434 as if the prior field was not a bitfield. */
1435 prev_saved = NULL;
e4850f36 1436
4977bab6 1437 /* Cause a new bitfield to be captured, either this time (if
991b6592 1438 currently a bitfield) or next time we see one. */
c3284718 1439 if (!DECL_BIT_FIELD_TYPE (field)
f7eb0dcd 1440 || integer_zerop (DECL_SIZE (field)))
0384674e 1441 rli->prev_field = NULL;
e4850f36 1442 }
0384674e 1443
e4850f36
DR
1444 normalize_rli (rli);
1445 }
1446
67ae67ec 1447 /* If we're starting a new run of same type size bitfields
e4850f36 1448 (or a run of non-bitfields), set up the "first of the run"
4977bab6 1449 fields.
e4850f36
DR
1450
1451 That is, if the current field is not a bitfield, or if there
1452 was a prior bitfield the type sizes differ, or if there wasn't
1453 a prior bitfield the size of the current field is nonzero.
1454
1455 Note: we must be sure to test ONLY the type size if there was
1456 a prior bitfield and ONLY for the current field being zero if
1457 there wasn't. */
1458
1459 if (!DECL_BIT_FIELD_TYPE (field)
f7eb0dcd 1460 || (prev_saved != NULL
72aeff7c 1461 ? !simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type))
0384674e 1462 : !integer_zerop (DECL_SIZE (field)) ))
e4850f36 1463 {
0384674e
RK
1464 /* Never smaller than a byte for compatibility. */
1465 unsigned int type_align = BITS_PER_UNIT;
e4850f36 1466
4977bab6 1467 /* (When not a bitfield), we could be seeing a flex array (with
e4850f36 1468 no DECL_SIZE). Since we won't be using remaining_in_alignment
4977bab6 1469 until we see a bitfield (and come by here again) we just skip
e4850f36 1470 calculating it. */
0384674e 1471 if (DECL_SIZE (field) != NULL
cc269bb6
RS
1472 && tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (field)))
1473 && tree_fits_uhwi_p (DECL_SIZE (field)))
72aeff7c 1474 {
fb6807b8 1475 unsigned HOST_WIDE_INT bitsize
ae7e9ddd 1476 = tree_to_uhwi (DECL_SIZE (field));
fb6807b8 1477 unsigned HOST_WIDE_INT typesize
ae7e9ddd 1478 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (field)));
72aeff7c
KK
1479
1480 if (typesize < bitsize)
1481 rli->remaining_in_alignment = 0;
1482 else
1483 rli->remaining_in_alignment = typesize - bitsize;
1484 }
e4850f36 1485
991b6592 1486 /* Now align (conventionally) for the new type. */
0ac11108 1487 type_align = TYPE_ALIGN (TREE_TYPE (field));
f913c102 1488
e4850f36
DR
1489 if (maximum_field_alignment != 0)
1490 type_align = MIN (type_align, maximum_field_alignment);
f913c102 1491
985c87c9 1492 rli->bitpos = round_up (rli->bitpos, type_align);
0384674e 1493
e4850f36 1494 /* If we really aligned, don't allow subsequent bitfields
991b6592 1495 to undo that. */
e4850f36
DR
1496 rli->prev_field = NULL;
1497 }
f913c102
AO
1498 }
1499
770ae6cc
RK
1500 /* Offset so far becomes the position of this field after normalizing. */
1501 normalize_rli (rli);
1502 DECL_FIELD_OFFSET (field) = rli->offset;
1503 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
2f5c7f45 1504 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
770ae6cc 1505
cb27986c
EB
1506 /* Evaluate nonconstant offsets only once, either now or as soon as safe. */
1507 if (TREE_CODE (DECL_FIELD_OFFSET (field)) != INTEGER_CST)
1508 DECL_FIELD_OFFSET (field) = variable_size (DECL_FIELD_OFFSET (field));
1509
770ae6cc
RK
1510 /* If this field ended up more aligned than we thought it would be (we
1511 approximate this by seeing if its position changed), lay out the field
1512 again; perhaps we can use an integral mode for it now. */
4b6bf620 1513 if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
ae7e9ddd
RS
1514 actual_align = (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
1515 & - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)));
4b6bf620 1516 else if (integer_zerop (DECL_FIELD_OFFSET (field)))
cbbaf4ae 1517 actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
cc269bb6 1518 else if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
770ae6cc 1519 actual_align = (BITS_PER_UNIT
ae7e9ddd
RS
1520 * (tree_to_uhwi (DECL_FIELD_OFFSET (field))
1521 & - tree_to_uhwi (DECL_FIELD_OFFSET (field))));
9328904c 1522 else
770ae6cc 1523 actual_align = DECL_OFFSET_ALIGN (field);
cbbaf4ae
R
 1524	  /* ACTUAL_ALIGN is still the actual alignment *within the record*.
 1525	     Store/extract bit field operations will check the alignment of the
 1526	     record against the mode of bit fields.  */
770ae6cc
RK
1527
1528 if (known_align != actual_align)
1529 layout_decl (field, actual_align);
1530
0ac11108
EC
1531 if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field))
1532 rli->prev_field = field;
f913c102 1533
770ae6cc
RK
1534 /* Now add size of this field to the size of the record. If the size is
1535 not constant, treat the field as being a multiple of bytes and just
1536 adjust the offset, resetting the bit position. Otherwise, apportion the
1537 size amongst the bit position and offset. First handle the case of an
1538 unspecified size, which can happen when we have an invalid nested struct
1539 definition, such as struct j { struct j { int i; } }. The error message
1540 is printed in finish_struct. */
1541 if (DECL_SIZE (field) == 0)
1542 /* Do nothing. */;
292f30c5 1543 else if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
455f14dd 1544 || TREE_OVERFLOW (DECL_SIZE (field)))
9328904c 1545 {
770ae6cc
RK
1546 rli->offset
1547 = size_binop (PLUS_EXPR, rli->offset,
455f19cb
MM
1548 fold_convert (sizetype,
1549 size_binop (CEIL_DIV_EXPR, rli->bitpos,
1550 bitsize_unit_node)));
770ae6cc
RK
1551 rli->offset
1552 = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1553 rli->bitpos = bitsize_zero_node;
3923e410 1554 rli->offset_align = MIN (rli->offset_align, desired_align);
9328904c 1555 }
0ac11108
EC
1556 else if (targetm.ms_bitfield_layout_p (rli->t))
1557 {
1558 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1559
 1560	      /* If we ended a bitfield before the full length of the type, then
1561 pad the struct out to the full length of the last type. */
910ad8de
NF
1562 if ((DECL_CHAIN (field) == NULL
1563 || TREE_CODE (DECL_CHAIN (field)) != FIELD_DECL)
0ac11108
EC
1564 && DECL_BIT_FIELD_TYPE (field)
1565 && !integer_zerop (DECL_SIZE (field)))
1566 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos,
1567 bitsize_int (rli->remaining_in_alignment));
1568
1569 normalize_rli (rli);
1570 }
9328904c
MM
1571 else
1572 {
770ae6cc
RK
1573 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1574 normalize_rli (rli);
7306ed3f 1575 }
9328904c 1576}
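
/* An illustrative example (added sketch, assuming BITS_PER_UNIT == 8
   and a 32-bit int with 32-bit alignment): when place_field lays out

     struct s { char c; int i; };

   C is placed at bit position 0; I is bumped to bit position 32 by
   the alignment code above, and normalize_rli splits that position
   between DECL_FIELD_OFFSET and DECL_FIELD_BIT_OFFSET so that
   byte_position (i) == 4.  */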
7306ed3f 1577
9328904c
MM
1578/* Assuming that all the fields have been laid out, this function uses
1579 RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
14b493d6 1580 indicated by RLI. */
7306ed3f 1581
9328904c 1582static void
46c5ad27 1583finalize_record_size (record_layout_info rli)
9328904c 1584{
770ae6cc
RK
1585 tree unpadded_size, unpadded_size_unit;
1586
65e14bf5
RK
1587 /* Now we want just byte and bit offsets, so set the offset alignment
1588 to be a byte and then normalize. */
1589 rli->offset_align = BITS_PER_UNIT;
1590 normalize_rli (rli);
7306ed3f
JW
1591
1592 /* Determine the desired alignment. */
1593#ifdef ROUND_TYPE_ALIGN
9328904c 1594 TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
b451555a 1595 rli->record_align);
7306ed3f 1596#else
9328904c 1597 TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
7306ed3f
JW
1598#endif
1599
65e14bf5
RK
 1600	  /* Compute the size so far.  Be sure to allow for any extra bits
 1601	     beyond the whole bytes; we have guaranteed above that these
 1602	     amount to no more than a single byte.  */
1603 unpadded_size = rli_size_so_far (rli);
1604 unpadded_size_unit = rli_size_unit_so_far (rli);
1605 if (! integer_zerop (rli->bitpos))
1606 unpadded_size_unit
1607 = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
770ae6cc 1608
605f12f4
MP
1609 if (TREE_CODE (unpadded_size_unit) == INTEGER_CST
1610 && !TREE_OVERFLOW (unpadded_size_unit)
1611 && !valid_constant_size_p (unpadded_size_unit))
1612 error ("type %qT is too large", rli->t);
1613
f9da5064 1614 /* Round the size up to be a multiple of the required alignment. */
985c87c9 1615 TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
a4e9ffe5 1616 TYPE_SIZE_UNIT (rli->t)
985c87c9 1617 = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));
729a2125 1618
3176a0c2 1619 if (TREE_CONSTANT (unpadded_size)
4c0a0455
JJ
1620 && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0
1621 && input_location != BUILTINS_LOCATION)
3176a0c2 1622 warning (OPT_Wpadded, "padding struct size to alignment boundary");
786de7eb 1623
770ae6cc
RK
1624 if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
1625 && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
1626 && TREE_CONSTANT (unpadded_size))
3c12fcc2
GM
1627 {
1628 tree unpacked_size;
729a2125 1629
3c12fcc2 1630#ifdef ROUND_TYPE_ALIGN
9328904c
MM
1631 rli->unpacked_align
1632 = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
3c12fcc2 1633#else
9328904c 1634 rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
3c12fcc2 1635#endif
770ae6cc 1636
985c87c9 1637 unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
9328904c 1638 if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
3c12fcc2 1639 {
9328904c 1640 if (TYPE_NAME (rli->t))
3c12fcc2 1641 {
4f1e4960 1642 tree name;
729a2125 1643
9328904c 1644 if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
4f1e4960 1645 name = TYPE_NAME (rli->t);
3c12fcc2 1646 else
4f1e4960 1647 name = DECL_NAME (TYPE_NAME (rli->t));
770ae6cc 1648
3c12fcc2 1649 if (STRICT_ALIGNMENT)
3176a0c2 1650 warning (OPT_Wpacked, "packed attribute causes inefficient "
4f1e4960 1651 "alignment for %qE", name);
3c12fcc2 1652 else
3176a0c2 1653 warning (OPT_Wpacked,
4f1e4960 1654 "packed attribute is unnecessary for %qE", name);
3c12fcc2
GM
1655 }
1656 else
1657 {
1658 if (STRICT_ALIGNMENT)
3176a0c2 1659 warning (OPT_Wpacked,
5c498b10 1660 "packed attribute causes inefficient alignment");
3c12fcc2 1661 else
3176a0c2 1662 warning (OPT_Wpacked, "packed attribute is unnecessary");
3c12fcc2
GM
1663 }
1664 }
3c12fcc2 1665 }
9328904c
MM
1666}
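
/* An illustrative example (added sketch, assuming a 32-bit int with
   32-bit alignment): for

     struct s { int i; char c; };

   the unpadded size is 40 bits (5 bytes); rounding up to the record
   alignment gives TYPE_SIZE == 64 and TYPE_SIZE_UNIT == 8, and
   -Wpadded reports the three bytes of tail padding.  */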
1667
1668/* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */
7306ed3f 1669
65e14bf5 1670void
46c5ad27 1671compute_record_mode (tree type)
9328904c 1672{
770ae6cc
RK
1673 tree field;
1674 enum machine_mode mode = VOIDmode;
1675
9328904c
MM
1676 /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
1677 However, if possible, we use a mode that fits in a register
1678 instead, in order to allow for better optimization down the
1679 line. */
179d2f74 1680 SET_TYPE_MODE (type, BLKmode);
9328904c 1681
cc269bb6 1682 if (! tree_fits_uhwi_p (TYPE_SIZE (type)))
770ae6cc 1683 return;
9328904c 1684
770ae6cc
RK
1685 /* A record which has any BLKmode members must itself be
1686 BLKmode; it can't go in a register. Unless the member is
1687 BLKmode only because it isn't aligned. */
910ad8de 1688 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
770ae6cc 1689 {
770ae6cc
RK
1690 if (TREE_CODE (field) != FIELD_DECL)
1691 continue;
9328904c 1692
770ae6cc
RK
1693 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
1694 || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
7a06d606
RK
1695 && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
1696 && !(TYPE_SIZE (TREE_TYPE (field)) != 0
1697 && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
cc269bb6 1698 || ! tree_fits_uhwi_p (bit_position (field))
6a9f6727 1699 || DECL_SIZE (field) == 0
cc269bb6 1700 || ! tree_fits_uhwi_p (DECL_SIZE (field)))
770ae6cc
RK
1701 return;
1702
770ae6cc
RK
1703 /* If this field is the whole struct, remember its mode so
1704 that, say, we can put a double in a class into a DF
a8ca7756
JW
1705 register instead of forcing it to live in the stack. */
1706 if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
770ae6cc 1707 mode = DECL_MODE (field);
9328904c 1708
d9886a9e
L
1709 /* With some targets, it is sub-optimal to access an aligned
1710 BLKmode structure as a scalar. */
1711 if (targetm.member_type_forces_blk (field, mode))
770ae6cc 1712 return;
770ae6cc 1713 }
9328904c 1714
897f610b
RS
 1715	  /* If we only have one real field, use its mode if that mode's size
1716 matches the type's size. This only applies to RECORD_TYPE. This
1717 does not apply to unions. */
1718 if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode
cc269bb6 1719 && tree_fits_uhwi_p (TYPE_SIZE (type))
eb1ce453 1720 && GET_MODE_BITSIZE (mode) == tree_to_uhwi (TYPE_SIZE (type)))
179d2f74 1721 SET_TYPE_MODE (type, mode);
f439f9a5 1722 else
179d2f74 1723 SET_TYPE_MODE (type, mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1));
770ae6cc
RK
1724
1725 /* If structure's known alignment is less than what the scalar
1726 mode would need, and it matters, then stick with BLKmode. */
1727 if (TYPE_MODE (type) != BLKmode
1728 && STRICT_ALIGNMENT
1729 && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
1730 || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
1731 {
1732 /* If this is the only reason this type is BLKmode, then
1733 don't force containing types to be BLKmode. */
1734 TYPE_NO_FORCE_BLK (type) = 1;
179d2f74 1735 SET_TYPE_MODE (type, BLKmode);
9328904c 1736 }
7306ed3f 1737}
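
/* Illustrative examples (added sketch; the outcomes are typical for a
   64-bit target, not guaranteed):

     struct s { double d; };      -- DFmode: the one field spans the struct
     struct t { int a; int b; };  -- DImode via mode_for_size_tree

   A struct containing a BLKmode member, such as a large array, keeps
   BLKmode.  */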
9328904c
MM
1738
1739/* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1740 out. */
1741
1742static void
46c5ad27 1743finalize_type_size (tree type)
9328904c
MM
1744{
1745 /* Normally, use the alignment corresponding to the mode chosen.
1746 However, where strict alignment is not required, avoid
1747 over-aligning structures, since most compilers do not do this
490272b4 1748 alignment. */
9328904c
MM
1749
1750 if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
490272b4 1751 && (STRICT_ALIGNMENT
9328904c
MM
1752 || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
1753 && TREE_CODE (type) != QUAL_UNION_TYPE
1754 && TREE_CODE (type) != ARRAY_TYPE)))
11cf4d18 1755 {
490272b4
RH
1756 unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1757
1758 /* Don't override a larger alignment requirement coming from a user
1759 alignment of one of the fields. */
1760 if (mode_align >= TYPE_ALIGN (type))
1761 {
1762 TYPE_ALIGN (type) = mode_align;
1763 TYPE_USER_ALIGN (type) = 0;
1764 }
11cf4d18 1765 }
9328904c
MM
1766
1767 /* Do machine-dependent extra alignment. */
1768#ifdef ROUND_TYPE_ALIGN
1769 TYPE_ALIGN (type)
1770 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
1771#endif
1772
9328904c 1773 /* If we failed to find a simple way to calculate the unit size
770ae6cc 1774 of the type, find it by division. */
9328904c
MM
1775 if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1776 /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
1777 result will fit in sizetype. We will get more efficient code using
1778 sizetype, so we force a conversion. */
1779 TYPE_SIZE_UNIT (type)
455f19cb
MM
1780 = fold_convert (sizetype,
1781 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
1782 bitsize_unit_node));
9328904c 1783
770ae6cc
RK
1784 if (TYPE_SIZE (type) != 0)
1785 {
985c87c9
EB
1786 TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
1787 TYPE_SIZE_UNIT (type)
1788 = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN_UNIT (type));
770ae6cc
RK
1789 }
1790
1791 /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
1792 if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1793 TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
9328904c
MM
1794 if (TYPE_SIZE_UNIT (type) != 0
1795 && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
1796 TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
1797
1798 /* Also layout any other variants of the type. */
1799 if (TYPE_NEXT_VARIANT (type)
1800 || type != TYPE_MAIN_VARIANT (type))
1801 {
1802 tree variant;
1803 /* Record layout info of this variant. */
1804 tree size = TYPE_SIZE (type);
1805 tree size_unit = TYPE_SIZE_UNIT (type);
1806 unsigned int align = TYPE_ALIGN (type);
50b6ee8b 1807 unsigned int precision = TYPE_PRECISION (type);
11cf4d18 1808 unsigned int user_align = TYPE_USER_ALIGN (type);
9328904c
MM
1809 enum machine_mode mode = TYPE_MODE (type);
1810
1811 /* Copy it into all variants. */
1812 for (variant = TYPE_MAIN_VARIANT (type);
1813 variant != 0;
1814 variant = TYPE_NEXT_VARIANT (variant))
1815 {
1816 TYPE_SIZE (variant) = size;
1817 TYPE_SIZE_UNIT (variant) = size_unit;
1818 TYPE_ALIGN (variant) = align;
50b6ee8b 1819 TYPE_PRECISION (variant) = precision;
11cf4d18 1820 TYPE_USER_ALIGN (variant) = user_align;
179d2f74 1821 SET_TYPE_MODE (variant, mode);
9328904c
MM
1822 }
1823 }
1824}
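
/* An illustrative example (added sketch, assuming BITS_PER_UNIT == 8):
   if a front end recorded TYPE_SIZE == 96 bits but left TYPE_SIZE_UNIT
   unset, the division above yields TYPE_SIZE_UNIT == 12 in sizetype;
   both values are then copied to every variant of the type.  */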
1825
26c71b93
RG
1826/* Return a new underlying object for a bitfield started with FIELD. */
1827
1828static tree
1829start_bitfield_representative (tree field)
1830{
1831 tree repr = make_node (FIELD_DECL);
1832 DECL_FIELD_OFFSET (repr) = DECL_FIELD_OFFSET (field);
1833 /* Force the representative to begin at a BITS_PER_UNIT aligned
1834 boundary - C++ may use tail-padding of a base object to
1835 continue packing bits so the bitfield region does not start
1836 at bit zero (see g++.dg/abi/bitfield5.C for example).
1837 Unallocated bits may happen for other reasons as well,
1838 for example Ada which allows explicit bit-granular structure layout. */
1839 DECL_FIELD_BIT_OFFSET (repr)
1840 = size_binop (BIT_AND_EXPR,
1841 DECL_FIELD_BIT_OFFSET (field),
1842 bitsize_int (~(BITS_PER_UNIT - 1)));
1843 SET_DECL_OFFSET_ALIGN (repr, DECL_OFFSET_ALIGN (field));
1844 DECL_SIZE (repr) = DECL_SIZE (field);
1845 DECL_SIZE_UNIT (repr) = DECL_SIZE_UNIT (field);
1846 DECL_PACKED (repr) = DECL_PACKED (field);
1847 DECL_CONTEXT (repr) = DECL_CONTEXT (field);
1848 return repr;
1849}
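
/* An illustrative example (added sketch, assuming BITS_PER_UNIT == 8):
   if FIELD sits at DECL_FIELD_BIT_OFFSET 19, say because C++ reused
   the tail-padding of a base object, the BIT_AND_EXPR above masks the
   representative down to bit offset 16, the enclosing byte boundary.  */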
1850
1851/* Finish up a bitfield group that was started by creating the underlying
1852 object REPR with the last field in the bitfield group FIELD. */
1853
1854static void
1855finish_bitfield_representative (tree repr, tree field)
1856{
1857 unsigned HOST_WIDE_INT bitsize, maxbitsize;
1858 enum machine_mode mode;
1859 tree nextf, size;
1860
1861 size = size_diffop (DECL_FIELD_OFFSET (field),
1862 DECL_FIELD_OFFSET (repr));
cc269bb6 1863 gcc_assert (tree_fits_uhwi_p (size));
ae7e9ddd
RS
1864 bitsize = (tree_to_uhwi (size) * BITS_PER_UNIT
1865 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
1866 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))
1867 + tree_to_uhwi (DECL_SIZE (field)));
26c71b93 1868
2447776c
RG
1869 /* Round up bitsize to multiples of BITS_PER_UNIT. */
1870 bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
1871
26c71b93
RG
1872 /* Now nothing tells us how to pad out bitsize ... */
1873 nextf = DECL_CHAIN (field);
1874 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
1875 nextf = DECL_CHAIN (nextf);
1876 if (nextf)
1877 {
1878 tree maxsize;
073a8998 1879	      /* If there was an error, the field may not be laid out
26c71b93
RG
1880 correctly. Don't bother to do anything. */
1881 if (TREE_TYPE (nextf) == error_mark_node)
1882 return;
1883 maxsize = size_diffop (DECL_FIELD_OFFSET (nextf),
1884 DECL_FIELD_OFFSET (repr));
cc269bb6 1885 if (tree_fits_uhwi_p (maxsize))
7ebf9677 1886 {
ae7e9ddd
RS
1887 maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
1888 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (nextf))
1889 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
7ebf9677
RG
1890 /* If the group ends within a bitfield nextf does not need to be
1891 aligned to BITS_PER_UNIT. Thus round up. */
1892 maxbitsize = (maxbitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
1893 }
1894 else
1895 maxbitsize = bitsize;
26c71b93
RG
1896 }
1897 else
1898 {
1899 /* ??? If you consider that tail-padding of this struct might be
1900 re-used when deriving from it we cannot really do the following
2447776c
RG
1901 and thus need to set maxsize to bitsize? Also we cannot
1902 generally rely on maxsize to fold to an integer constant, so
1903 use bitsize as fallback for this case. */
26c71b93
RG
1904 tree maxsize = size_diffop (TYPE_SIZE_UNIT (DECL_CONTEXT (field)),
1905 DECL_FIELD_OFFSET (repr));
cc269bb6 1906 if (tree_fits_uhwi_p (maxsize))
ae7e9ddd
RS
1907 maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
1908 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2447776c
RG
1909 else
1910 maxbitsize = bitsize;
26c71b93
RG
1911 }
1912
 1913	  /* This holds only because we don't artificially break up the
 1914	     representative in the middle of a large bitfield with different,
 1915	     possibly overlapping representatives, and because all
 1916	     representatives start at a byte offset.  */
1917 gcc_assert (maxbitsize % BITS_PER_UNIT == 0);
1918
26c71b93
RG
1919 /* Find the smallest nice mode to use. */
1920 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1921 mode = GET_MODE_WIDER_MODE (mode))
1922 if (GET_MODE_BITSIZE (mode) >= bitsize)
1923 break;
1924 if (mode != VOIDmode
1925 && (GET_MODE_BITSIZE (mode) > maxbitsize
1926 || GET_MODE_BITSIZE (mode) > MAX_FIXED_MODE_SIZE))
1927 mode = VOIDmode;
1928
1929 if (mode == VOIDmode)
1930 {
1931 /* We really want a BLKmode representative only as a last resort,
1932 considering the member b in
1933 struct { int a : 7; int b : 17; int c; } __attribute__((packed));
1934 Otherwise we simply want to split the representative up
1935 allowing for overlaps within the bitfield region as required for
1936 struct { int a : 7; int b : 7;
1937 int c : 10; int d; } __attribute__((packed));
1938 [0, 15] HImode for a and b, [8, 23] HImode for c. */
1939 DECL_SIZE (repr) = bitsize_int (bitsize);
1940 DECL_SIZE_UNIT (repr) = size_int (bitsize / BITS_PER_UNIT);
1941 DECL_MODE (repr) = BLKmode;
1942 TREE_TYPE (repr) = build_array_type_nelts (unsigned_char_type_node,
1943 bitsize / BITS_PER_UNIT);
1944 }
1945 else
1946 {
1947 unsigned HOST_WIDE_INT modesize = GET_MODE_BITSIZE (mode);
1948 DECL_SIZE (repr) = bitsize_int (modesize);
1949 DECL_SIZE_UNIT (repr) = size_int (modesize / BITS_PER_UNIT);
1950 DECL_MODE (repr) = mode;
1951 TREE_TYPE (repr) = lang_hooks.types.type_for_mode (mode, 1);
1952 }
1953
1954 /* Remember whether the bitfield group is at the end of the
1955 structure or not. */
1956 DECL_CHAIN (repr) = nextf;
1957}
1958
1959/* Compute and set FIELD_DECLs for the underlying objects we should
073a8998 1960 use for bitfield access for the structure laid out with RLI. */
26c71b93
RG
1961
1962static void
1963finish_bitfield_layout (record_layout_info rli)
1964{
1965 tree field, prev;
1966 tree repr = NULL_TREE;
1967
 1968	  /* Unions would be special: for the ease of type-punning optimizations
 1969	     we could use the underlying type as a hint for the representative
1970 if the bitfield would fit and the representative would not exceed
1971 the union in size. */
1972 if (TREE_CODE (rli->t) != RECORD_TYPE)
1973 return;
1974
1975 for (prev = NULL_TREE, field = TYPE_FIELDS (rli->t);
1976 field; field = DECL_CHAIN (field))
1977 {
1978 if (TREE_CODE (field) != FIELD_DECL)
1979 continue;
1980
1981 /* In the C++ memory model, consecutive bit fields in a structure are
1982 considered one memory location and updating a memory location
1983 may not store into adjacent memory locations. */
1984 if (!repr
1985 && DECL_BIT_FIELD_TYPE (field))
1986 {
1987 /* Start new representative. */
1988 repr = start_bitfield_representative (field);
1989 }
1990 else if (repr
1991 && ! DECL_BIT_FIELD_TYPE (field))
1992 {
1993 /* Finish off new representative. */
1994 finish_bitfield_representative (repr, prev);
1995 repr = NULL_TREE;
1996 }
1997 else if (DECL_BIT_FIELD_TYPE (field))
1998 {
7ebf9677
RG
1999 gcc_assert (repr != NULL_TREE);
2000
26c71b93
RG
2001 /* Zero-size bitfields finish off a representative and
2002 do not have a representative themselves. This is
2003 required by the C++ memory model. */
2004 if (integer_zerop (DECL_SIZE (field)))
2005 {
2006 finish_bitfield_representative (repr, prev);
2007 repr = NULL_TREE;
2008 }
7ebf9677
RG
2009
2010 /* We assume that either DECL_FIELD_OFFSET of the representative
2011 and each bitfield member is a constant or they are equal.
2012 This is because we need to be able to compute the bit-offset
2013 of each field relative to the representative in get_bit_range
2014 during RTL expansion.
2015 If these constraints are not met, simply force a new
2016 representative to be generated. That will at most
2017 generate worse code but still maintain correctness with
2018 respect to the C++ memory model. */
cc269bb6
RS
2019 else if (!((tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr))
2020 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
7ebf9677
RG
2021 || operand_equal_p (DECL_FIELD_OFFSET (repr),
2022 DECL_FIELD_OFFSET (field), 0)))
2023 {
2024 finish_bitfield_representative (repr, prev);
2025 repr = start_bitfield_representative (field);
2026 }
26c71b93
RG
2027 }
2028 else
2029 continue;
2030
2031 if (repr)
2032 DECL_BIT_FIELD_REPRESENTATIVE (field) = repr;
2033
2034 prev = field;
2035 }
2036
2037 if (repr)
2038 finish_bitfield_representative (repr, prev);
2039}
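
/* An illustrative example (added sketch, assuming a 32-bit int):

     struct s { unsigned a : 4; unsigned b : 4;
                unsigned : 0; unsigned c : 4; };

   A and B share one representative (QImode, covering bits [0, 7]); the
   zero-width bitfield finishes that group, so C gets a fresh
   representative starting at the next allocation unit.  */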
2040
9328904c
MM
2041/* Do all of the work required to layout the type indicated by RLI,
2042 once the fields have been laid out. This function will call `free'
17bbb839
MM
2043 for RLI, unless FREE_P is false. Passing a value other than false
2044 for FREE_P is bad practice; this option only exists to support the
2045 G++ 3.2 ABI. */
9328904c
MM
2046
2047void
46c5ad27 2048finish_record_layout (record_layout_info rli, int free_p)
9328904c 2049{
1937f939
JM
2050 tree variant;
2051
770ae6cc
RK
2052 /* Compute the final size. */
2053 finalize_record_size (rli);
2054
2055 /* Compute the TYPE_MODE for the record. */
2056 compute_record_mode (rli->t);
cc9d4a85 2057
8d8238b6
JM
2058 /* Perform any last tweaks to the TYPE_SIZE, etc. */
2059 finalize_type_size (rli->t);
2060
26c71b93
RG
2061 /* Compute bitfield representatives. */
2062 finish_bitfield_layout (rli);
2063
1937f939
JM
2064 /* Propagate TYPE_PACKED to variants. With C++ templates,
2065 handle_packed_attribute is too early to do this. */
2066 for (variant = TYPE_NEXT_VARIANT (rli->t); variant;
2067 variant = TYPE_NEXT_VARIANT (variant))
2068 TYPE_PACKED (variant) = TYPE_PACKED (rli->t);
2069
9328904c
MM
2070 /* Lay out any static members. This is done now because their type
2071 may use the record's type. */
9771b263
DN
2072 while (!vec_safe_is_empty (rli->pending_statics))
2073 layout_decl (rli->pending_statics->pop (), 0);
cc9d4a85 2074
9328904c 2075 /* Clean up. */
17bbb839 2076 if (free_p)
76d971cc 2077 {
9771b263 2078 vec_free (rli->pending_statics);
76d971cc
NF
2079 free (rli);
2080 }
9328904c 2081}
7306ed3f 2082\f
4977bab6
ZW
2083
 2084/* Finish processing a builtin RECORD_TYPE type TYPE.  Its name is
 2085   NAME; its fields are chained in reverse order on FIELDS.
2086
2087 If ALIGN_TYPE is non-null, it is given the same alignment as
2088 ALIGN_TYPE. */
2089
2090void
46c5ad27
AJ
2091finish_builtin_struct (tree type, const char *name, tree fields,
2092 tree align_type)
4977bab6 2093{
1469344a 2094 tree tail, next;
4977bab6
ZW
2095
2096 for (tail = NULL_TREE; fields; tail = fields, fields = next)
2097 {
2098 DECL_FIELD_CONTEXT (fields) = type;
910ad8de
NF
2099 next = DECL_CHAIN (fields);
2100 DECL_CHAIN (fields) = tail;
4977bab6
ZW
2101 }
2102 TYPE_FIELDS (type) = tail;
2103
2104 if (align_type)
2105 {
2106 TYPE_ALIGN (type) = TYPE_ALIGN (align_type);
2107 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
2108 }
2109
2110 layout_type (type);
2111#if 0 /* not yet, should get fixed properly later */
2112 TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
2113#else
c2255bc4
AH
2114 TYPE_NAME (type) = build_decl (BUILTINS_LOCATION,
2115 TYPE_DECL, get_identifier (name), type);
4977bab6
ZW
2116#endif
2117 TYPE_STUB_DECL (type) = TYPE_NAME (type);
2118 layout_decl (TYPE_NAME (type), 0);
2119}
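
/* A hypothetical usage sketch (added; the type and field names are
   invented).  A front end might build a two-field builtin record as:

     tree t = make_node (RECORD_TYPE);
     tree f1 = build_decl (BUILTINS_LOCATION, FIELD_DECL,
                           get_identifier ("base"), ptr_type_node);
     tree f2 = build_decl (BUILTINS_LOCATION, FIELD_DECL,
                           get_identifier ("len"), sizetype);
     DECL_CHAIN (f2) = f1;   -- fields are passed in reverse order
     finish_builtin_struct (t, "__example_desc", f2, NULL_TREE);

   After the call, TYPE_FIELDS (t) lists "base" first, and T has been
   laid out by layout_type.  */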
2120
7306ed3f
JW
2121/* Calculate the mode, size, and alignment for TYPE.
2122 For an array type, calculate the element separation as well.
2123 Record TYPE on the chain of permanent or temporary types
2124 so that dbxout will find out about it.
2125
2126 TYPE_SIZE of a type is nonzero if the type has been laid out already.
2127 layout_type does nothing on such a type.
2128
2129 If the type is incomplete, its TYPE_SIZE remains zero. */
2130
2131void
46c5ad27 2132layout_type (tree type)
7306ed3f 2133{
41374e13 2134 gcc_assert (type);
7306ed3f 2135
6de9cd9a
DN
2136 if (type == error_mark_node)
2137 return;
2138
7306ed3f
JW
2139 /* Do nothing if type has been laid out before. */
2140 if (TYPE_SIZE (type))
2141 return;
2142
7306ed3f
JW
2143 switch (TREE_CODE (type))
2144 {
2145 case LANG_TYPE:
2146 /* This kind of type is the responsibility
9faa82d8 2147 of the language-specific code. */
41374e13 2148 gcc_unreachable ();
7306ed3f 2149
c0e081a9 2150 case BOOLEAN_TYPE:
7306ed3f
JW
2151 case INTEGER_TYPE:
2152 case ENUMERAL_TYPE:
179d2f74
RH
2153 SET_TYPE_MODE (type,
2154 smallest_mode_for_size (TYPE_PRECISION (type), MODE_INT));
06ceef4e 2155 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
50b6ee8b 2156 /* Don't set TYPE_PRECISION here, as it may be set by a bitfield. */
ead17059 2157 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
7306ed3f
JW
2158 break;
2159
2160 case REAL_TYPE:
179d2f74
RH
2161 SET_TYPE_MODE (type,
2162 mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0));
06ceef4e 2163 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
ead17059 2164 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
7306ed3f
JW
2165 break;
2166
325217ed
CF
2167 case FIXED_POINT_TYPE:
2168 /* TYPE_MODE (type) has been set already. */
2169 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
2170 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
2171 break;
2172
7306ed3f 2173 case COMPLEX_TYPE:
8df83eae 2174 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
179d2f74
RH
2175 SET_TYPE_MODE (type,
2176 mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
2177 (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE
2178 ? MODE_COMPLEX_FLOAT : MODE_COMPLEX_INT),
2179 0));
06ceef4e 2180 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
ead17059 2181 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
7306ed3f
JW
2182 break;
2183
0b4565c9 2184 case VECTOR_TYPE:
26277d41
PB
2185 {
2186 int nunits = TYPE_VECTOR_SUBPARTS (type);
26277d41
PB
2187 tree innertype = TREE_TYPE (type);
2188
41374e13 2189 gcc_assert (!(nunits & (nunits - 1)));
26277d41
PB
2190
2191 /* Find an appropriate mode for the vector type. */
2192 if (TYPE_MODE (type) == VOIDmode)
bb67d9c7
RG
2193 SET_TYPE_MODE (type,
2194 mode_for_vector (TYPE_MODE (innertype), nunits));
26277d41 2195
325217ed 2196 TYPE_SATURATING (type) = TYPE_SATURATING (TREE_TYPE (type));
26277d41
PB
2197 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
2198 TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
2199 TYPE_SIZE_UNIT (innertype),
d35936ab 2200 size_int (nunits));
26277d41 2201 TYPE_SIZE (type) = int_const_binop (MULT_EXPR, TYPE_SIZE (innertype),
d35936ab 2202 bitsize_int (nunits));
e4ca3dc3 2203
5aea1e76
UW
2204 /* For vector types, we do not default to the mode's alignment.
2205 Instead, query a target hook, defaulting to natural alignment.
2206 This prevents ABI changes depending on whether or not native
2207 vector modes are supported. */
2208 TYPE_ALIGN (type) = targetm.vector_alignment (type);
2209
2210 /* However, if the underlying mode requires a bigger alignment than
2211 what the target hook provides, we cannot use the mode. For now,
2212 simply reject that case. */
2213 gcc_assert (TYPE_ALIGN (type)
2214 >= GET_MODE_ALIGNMENT (TYPE_MODE (type)));
26277d41
PB
2215 break;
2216 }
0b4565c9 2217
7306ed3f 2218 case VOID_TYPE:
770ae6cc 2219 /* This is an incomplete type and so doesn't have a size. */
7306ed3f 2220 TYPE_ALIGN (type) = 1;
11cf4d18 2221 TYPE_USER_ALIGN (type) = 0;
179d2f74 2222 SET_TYPE_MODE (type, VOIDmode);
7306ed3f
JW
2223 break;
2224
321cb743 2225 case OFFSET_TYPE:
06ceef4e 2226 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
50b6ee8b
DD
2227 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE_UNITS);
2228 /* A pointer might be MODE_PARTIAL_INT, but ptrdiff_t must be
2229 integral, which may be an __intN. */
179d2f74 2230 SET_TYPE_MODE (type, mode_for_size (POINTER_SIZE, MODE_INT, 0));
d4ebfa65 2231 TYPE_PRECISION (type) = POINTER_SIZE;
321cb743
MT
2232 break;
2233
7306ed3f
JW
2234 case FUNCTION_TYPE:
2235 case METHOD_TYPE:
019dd4ec
RK
2236 /* It's hard to see what the mode and size of a function ought to
2237 be, but we do know the alignment is FUNCTION_BOUNDARY, so
2238 make it consistent with that. */
179d2f74 2239 SET_TYPE_MODE (type, mode_for_size (FUNCTION_BOUNDARY, MODE_INT, 0));
019dd4ec
RK
2240 TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
2241 TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
7306ed3f
JW
2242 break;
2243
2244 case POINTER_TYPE:
2245 case REFERENCE_TYPE:
b5d6a2ff 2246 {
d4ebfa65
BE
2247 enum machine_mode mode = TYPE_MODE (type);
2248 if (TREE_CODE (type) == REFERENCE_TYPE && reference_types_internal)
2249 {
2250 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (type));
2251 mode = targetm.addr_space.address_mode (as);
2252 }
4977bab6 2253
d4ebfa65 2254 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
4977bab6 2255 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
8df83eae 2256 TYPE_UNSIGNED (type) = 1;
50b6ee8b 2257 TYPE_PRECISION (type) = GET_MODE_PRECISION (mode);
b5d6a2ff 2258 }
7306ed3f
JW
2259 break;
2260
2261 case ARRAY_TYPE:
2262 {
b3694847
SS
2263 tree index = TYPE_DOMAIN (type);
2264 tree element = TREE_TYPE (type);
7306ed3f
JW
2265
2266 build_pointer_type (element);
2267
2268 /* We need to know both bounds in order to compute the size. */
2269 if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
2270 && TYPE_SIZE (element))
2271 {
ad50bc8d
RH
2272 tree ub = TYPE_MAX_VALUE (index);
2273 tree lb = TYPE_MIN_VALUE (index);
473ebbc5 2274 tree element_size = TYPE_SIZE (element);
e24ff973
RK
2275 tree length;
2276
c2ce8cdc
EB
 2277	    /* Make sure that an array of zero-sized elements is zero-sized
2278 regardless of its extent. */
2279 if (integer_zerop (element_size))
2280 length = size_zero_node;
2281
830c740f
RG
2282 /* The computation should happen in the original signedness so
2283 that (possible) negative values are handled appropriately
2284 when determining overflow. */
c2ce8cdc 2285 else
56099f00
RG
2286 {
2287 /* ??? When it is obvious that the range is signed
2288 represent it using ssizetype. */
2289 if (TREE_CODE (lb) == INTEGER_CST
2290 && TREE_CODE (ub) == INTEGER_CST
2291 && TYPE_UNSIGNED (TREE_TYPE (lb))
2292 && tree_int_cst_lt (ub, lb))
2293 {
807e902e
KZ
2294 lb = wide_int_to_tree (ssizetype,
2295 offset_int::from (lb, SIGNED));
2296 ub = wide_int_to_tree (ssizetype,
2297 offset_int::from (ub, SIGNED));
56099f00
RG
2298 }
2299 length
2300 = fold_convert (sizetype,
2301 size_binop (PLUS_EXPR,
2302 build_int_cst (TREE_TYPE (lb), 1),
2303 size_binop (MINUS_EXPR, ub, lb)));
2304 }
2305
ce3da0d0
EB
2306 /* ??? We have no way to distinguish a null-sized array from an
2307 array spanning the whole sizetype range, so we arbitrarily
2308 decide that [0, -1] is the only valid representation. */
56099f00 2309 if (integer_zerop (length)
ce3da0d0
EB
2310 && TREE_OVERFLOW (length)
2311 && integer_zerop (lb))
56099f00 2312 length = size_zero_node;
7306ed3f 2313
fed3cef0 2314 TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
0ac11108 2315 fold_convert (bitsizetype,
455f19cb 2316 length));
ead17059 2317
473ebbc5
EB
2318 /* If we know the size of the element, calculate the total size
2319 directly, rather than do some division thing below. This
2320 optimization helps Fortran assumed-size arrays (where the
2321 size of the array is determined at runtime) substantially. */
2322 if (TYPE_SIZE_UNIT (element))
d4b60170
RK
2323 TYPE_SIZE_UNIT (type)
2324 = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
7306ed3f
JW
2325 }
2326
2327 /* Now round the alignment and size,
2328 using machine-dependent criteria if any. */
2329
2330#ifdef ROUND_TYPE_ALIGN
2331 TYPE_ALIGN (type)
2332 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
2333#else
2334 TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
2335#endif
c163d21d 2336 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
179d2f74 2337 SET_TYPE_MODE (type, BLKmode);
7306ed3f 2338 if (TYPE_SIZE (type) != 0
d9886a9e 2339 && ! targetm.member_type_forces_blk (type, VOIDmode)
7306ed3f
JW
2340 /* BLKmode elements force BLKmode aggregate;
2341 else extract/store fields may lose. */
2342 && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
2343 || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
2344 {
0f6d54f7
RS
2345 SET_TYPE_MODE (type, mode_for_array (TREE_TYPE (type),
2346 TYPE_SIZE (type)));
72c602fc
RK
2347 if (TYPE_MODE (type) != BLKmode
2348 && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
b9d49351 2349 && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7306ed3f
JW
2350 {
2351 TYPE_NO_FORCE_BLK (type) = 1;
179d2f74 2352 SET_TYPE_MODE (type, BLKmode);
7306ed3f 2353 }
7306ed3f 2354 }
b606b65c
OH
2355 /* When the element size is constant, check that it is at least as
2356 large as the element alignment. */
002a9071
SE
2357 if (TYPE_SIZE_UNIT (element)
2358 && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST
b606b65c
OH
2359 /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than
2360 TYPE_ALIGN_UNIT. */
455f14dd 2361 && !TREE_OVERFLOW (TYPE_SIZE_UNIT (element))
002a9071
SE
2362 && !integer_zerop (TYPE_SIZE_UNIT (element))
2363 && compare_tree_int (TYPE_SIZE_UNIT (element),
2364 TYPE_ALIGN_UNIT (element)) < 0)
2365 error ("alignment of array elements is greater than element size");
7306ed3f
JW
2366 break;
2367 }
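      /* An illustrative example (added sketch, assuming a 32-bit int):
         for int[10] the domain is [0, 9], so length is 9 - 0 + 1 == 10,
         giving TYPE_SIZE == 320 bits and TYPE_SIZE_UNIT == 40, with the
         alignment taken from the element type.  */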
2368
2369 case RECORD_TYPE:
cc9d4a85
MM
2370 case UNION_TYPE:
2371 case QUAL_UNION_TYPE:
9328904c
MM
2372 {
2373 tree field;
2374 record_layout_info rli;
2375
2376 /* Initialize the layout information. */
770ae6cc
RK
2377 rli = start_record_layout (type);
2378
cc9d4a85
MM
2379 /* If this is a QUAL_UNION_TYPE, we want to process the fields
2380 in the reverse order in building the COND_EXPR that denotes
2381 its size. We reverse them again later. */
2382 if (TREE_CODE (type) == QUAL_UNION_TYPE)
2383 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
770ae6cc
RK
2384
2385 /* Place all the fields. */
910ad8de 2386 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
770ae6cc
RK
2387 place_field (rli, field);
2388
cc9d4a85
MM
2389 if (TREE_CODE (type) == QUAL_UNION_TYPE)
2390 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
770ae6cc 2391
9328904c 2392 /* Finish laying out the record. */
17bbb839 2393 finish_record_layout (rli, /*free_p=*/true);
9328904c 2394 }
7306ed3f
JW
2395 break;
2396
7306ed3f 2397 default:
41374e13 2398 gcc_unreachable ();
729a2125 2399 }
7306ed3f 2400
9328904c 2401 /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For
cc9d4a85
MM
2402 records and unions, finish_record_layout already called this
2403 function. */
786de7eb 2404 if (TREE_CODE (type) != RECORD_TYPE
cc9d4a85
MM
2405 && TREE_CODE (type) != UNION_TYPE
2406 && TREE_CODE (type) != QUAL_UNION_TYPE)
9328904c 2407 finalize_type_size (type);
7306ed3f 2408
36784d0e
RG
 2409	  /* We should never see alias sets on incomplete aggregates, and we
 2410	     should not call layout_type on aggregates that are already complete.  */
2411 if (AGGREGATE_TYPE_P (type))
2412 gcc_assert (!TYPE_ALIAS_SET_KNOWN_P (type));
7306ed3f 2413}
179d2f74 2414
944fa280
JJ
2415/* Return the least alignment required for type TYPE. */
2416
2417unsigned int
2418min_align_of_type (tree type)
2419{
2420 unsigned int align = TYPE_ALIGN (type);
2421 align = MIN (align, BIGGEST_ALIGNMENT);
1740f8a1
JJ
2422 if (!TYPE_USER_ALIGN (type))
2423 {
944fa280 2424#ifdef BIGGEST_FIELD_ALIGNMENT
1740f8a1 2425 align = MIN (align, BIGGEST_FIELD_ALIGNMENT);
944fa280 2426#endif
1740f8a1 2427 unsigned int field_align = align;
944fa280 2428#ifdef ADJUST_FIELD_ALIGN
1740f8a1
JJ
2429 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE, type);
2430 field_align = ADJUST_FIELD_ALIGN (field, field_align);
2431 ggc_free (field);
944fa280 2432#endif
1740f8a1
JJ
2433 align = MIN (align, field_align);
2434 }
944fa280
JJ
2435 return align / BITS_PER_UNIT;
2436}
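
/* An illustrative example (added sketch): on a target whose
   ADJUST_FIELD_ALIGN caps the field alignment of double at 32 bits, as
   the traditional ia32 ABI does, min_align_of_type (double_type_node)
   returns 4 even though TYPE_ALIGN is 64.  */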
2437
179d2f74
RH
2438/* Vector types need to re-check the target flags each time we report
2439 the machine mode. We need to do this because attribute target can
2440 change the result of vector_mode_supported_p and have_regs_of_mode
2441 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
2442 change on a per-function basis. */
b8698a0f 2443/* ??? Possibly a better solution is to run through all the types
179d2f74
RH
2444 referenced by a function and re-compute the TYPE_MODE once, rather
2445 than make the TYPE_MODE macro call a function. */
2446
2447enum machine_mode
2448vector_type_mode (const_tree t)
2449{
2450 enum machine_mode mode;
2451
2452 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
2453
51545682 2454 mode = t->type_common.mode;
179d2f74
RH
2455 if (VECTOR_MODE_P (mode)
2456 && (!targetm.vector_mode_supported_p (mode)
2457 || !have_regs_of_mode[mode]))
2458 {
51545682 2459 enum machine_mode innermode = TREE_TYPE (t)->type_common.mode;
179d2f74
RH
2460
2461 /* For integers, try mapping it to a same-sized scalar mode. */
2462 if (GET_MODE_CLASS (innermode) == MODE_INT)
2463 {
2464 mode = mode_for_size (TYPE_VECTOR_SUBPARTS (t)
2465 * GET_MODE_BITSIZE (innermode), MODE_INT, 0);
2466
2467 if (mode != VOIDmode && have_regs_of_mode[mode])
2468 return mode;
2469 }
2470
2471 return BLKmode;
2472 }
2473
2474 return mode;
2475}
7306ed3f
JW
2476\f
2477/* Create and return a type for signed integers of PRECISION bits. */
2478
2479tree
46c5ad27 2480make_signed_type (int precision)
7306ed3f 2481{
b3694847 2482 tree type = make_node (INTEGER_TYPE);
7306ed3f
JW
2483
2484 TYPE_PRECISION (type) = precision;
2485
fed3cef0 2486 fixup_signed_type (type);
7306ed3f
JW
2487 return type;
2488}
2489
2490/* Create and return a type for unsigned integers of PRECISION bits. */
2491
2492tree
46c5ad27 2493make_unsigned_type (int precision)
7306ed3f 2494{
b3694847 2495 tree type = make_node (INTEGER_TYPE);
7306ed3f
JW
2496
2497 TYPE_PRECISION (type) = precision;
2498
7306ed3f
JW
2499 fixup_unsigned_type (type);
2500 return type;
2501}
fed3cef0 2502\f
325217ed
CF
2503/* Create and return a type for fract of PRECISION bits, UNSIGNEDP,
2504 and SATP. */
2505
2506tree
2507make_fract_type (int precision, int unsignedp, int satp)
2508{
2509 tree type = make_node (FIXED_POINT_TYPE);
2510
2511 TYPE_PRECISION (type) = precision;
2512
2513 if (satp)
2514 TYPE_SATURATING (type) = 1;
2515
2516 /* Lay out the type: set its alignment, size, etc. */
2517 if (unsignedp)
2518 {
2519 TYPE_UNSIGNED (type) = 1;
179d2f74 2520 SET_TYPE_MODE (type, mode_for_size (precision, MODE_UFRACT, 0));
325217ed
CF
2521 }
2522 else
179d2f74 2523 SET_TYPE_MODE (type, mode_for_size (precision, MODE_FRACT, 0));
325217ed
CF
2524 layout_type (type);
2525
2526 return type;
2527}
2528
2529/* Create and return a type for accum of PRECISION bits, UNSIGNEDP,
2530 and SATP. */
2531
2532tree
2533make_accum_type (int precision, int unsignedp, int satp)
2534{
2535 tree type = make_node (FIXED_POINT_TYPE);
2536
2537 TYPE_PRECISION (type) = precision;
2538
2539 if (satp)
2540 TYPE_SATURATING (type) = 1;
2541
2542 /* Lay out the type: set its alignment, size, etc. */
2543 if (unsignedp)
2544 {
2545 TYPE_UNSIGNED (type) = 1;
179d2f74 2546 SET_TYPE_MODE (type, mode_for_size (precision, MODE_UACCUM, 0));
325217ed
CF
2547 }
2548 else
179d2f74 2549 SET_TYPE_MODE (type, mode_for_size (precision, MODE_ACCUM, 0));
325217ed
CF
2550 layout_type (type);
2551
2552 return type;
2553}
2554
67b88453 2555/* Initialize sizetypes so layout_type can use them. */
fed3cef0
RK
2556
2557void
3b9e5d95 2558initialize_sizetypes (void)
fed3cef0 2559{
67b88453
RG
2560 int precision, bprecision;
2561
2562 /* Get sizetypes precision from the SIZE_TYPE target macro. */
18dae016 2563 if (strcmp (SIZETYPE, "unsigned int") == 0)
67b88453 2564 precision = INT_TYPE_SIZE;
18dae016 2565 else if (strcmp (SIZETYPE, "long unsigned int") == 0)
67b88453 2566 precision = LONG_TYPE_SIZE;
18dae016 2567 else if (strcmp (SIZETYPE, "long long unsigned int") == 0)
67b88453 2568 precision = LONG_LONG_TYPE_SIZE;
18dae016 2569 else if (strcmp (SIZETYPE, "short unsigned int") == 0)
b87ac615 2570 precision = SHORT_TYPE_SIZE;
67b88453 2571 else
78a7c317
DD
2572 {
2573 int i;
2574
2575 precision = -1;
2576 for (i = 0; i < NUM_INT_N_ENTS; i++)
2577 if (int_n_enabled_p[i])
2578 {
2579 char name[50];
2580 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
2581
2582 if (strcmp (name, SIZETYPE) == 0)
2583 {
2584 precision = int_n_data[i].bitsize;
2585 }
2586 }
2587 if (precision == -1)
2588 gcc_unreachable ();
2589 }
0ac11108 2590
67b88453
RG
2591 bprecision
2592 = MIN (precision + BITS_PER_UNIT_LOG + 1, MAX_FIXED_MODE_SIZE);
2593 bprecision
2594 = GET_MODE_PRECISION (smallest_mode_for_size (bprecision, MODE_INT));
49ab6098
KZ
2595 if (bprecision > HOST_BITS_PER_DOUBLE_INT)
2596 bprecision = HOST_BITS_PER_DOUBLE_INT;
67b88453
RG
2597
2598 /* Create stubs for sizetype and bitsizetype so we can create constants. */
2599 sizetype = make_node (INTEGER_TYPE);
f93fe5a0 2600 TYPE_NAME (sizetype) = get_identifier ("sizetype");
67b88453
RG
2601 TYPE_PRECISION (sizetype) = precision;
2602 TYPE_UNSIGNED (sizetype) = 1;
67b88453
RG
2603 bitsizetype = make_node (INTEGER_TYPE);
2604 TYPE_NAME (bitsizetype) = get_identifier ("bitsizetype");
2605 TYPE_PRECISION (bitsizetype) = bprecision;
2606 TYPE_UNSIGNED (bitsizetype) = 1;
67b88453
RG
2607
2608 /* Now layout both types manually. */
2609 SET_TYPE_MODE (sizetype, smallest_mode_for_size (precision, MODE_INT));
2610 TYPE_ALIGN (sizetype) = GET_MODE_ALIGNMENT (TYPE_MODE (sizetype));
2611 TYPE_SIZE (sizetype) = bitsize_int (precision);
2612 TYPE_SIZE_UNIT (sizetype) = size_int (GET_MODE_SIZE (TYPE_MODE (sizetype)));
807e902e 2613 set_min_and_max_values_for_integral_type (sizetype, precision, UNSIGNED);
67b88453
RG
2614
2615 SET_TYPE_MODE (bitsizetype, smallest_mode_for_size (bprecision, MODE_INT));
2616 TYPE_ALIGN (bitsizetype) = GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype));
2617 TYPE_SIZE (bitsizetype) = bitsize_int (bprecision);
2618 TYPE_SIZE_UNIT (bitsizetype)
2619 = size_int (GET_MODE_SIZE (TYPE_MODE (bitsizetype)));
807e902e 2620 set_min_and_max_values_for_integral_type (bitsizetype, bprecision, UNSIGNED);
7f18f917 2621
3b9e5d95 2622 /* Create the signed variants of *sizetype. */
67b88453 2623 ssizetype = make_signed_type (TYPE_PRECISION (sizetype));
f93fe5a0 2624 TYPE_NAME (ssizetype) = get_identifier ("ssizetype");
67b88453 2625 sbitsizetype = make_signed_type (TYPE_PRECISION (bitsizetype));
f93fe5a0 2626 TYPE_NAME (sbitsizetype) = get_identifier ("sbitsizetype");
fed3cef0
RK
2627}
2628\f
71d59383
RS
 2629/* TYPE is an integral type, i.e., an INTEGER_TYPE, ENUMERAL_TYPE
2630 or BOOLEAN_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
7b6d72fc
MM
 2631   for TYPE, based on PRECISION and the signedness SGN.  PRECISION
 2632   need not correspond to a width supported
2633 natively by the hardware; for example, on a machine with 8-bit,
2634 16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
2635 61. */
2636
2637void
2638set_min_and_max_values_for_integral_type (tree type,
2639 int precision,
807e902e 2640 signop sgn)
7b6d72fc 2641{
c0e081a9
RB
2642 /* For bitfields with zero width we end up creating integer types
2643 with zero precision. Don't assign any minimum/maximum values
2644 to those types, they don't have any valid value. */
2645 if (precision < 1)
2646 return;
2647
807e902e
KZ
2648 TYPE_MIN_VALUE (type)
2649 = wide_int_to_tree (type, wi::min_value (precision, sgn));
2650 TYPE_MAX_VALUE (type)
2651 = wide_int_to_tree (type, wi::max_value (precision, sgn));
7b6d72fc
MM
2652}
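
/* An illustrative example (added sketch): with PRECISION == 7, SIGNED
   yields TYPE_MIN_VALUE == -64 and TYPE_MAX_VALUE == 63, while
   UNSIGNED yields 0 and 127; no hardware-supported 7-bit mode is
   required.  */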
2653
4cc89e53 2654/* Set the extreme values of TYPE based on its precision in bits,
13756074 2655 then lay it out. Used when make_signed_type won't do
4cc89e53
RS
2656 because the tree code is not INTEGER_TYPE.
2657 E.g. for Pascal, when the -fsigned-char option is given. */
2658
2659void
46c5ad27 2660fixup_signed_type (tree type)
4cc89e53 2661{
b3694847 2662 int precision = TYPE_PRECISION (type);
4cc89e53 2663
807e902e 2664 set_min_and_max_values_for_integral_type (type, precision, SIGNED);
4cc89e53
RS
2665
2666 /* Lay out the type: set its alignment, size, etc. */
4cc89e53
RS
2667 layout_type (type);
2668}
2669
7306ed3f 2670/* Set the extreme values of TYPE based on its precision in bits,
13756074 2671 then lay it out. This is used both in `make_unsigned_type'
7306ed3f
JW
2672 and for enumeral types. */
2673
2674void
46c5ad27 2675fixup_unsigned_type (tree type)
7306ed3f 2676{
b3694847 2677 int precision = TYPE_PRECISION (type);
7306ed3f 2678
89b0433e 2679 TYPE_UNSIGNED (type) = 1;
f676971a 2680
807e902e 2681 set_min_and_max_values_for_integral_type (type, precision, UNSIGNED);
7306ed3f
JW
2682
2683 /* Lay out the type: set its alignment, size, etc. */
7306ed3f
JW
2684 layout_type (type);
2685}
2686\f
073a544d
RS
2687/* Construct an iterator for a bitfield that spans BITSIZE bits,
2688 starting at BITPOS.
2689
2690 BITREGION_START is the bit position of the first bit in this
2691 sequence of bit fields. BITREGION_END is the last bit in this
2692 sequence. If these two fields are non-zero, we should restrict the
2693 memory access to that range. Otherwise, we are allowed to touch
2694 any adjacent non bit-fields.
2695
2696 ALIGN is the alignment of the underlying object in bits.
2697 VOLATILEP says whether the bitfield is volatile. */
2698
2699bit_field_mode_iterator
2700::bit_field_mode_iterator (HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
2701 HOST_WIDE_INT bitregion_start,
2702 HOST_WIDE_INT bitregion_end,
2703 unsigned int align, bool volatilep)
65d3284b
RS
2704: m_mode (GET_CLASS_NARROWEST_MODE (MODE_INT)), m_bitsize (bitsize),
2705 m_bitpos (bitpos), m_bitregion_start (bitregion_start),
2706 m_bitregion_end (bitregion_end), m_align (align),
2707 m_volatilep (volatilep), m_count (0)
073a544d 2708{
65d3284b 2709 if (!m_bitregion_end)
8b7d5dab 2710 {
a37d67b6
EB
2711 /* We can assume that any aligned chunk of ALIGN bits that overlaps
2712 the bitfield is mapped and won't trap, provided that ALIGN isn't
2713 too large. The cap is the biggest required alignment for data,
 2714	 too large.  The cap is the biggest required alignment for data,
	 or at least the word size.  We also force at least one such chunk.  */
2715 unsigned HOST_WIDE_INT units
2716 = MIN (align, MAX (BIGGEST_ALIGNMENT, BITS_PER_WORD));
2717 if (bitsize <= 0)
2718 bitsize = 1;
65d3284b
RS
2719 m_bitregion_end = bitpos + bitsize + units - 1;
2720 m_bitregion_end -= m_bitregion_end % units + 1;
8b7d5dab 2721 }
073a544d
RS
2722}
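
/* An illustrative example (added sketch, assuming BIGGEST_ALIGNMENT
   and BITS_PER_WORD are at least 32): with bitsize == 5, bitpos == 35
   and align == 32, UNITS above is 32, so the default m_bitregion_end
   is 35 + 5 + 32 - 1 == 71, reduced by 71 % 32 + 1 to 63: the last
   bit of the aligned 32-bit chunk containing the field.  */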
2723
2724/* Calls to this function return successively larger modes that can be used
2725 to represent the bitfield. Return true if another bitfield mode is
2726 available, storing it in *OUT_MODE if so. */
2727
2728bool
2729bit_field_mode_iterator::next_mode (enum machine_mode *out_mode)
2730{
65d3284b 2731 for (; m_mode != VOIDmode; m_mode = GET_MODE_WIDER_MODE (m_mode))
073a544d 2732 {
65d3284b 2733 unsigned int unit = GET_MODE_BITSIZE (m_mode);
073a544d
RS
2734
2735 /* Skip modes that don't have full precision. */
65d3284b 2736 if (unit != GET_MODE_PRECISION (m_mode))
073a544d
RS
2737 continue;
2738
073a544d
RS
2739 /* Stop if the mode is too wide to handle efficiently. */
2740 if (unit > MAX_FIXED_MODE_SIZE)
2741 break;
2742
2743 /* Don't deliver more than one multiword mode; the smallest one
2744 should be used. */
65d3284b 2745 if (m_count > 0 && unit > BITS_PER_WORD)
073a544d
RS
2746 break;
2747
ec593a8f 2748 /* Skip modes that are too small. */
65d3284b
RS
2749 unsigned HOST_WIDE_INT substart = (unsigned HOST_WIDE_INT) m_bitpos % unit;
2750 unsigned HOST_WIDE_INT subend = substart + m_bitsize;
ec593a8f
RS
2751 if (subend > unit)
2752 continue;
2753
073a544d 2754 /* Stop if the mode goes outside the bitregion. */
65d3284b
RS
2755 HOST_WIDE_INT start = m_bitpos - substart;
2756 if (m_bitregion_start && start < m_bitregion_start)
073a544d 2757 break;
ec593a8f 2758 HOST_WIDE_INT end = start + unit;
65d3284b 2759 if (end > m_bitregion_end + 1)
8b7d5dab
RS
2760 break;
2761
2762 /* Stop if the mode requires too much alignment. */
65d3284b
RS
2763 if (GET_MODE_ALIGNMENT (m_mode) > m_align
2764 && SLOW_UNALIGNED_ACCESS (m_mode, m_align))
073a544d
RS
2765 break;
2766
65d3284b
RS
2767 *out_mode = m_mode;
2768 m_mode = GET_MODE_WIDER_MODE (m_mode);
2769 m_count++;
073a544d
RS
2770 return true;
2771 }
2772 return false;
2773}
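
/* An illustrative example (added sketch): for a 3-bit field at
   m_bitpos 5 in a bitregion covering bits [0, 31] with m_align == 32,
   successive calls yield QImode (substart 5, subend 8), then HImode,
   then SImode; the next candidate would extend past bit 31, so a
   further call returns false.  */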
2774
2775/* Return true if smaller modes are generally preferred for this kind
2776 of bitfield. */
2777
2778bool
2779bit_field_mode_iterator::prefer_smaller_modes ()
2780{
65d3284b 2781 return (m_volatilep
073a544d
RS
2782 ? targetm.narrow_volatile_bitfield ()
2783 : !SLOW_BYTE_ACCESS);
2784}
2785
7306ed3f
JW
2786/* Find the best machine mode to use when referencing a bit field of length
2787 BITSIZE bits starting at BITPOS.
2788
1169e45d
AH
2789 BITREGION_START is the bit position of the first bit in this
2790 sequence of bit fields. BITREGION_END is the last bit in this
2791 sequence. If these two fields are non-zero, we should restrict the
073a544d 2792 memory access to that range. Otherwise, we are allowed to touch
1169e45d
AH
2793 any adjacent non bit-fields.
2794
7306ed3f
JW
2795 The underlying object is known to be aligned to a boundary of ALIGN bits.
2796 If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
2797 larger than LARGEST_MODE (usually SImode).
2798
c2a64439 2799 If no mode meets all these conditions, we return VOIDmode.
0ac11108 2800
c2a64439
PB
2801 If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the
2802 smallest mode meeting these conditions.
2803
2804 If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the
2805 largest mode (but a mode no wider than UNITS_PER_WORD) that meets
2806 all the conditions.
0ac11108 2807
c2a64439
PB
2808 If VOLATILEP is true the narrow_volatile_bitfields target hook is used to
2809 decide which of the above modes should be used. */
7306ed3f
JW
2810
2811enum machine_mode
1169e45d
AH
2812get_best_mode (int bitsize, int bitpos,
2813 unsigned HOST_WIDE_INT bitregion_start,
2814 unsigned HOST_WIDE_INT bitregion_end,
2815 unsigned int align,
073a544d 2816 enum machine_mode largest_mode, bool volatilep)
7306ed3f 2817{
073a544d
RS
2818 bit_field_mode_iterator iter (bitsize, bitpos, bitregion_start,
2819 bitregion_end, align, volatilep);
2820 enum machine_mode widest_mode = VOIDmode;
7306ed3f 2821 enum machine_mode mode;
073a544d 2822 while (iter.next_mode (&mode)
00efe3ea
RS
2823 /* ??? For historical reasons, reject modes that would normally
2824 receive greater alignment, even if unaligned accesses are
2825 acceptable. This has both advantages and disadvantages.
8b7d5dab
RS
2826 Removing this check means that something like:
2827
2828 struct s { unsigned int x; unsigned int y; };
2829 int f (struct s *s) { return s->x == 0 && s->y == 0; }
2830
2831 can be implemented using a single load and compare on
2832 64-bit machines that have no alignment restrictions.
2833 For example, on powerpc64-linux-gnu, we would generate:
2834
2835 ld 3,0(3)
2836 cntlzd 3,3
2837 srdi 3,3,6
2838 blr
2839
2840 rather than:
2841
2842 lwz 9,0(3)
2843 cmpwi 7,9,0
2844 bne 7,.L3
2845 lwz 3,4(3)
2846 cntlzw 3,3
2847 srwi 3,3,5
2848 extsw 3,3
2849 blr
2850 .p2align 4,,15
2851 .L3:
2852 li 3,0
2853 blr
2854
2855 However, accessing more than one field can make life harder
2856 for the gimple optimizers. For example, gcc.dg/vect/bb-slp-5.c
2857 has a series of unsigned short copies followed by a series of
2858 unsigned short comparisons. With this check, both the copies
2859 and comparisons remain 16-bit accesses and FRE is able
2860 to eliminate the latter. Without the check, the comparisons
2861 can be done using 2 64-bit operations, which FRE isn't able
2862 to handle in the same way.
2863
2864 Either way, it would probably be worth disabling this check
2865 during expand. One particular example where removing the
2866 check would help is the get_best_mode call in store_bit_field.
2867 If we are given a memory bitregion of 128 bits that is aligned
2868 to a 64-bit boundary, and the bitfield we want to modify is
2869 in the second half of the bitregion, this check causes
2870 store_bitfield to turn the memory into a 64-bit reference
2871 to the _first_ half of the region. We later use
2872 adjust_bitfield_address to get a reference to the correct half,
2873 but doing so looks to adjust_bitfield_address as though we are
2874 moving past the end of the original object, so it drops the
2875 associated MEM_EXPR and MEM_OFFSET. Removing the check
2876 causes store_bit_field to keep a 128-bit memory reference,
2877 so that the final bitfield reference still has a MEM_EXPR
2878 and MEM_OFFSET. */
00efe3ea 2879 && GET_MODE_ALIGNMENT (mode) <= align
073a544d
RS
2880 && (largest_mode == VOIDmode
2881 || GET_MODE_SIZE (mode) <= GET_MODE_SIZE (largest_mode)))
7306ed3f 2882 {
073a544d
RS
2883 widest_mode = mode;
2884 if (iter.prefer_smaller_modes ())
7306ed3f
JW
2885 break;
2886 }
073a544d 2887 return widest_mode;
7306ed3f 2888}
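
/* An illustrative example (added sketch; the outcome is
   target-dependent): for a 3-bit field at bit position 5 in a
   4-byte-aligned object with no bitregion restriction and
   LARGEST_MODE == word_mode, get_best_mode typically returns QImode
   when !SLOW_BYTE_ACCESS, or the widest acceptable mode up to the
   word size, such as SImode, when SLOW_BYTE_ACCESS holds.  */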
d7db6646 2889
50654f6c 2890/* Get the minimal and maximal values for MODE (signed or unsigned depending on
0aea6467 2891 SIGN). The returned constants are made to be usable in TARGET_MODE. */
50654f6c
ZD
2892
2893void
0aea6467
ZD
2894get_mode_bounds (enum machine_mode mode, int sign,
2895 enum machine_mode target_mode,
2896 rtx *mmin, rtx *mmax)
50654f6c 2897{
e1a55837 2898 unsigned size = GET_MODE_PRECISION (mode);
0aea6467 2899 unsigned HOST_WIDE_INT min_val, max_val;
50654f6c 2900
41374e13 2901 gcc_assert (size <= HOST_BITS_PER_WIDE_INT);
50654f6c 2902
c15677b6
JJ
2903 /* Special case BImode, which has values 0 and STORE_FLAG_VALUE. */
2904 if (mode == BImode)
2905 {
2906 if (STORE_FLAG_VALUE < 0)
2907 {
2908 min_val = STORE_FLAG_VALUE;
2909 max_val = 0;
2910 }
2911 else
2912 {
2913 min_val = 0;
2914 max_val = STORE_FLAG_VALUE;
2915 }
2916 }
2917 else if (sign)
50654f6c 2918 {
0aea6467
ZD
2919 min_val = -((unsigned HOST_WIDE_INT) 1 << (size - 1));
2920 max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1;
50654f6c
ZD
2921 }
2922 else
2923 {
0aea6467
ZD
2924 min_val = 0;
2925 max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1;
50654f6c 2926 }
0aea6467 2927
bb80db7b
KH
2928 *mmin = gen_int_mode (min_val, target_mode);
2929 *mmax = gen_int_mode (max_val, target_mode);
50654f6c
ZD
2930}
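
/* An illustrative example (added sketch): get_mode_bounds (QImode, 1,
   SImode, &mmin, &mmax) sets MMIN and MMAX to CONST_INTs -128 and 127,
   while SIGN == 0 gives 0 and 255, both represented in SImode.  */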
2931
e2500fed 2932#include "gt-stor-layout.h"