/* Structure for saving state for a nested function.
   Copyright (C) 1989-2025 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_FUNCTION_H
#define GCC_FUNCTION_H


/* Stack of pending (incomplete) sequences saved by `start_sequence'.
   Each element describes one pending sequence.
   The main insn-chain is saved in the last element of the chain,
   unless the chain is empty.  */

struct GTY(()) sequence_stack {
  /* First and last insns in the chain of the saved sequence.  */
  rtx_insn *first;
  rtx_insn *last;
  struct sequence_stack *next;
};
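
/* Illustrative (simplified) sketch of how the sequence machinery is
   typically used elsewhere in GCC; the emitted insn and the insertion
   point INSN are placeholders, not a real pattern:

     start_sequence ();
     emit_insn (...);                  start filling a nested sequence
     rtx_insn *seq = get_insns ();     grab the nested insn chain
     end_sequence ();                  pop back to the enclosing sequence
     emit_insn_before (seq, insn);     splice the chain before INSN

   start_sequence, get_insns, end_sequence and emit_insn_before are
   declared in rtl.h.  */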
\f
struct GTY(()) emit_status {
  void ensure_regno_capacity ();

  /* This is reset to LAST_VIRTUAL_REGISTER + 1 at the start of each function.
     After rtl generation, it is 1 plus the largest register number used.  */
  int x_reg_rtx_no;

  /* Lowest label number in current function.  */
  int x_first_label_num;

  /* seq.first and seq.last are the ends of the doubly-linked chain of
     rtl for the current function.  Both are reset to null at the
     start of rtl generation for the function.

     start_sequence saves both of these on seq.next and then starts
     a new, nested sequence of insns.

     seq.next is a stack of pending (incomplete) sequences saved by
     start_sequence.  Each element describes one pending sequence.
     The main insn-chain is the last element of the chain.  */
  struct sequence_stack seq;

  /* INSN_UID for next insn emitted.
     Reset to 1 for each function compiled.  */
  int x_cur_insn_uid;

  /* INSN_UID for next debug insn emitted.  Only used if
     --param min-nondebug-insn-uid=<value> is given with nonzero value.  */
  int x_cur_debug_insn_uid;

  /* The length of the regno_pointer_align, regno_decl, and x_regno_reg_rtx
     vectors.  Since these vectors are needed during the expansion phase when
     the total number of registers in the function is not yet known, the
     vectors are copied and made bigger when necessary.  */
  int regno_pointer_align_length;

  /* Indexed by pseudo register number, if nonzero gives the known alignment
     for that pseudo (if REG_POINTER is set in x_regno_reg_rtx).
     Allocated in parallel with x_regno_reg_rtx.  */
  unsigned char * GTY((skip)) regno_pointer_align;
};


/* Indexed by register number, gives an rtx for that register (and only
   that register).  For pseudo registers, it is the unique rtx for
   that pseudo.  For hard registers, it is an rtx of the mode specified
   by reg_raw_mode.

   FIXME: We could put it into emit_status struct, but gengtype is not
   able to deal with length attribute nested in top level structures.  */

extern GTY ((length ("crtl->emit.x_reg_rtx_no"))) rtx * regno_reg_rtx;

/* For backward compatibility... eventually these should all go away.  */
#define reg_rtx_no (crtl->emit.x_reg_rtx_no)

#define REGNO_POINTER_ALIGN(REGNO) (crtl->emit.regno_pointer_align[REGNO])
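
/* Illustrative (simplified) sketch: given a pseudo register number
   REGNO, its unique rtx and, when REG_POINTER is set, its recorded
   pointer alignment can be looked up as

     rtx reg = regno_reg_rtx[regno];
     unsigned int align = REGNO_POINTER_ALIGN (regno);  */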

struct GTY(()) expr_status {
  /* Number of units that we should eventually pop off the stack.
     These are the arguments to function calls that have already returned.  */
  poly_int64 x_pending_stack_adjust;

  /* Under some ABIs, it is the caller's responsibility to pop arguments
     pushed for function calls.  A naive implementation would simply pop
     the arguments immediately after each call.  However, if several
     function calls are made in a row, it is typically cheaper to pop
     all the arguments after all of the calls are complete since a
     single pop instruction can be used.  Therefore, GCC attempts to
     defer popping the arguments until absolutely necessary.  (For
     example, at the end of a conditional, the arguments must be popped,
     since code outside the conditional won't know whether or not the
     arguments need to be popped.)

     When INHIBIT_DEFER_POP is nonzero, however, the compiler does not
     attempt to defer pops.  Instead, the stack is popped immediately
     after each call.  Rather than setting this variable directly, use
     NO_DEFER_POP and OK_DEFER_POP.  */
  int x_inhibit_defer_pop;
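
  /* Illustrative (simplified) sketch: NO_DEFER_POP and OK_DEFER_POP
     (defined in expr.h) bracket a region in which argument pops must
     not be deferred:

       NO_DEFER_POP;
       ... expand code whose argument pops must happen immediately ...
       OK_DEFER_POP;  */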

  /* If PREFERRED_STACK_BOUNDARY and PUSH_ROUNDING are defined, the stack
     boundary can be momentarily unaligned while pushing the arguments.
     Record the delta since last aligned boundary here in order to get
     stack alignment in the nested function calls working right.  */
  poly_int64 x_stack_pointer_delta;

  /* Nonzero means __builtin_saveregs has already been done in this function.
     The value is the pseudoreg containing the value __builtin_saveregs
     returned.  */
  rtx x_saveregs_value;

  /* Similarly for __builtin_apply_args.  */
  rtx x_apply_args_value;

  /* List of labels that must never be deleted.  */
  vec<rtx_insn *, va_gc> *x_forced_labels;
};

typedef struct call_site_record_d *call_site_record;

/* RTL representation of exception handling.  */
struct GTY(()) rtl_eh {
  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx_code_label *ehr_label;

  rtx sjlj_fc;
  rtx_insn *sjlj_exit_after;

  vec<uchar, va_gc> *action_record_data;

  vec<call_site_record, va_gc> *call_site_record_v[2];
};

#define pending_stack_adjust (crtl->expr.x_pending_stack_adjust)
#define inhibit_defer_pop (crtl->expr.x_inhibit_defer_pop)
#define saveregs_value (crtl->expr.x_saveregs_value)
#define apply_args_value (crtl->expr.x_apply_args_value)
#define forced_labels (crtl->expr.x_forced_labels)
#define stack_pointer_delta (crtl->expr.x_stack_pointer_delta)

struct gimple_df;
struct call_site_record_d;
struct dw_fde_node;
class range_query;

struct GTY(()) varasm_status {
  /* If we're using a per-function constant pool, this is it.  */
  struct rtx_constant_pool *pool;

  /* Number of tree-constants deferred during the expansion of this
     function.  */
  unsigned int deferred_constants;
};


/* Data for function partitioning.  */
struct GTY(()) function_subsections {
  /* Assembly labels for the hot and cold text sections, to
     be used by debugger functions for determining the size of text
     sections.  */

  const char *hot_section_label;
  const char *cold_section_label;
  const char *hot_section_end_label;
  const char *cold_section_end_label;
};

/* Describe an empty area of space in the stack frame.  These can be chained
   into a list; this is used to keep track of space wasted for alignment
   reasons.  */
class GTY(()) frame_space
{
public:
  class frame_space *next;

  poly_int64 start;
  poly_int64 length;
};

/* Describe emitted calls for -fcallgraph-info.  */
struct GTY(()) callinfo_callee
{
  location_t location;
  tree decl;
};

/* Describe dynamic allocation for -fcallgraph-info=da.  */
struct GTY(()) callinfo_dalloc
{
  location_t location;
  char const *name;
};

class GTY(()) stack_usage
{
public:
  /* # of bytes of static stack space allocated by the function.  */
  HOST_WIDE_INT static_stack_size;

  /* # of bytes of dynamic stack space allocated by the function.  This is
     meaningful only if has_unbounded_dynamic_stack_size is zero.  */
  HOST_WIDE_INT dynamic_stack_size;

  /* Upper bound on the number of bytes pushed onto the stack after the
     prologue.  If !ACCUMULATE_OUTGOING_ARGS, it contains the outgoing
     arguments.  */
  poly_int64 pushed_stack_size;

  /* Nonzero if the amount of stack space allocated dynamically cannot
     be bounded at compile-time.  */
  unsigned int has_unbounded_dynamic_stack_size : 1;

  /* Functions called within the function, if -fcallgraph-info is
     enabled.  */
  vec<callinfo_callee, va_gc> *callees;

  /* Dynamic allocations encountered within the function, if
     -fcallgraph-info=da is enabled.  */
  vec<callinfo_dalloc, va_gc> *dallocs;
};

#define current_function_static_stack_size (cfun->su->static_stack_size)
#define current_function_dynamic_stack_size (cfun->su->dynamic_stack_size)
#define current_function_pushed_stack_size (cfun->su->pushed_stack_size)
#define current_function_has_unbounded_dynamic_stack_size \
  (cfun->su->has_unbounded_dynamic_stack_size)
#define current_function_allocates_dynamic_stack_space \
  (current_function_dynamic_stack_size != 0 \
   || current_function_has_unbounded_dynamic_stack_size)

/* This structure can save all the important global and static variables
   describing the status of the current function.  */

struct GTY(()) function {
  struct eh_status *eh;

  /* The control flow graph for this function.  */
  struct control_flow_graph *cfg;

  /* GIMPLE body for this function.  */
  gimple_seq gimple_body;

  /* SSA and dataflow information.  */
  struct gimple_df *gimple_df;

  /* The loops in this function.  */
  struct loops *x_current_loops;

  /* Filled in by the GIMPLE and RTL front ends; names the pass to
     start compilation with.  */
  char *pass_startwith;

  /* The stack usage of this function.  */
  class stack_usage *su;

  /* Value histograms attached to particular statements.  */
  htab_t GTY((skip)) value_histograms;

  /* Annotated gconds so that basic conditions in the same expression map to
     the same uid.  This is used for condition coverage.  */
  hash_map <gcond*, unsigned> *GTY((skip)) cond_uids;

  /* For function.cc.  */

  /* Points to the FUNCTION_DECL of this function.  */
  tree decl;

  /* A PARM_DECL that should contain the static chain for this function.
     It will be initialized at the beginning of the function.  */
  tree static_chain_decl;

  /* An expression that contains the non-local goto save area.  The first
     word is the saved frame pointer and the second is the saved stack
     pointer.  */
  tree nonlocal_goto_save_area;

  /* Vector of function local variables, functions, types and constants.  */
  vec<tree, va_gc> *local_decls;

  /* For md files.  */

  /* tm.h can use this to store whatever it likes.  */
  struct machine_function * GTY ((maybe_undef)) machine;

  /* Language-specific code can use this to store whatever it likes.  */
  struct language_function * language;

  /* Used types hash table.  */
  hash_set<tree> *GTY (()) used_types_hash;

  /* Dwarf2 Frame Description Entry, containing the Call Frame Instructions
     used for unwinding.  Only set when either dwarf2 unwinding or dwarf2
     debugging is enabled.  */
  struct dw_fde_node *fde;

  /* Range query mechanism for functions.  The default is to pick up
     global ranges.  If, on the other hand, a pass wants on-demand
     ranges, it must call enable_ranger/disable_ranger.  The pointer is
     never null.  It should be queried by calling get_range_query.  */
  range_query * GTY ((skip)) x_range_query;

  /* Last statement uid.  */
  int last_stmt_uid;

  /* Debug marker counter.  Count begin stmt markers.  We don't have
     to keep it exact, it's more of a rough estimate to enable us to
     decide whether they are too many to copy during inlining, or when
     expanding to RTL.  */
  int debug_marker_count;

  /* Function sequence number for profiling, debugging, etc.  */
  int funcdef_no;

  /* Line number of the start of the function for debugging purposes.  */
  location_t function_start_locus;

  /* Line number of the end of the function.  */
  location_t function_end_locus;

  /* Properties used by the pass manager.  */
  unsigned int curr_properties;

  /* Different from normal TODO_flags which are handled right at the
     beginning or the end of one pass execution, the pending_TODOs
     are passed down in the pipeline until one of its consumers can
     perform the requested action.  Consumers should then clear the
     flags for the actions that they have taken.  */
  unsigned int pending_TODOs;

  /* Non-null if the function does something that would prevent it from
     being copied; this applies to both versioning and inlining.  Set to
     a string describing the reason for failure.  */
  const char * GTY((skip)) cannot_be_copied_reason;

  /* Last assigned dependence info clique.  */
  unsigned short last_clique;

  /* Collected bit flags.  */

  /* Number of units of general registers that need saving in stdarg
     function.  The unit depends on the backend: it is either a number
     of bytes or a number of registers.  */
  unsigned int va_list_gpr_size : 8;

  /* Number of units of floating point registers that need saving in stdarg
     function.  */
  unsigned int va_list_fpr_size : 8;

  /* Nonzero if function being compiled can call setjmp.  */
  unsigned int calls_setjmp : 1;

  /* Nonzero if function being compiled can call alloca,
     either as a subroutine or builtin.  */
  unsigned int calls_alloca : 1;

  /* Nonzero if function being compiled can call __builtin_eh_return.  */
  unsigned int calls_eh_return : 1;

  /* Nonzero if function being compiled receives nonlocal gotos
     from nested functions.  */
  unsigned int has_nonlocal_label : 1;

  /* Nonzero if function being compiled has a forced label
     placed into static storage.  */
  unsigned int has_forced_label_in_static : 1;

  /* Nonzero if we've set cannot_be_copied_reason.  I.e. if
     (cannot_be_copied_set && !cannot_be_copied_reason), the function
     can in fact be copied.  */
  unsigned int cannot_be_copied_set : 1;

  /* Nonzero if current function uses stdarg.h or equivalent.  */
  unsigned int stdarg : 1;

  unsigned int after_inlining : 1;
  unsigned int always_inline_functions_inlined : 1;

  /* Nonzero if function being compiled can throw synchronous non-call
     exceptions.  */
  unsigned int can_throw_non_call_exceptions : 1;

  /* Nonzero if instructions that may throw exceptions but don't otherwise
     contribute to the execution of the program can be deleted.  */
  unsigned int can_delete_dead_exceptions : 1;

  /* Fields below this point are not set for abstract functions; see
     allocate_struct_function.  */

  /* Nonzero if function being compiled needs to be given an address
     where the value should be stored.  */
  unsigned int returns_struct : 1;

  /* Nonzero if function being compiled needs to
     return the address of where it has put a structure value.  */
  unsigned int returns_pcc_struct : 1;

  /* Nonzero if this function has local DECL_HARD_REGISTER variables.
     In this case code motion has to be done more carefully.  */
  unsigned int has_local_explicit_reg_vars : 1;

  /* Nonzero if the current function is a thunk, i.e., a lightweight
     function (implemented by the output_mi_thunk hook) that just
     adjusts one of its arguments and forwards to another
     function.  */
  unsigned int is_thunk : 1;

  /* Nonzero if the current function contains any loops with
     loop->force_vectorize set.  */
  unsigned int has_force_vectorize_loops : 1;

  /* Nonzero if the current function contains any loops with
     nonzero value in loop->simduid.  */
  unsigned int has_simduid_loops : 1;

  /* Nonzero when the tail call has been identified.  */
  unsigned int tail_call_marked : 1;

  /* Nonzero if the function has calls marked musttail.  */
  unsigned int has_musttail : 1;

  /* Nonzero if the current function contains a #pragma GCC unroll.  */
  unsigned int has_unroll : 1;

  /* Set when the function was compiled with generation of debug
     (begin stmt, inline entry, ...) markers enabled.  */
  unsigned int debug_nonbind_markers : 1;

  /* Set if this is a coroutine-related function.  */
  unsigned int coroutine_component : 1;

  /* Set if there are any OMP_TARGET regions in the function.  */
  unsigned int has_omp_target : 1;

  /* Set for artificial function created for [[assume (cond)]].
     These should be GIMPLE optimized, but not expanded to RTL.  */
  unsigned int assume_function : 1;

  /* Nonzero if reload will have to split basic blocks.  */
  unsigned int split_basic_blocks_after_reload : 1;
};

/* Add the decl D to the local_decls list of FUN.  */

void add_local_decl (struct function *fun, tree d);

#define FOR_EACH_LOCAL_DECL(FUN, I, D) \
  FOR_EACH_VEC_SAFE_ELT_REVERSE ((FUN)->local_decls, I, D)
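
/* Illustrative (simplified) sketch: walk the local decls of the
   current function, picking out the variables:

     unsigned ix;
     tree decl;
     FOR_EACH_LOCAL_DECL (cfun, ix, decl)
       if (VAR_P (decl))
         ... process decl ...  */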

/* Record a final call to CALLEE at LOCATION.  */
void record_final_call (tree callee, location_t location);

/* Record a dynamic allocation made for DECL_OR_EXP.  */
void record_dynamic_alloc (tree decl_or_exp);

/* If va_list_[gf]pr_size is set to this, it means we don't know how
   many units need to be saved.  */
#define VA_LIST_MAX_GPR_SIZE 255
#define VA_LIST_MAX_FPR_SIZE 255

/* The function currently being compiled.  */
extern GTY(()) struct function *cfun;

/* In order to ensure that cfun is not set directly, we redefine it so
   that it is not an lvalue.  Rather than assign to cfun, use
   push_cfun or set_cfun.  */
#define cfun (cfun + 0)
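
/* Illustrative (simplified) sketch: temporarily switch to another
   function's context and back:

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ... work with cfun pointing at fndecl's struct function ...
     pop_cfun ();  */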

/* Nonzero if we've already converted virtual regs to hard regs.  */
extern int virtuals_instantiated;

/* Nonzero if at least one trampoline has been created.  */
extern int trampolines_created;

struct GTY((for_user)) types_used_by_vars_entry {
  tree type;
  tree var_decl;
};

struct used_type_hasher : ggc_ptr_hash<types_used_by_vars_entry>
{
  static hashval_t hash (types_used_by_vars_entry *);
  static bool equal (types_used_by_vars_entry *, types_used_by_vars_entry *);
};

/* Hash table recording the relationship between a global variable
   and the types it references in its initializer.  The key of the
   entry is a referenced type, and the value is the DECL of the global
   variable.  used_type_hasher::hash and used_type_hasher::equal above
   are the hash and equality functions used for this hash table.  */
extern GTY(()) hash_table<used_type_hasher> *types_used_by_vars_hash;

void types_used_by_var_decl_insert (tree type, tree var_decl);

/* During parsing of a global variable, this vector contains the types
   referenced by the global variable.  */
extern GTY(()) vec<tree, va_gc> *types_used_by_cur_var_decl;


/* Return the loop tree of FN.  */

inline struct loops *
loops_for_fn (struct function *fn)
{
  return fn->x_current_loops;
}

/* Set the loop tree of FN to LOOPS.  */

inline void
set_loops_for_fn (struct function *fn, struct loops *loops)
{
  gcc_checking_assert (fn->x_current_loops == NULL || loops == NULL);
  fn->x_current_loops = loops;
}

/* Get a new unique dependence clique or zero if none is left.  */

inline unsigned short
get_new_clique (function *fn)
{
  unsigned short clique = fn->last_clique + 1;
  if (clique != 0)
    fn->last_clique = clique;
  return clique;
}

/* For backward compatibility... eventually these should all go away.  */
#define current_function_funcdef_no (cfun->funcdef_no)

#define current_loops (cfun->x_current_loops)
#define dom_computed (cfun->cfg->x_dom_computed)
#define n_bbs_in_dom_tree (cfun->cfg->x_n_bbs_in_dom_tree)
#define VALUE_HISTOGRAMS(fun) (fun)->value_histograms

/* A pointer to a function to create target specific, per-function
   data structures.  */
extern struct machine_function * (*init_machine_status) (void);

/* Structure to record the size of a sequence of arguments
   as the sum of a tree-expression and a constant.  This structure is
   also used to store offsets from the stack, which might be negative,
   so the variable part must be ssizetype, not sizetype.  */

struct args_size
{
  poly_int64 constant;
  tree var;
};

/* Package up various arg related fields of struct args for
   locate_and_pad_parm.  */
struct locate_and_pad_arg_data
{
  /* Size of this argument on the stack, rounded up for any padding it
     gets.  If REG_PARM_STACK_SPACE is defined, then register parms are
     counted here, otherwise they aren't.  */
  struct args_size size;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Offset to the start of the stack slot.  Different from OFFSET
     if this arg pads downward.  */
  struct args_size slot_offset;
  /* The amount that the stack pointer needs to be adjusted to
     force alignment for the next argument.  */
  struct args_size alignment_pad;
  /* Which way we should pad this arg.  */
  pad_direction where_pad;
  /* slot_offset is at least this aligned.  */
  unsigned int boundary;
};

/* Add the value of the tree INC to the `struct args_size' TO.  */

#define ADD_PARM_SIZE(TO, INC) \
do { \
  tree inc = (INC); \
  if (tree_fits_shwi_p (inc)) \
    (TO).constant += tree_to_shwi (inc); \
  else if ((TO).var == 0) \
    (TO).var = fold_convert (ssizetype, inc); \
  else \
    (TO).var = size_binop (PLUS_EXPR, (TO).var, \
			   fold_convert (ssizetype, inc)); \
} while (0)
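
/* Illustrative (simplified) sketch: accumulate a parameter's size
   into an args_size and then lower the implicit sum with
   ARGS_SIZE_TREE (defined below); TYPE here is a placeholder for
   some parameter type:

     struct args_size size;
     size.constant = 0;
     size.var = NULL_TREE;
     ADD_PARM_SIZE (size, TYPE_SIZE_UNIT (type));
     tree total = ARGS_SIZE_TREE (size);

   If TYPE_SIZE_UNIT folds to a constant that fits a shwi, only
   size.constant changes; otherwise the residue accumulates in
   size.var as an ssizetype expression.  */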

#define SUB_PARM_SIZE(TO, DEC) \
do { \
  tree dec = (DEC); \
  if (tree_fits_shwi_p (dec)) \
    (TO).constant -= tree_to_shwi (dec); \
  else if ((TO).var == 0) \
    (TO).var = size_binop (MINUS_EXPR, ssize_int (0), \
			   fold_convert (ssizetype, dec)); \
  else \
    (TO).var = size_binop (MINUS_EXPR, (TO).var, \
			   fold_convert (ssizetype, dec)); \
} while (0)

/* Convert the implicit sum in a `struct args_size' into a tree
   of type ssizetype.  */
#define ARGS_SIZE_TREE(SIZE) \
((SIZE).var == 0 ? ssize_int ((SIZE).constant) \
 : size_binop (PLUS_EXPR, fold_convert (ssizetype, (SIZE).var), \
	       ssize_int ((SIZE).constant)))

/* Convert the implicit sum in a `struct args_size' into an rtx.  */
#define ARGS_SIZE_RTX(SIZE) \
((SIZE).var == 0 ? gen_int_mode ((SIZE).constant, Pmode) \
 : expand_normal (ARGS_SIZE_TREE (SIZE)))

#define ASLK_REDUCE_ALIGN 1
#define ASLK_RECORD_PAD 2

/* If pointers to member functions use the least significant bit to
   indicate whether a function is virtual, ensure a pointer
   to this function will have that bit clear.  */
#define MINIMUM_METHOD_BOUNDARY \
  ((TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn) \
   ? MAX (FUNCTION_BOUNDARY, 2 * BITS_PER_UNIT) : FUNCTION_BOUNDARY)

enum stack_clash_probes {
  NO_PROBE_NO_FRAME,
  NO_PROBE_SMALL_FRAME,
  PROBE_INLINE,
  PROBE_LOOP
};

extern void dump_stack_clash_frame_info (enum stack_clash_probes, bool);
\f

extern void push_function_context (void);
extern void pop_function_context (void);

/* Save and restore status information for a nested function.  */
extern void free_after_parsing (struct function *);
extern void free_after_compilation (struct function *);

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */
extern poly_int64 get_frame_size (void);

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */
extern bool frame_offset_overflow (poly_int64, tree);

extern unsigned int spill_slot_alignment (machine_mode);

extern rtx assign_stack_local_1 (machine_mode, poly_int64, int, int);
extern rtx assign_stack_local (machine_mode, poly_int64, int);
extern rtx assign_stack_temp_for_type (machine_mode, poly_int64, tree);
extern rtx assign_stack_temp (machine_mode, poly_int64);
extern rtx assign_temp (tree, int, int);
extern void update_temp_slot_address (rtx, rtx);
extern void preserve_temp_slots (rtx);
extern void free_temp_slots (void);
extern void push_temp_slots (void);
extern void pop_temp_slots (void);
extern void init_temp_slots (void);
extern rtx get_hard_reg_initial_reg (rtx);
extern rtx get_hard_reg_initial_val (machine_mode, unsigned int);
extern rtx has_hard_reg_initial_val (machine_mode, unsigned int);

/* Called from gimple_expand_cfg.  */
extern void emit_initial_value_sets (void);

extern bool initial_value_entry (int i, rtx *, rtx *);
extern void instantiate_decl_rtl (rtx x);
extern bool aggregate_value_p (const_tree, const_tree);
extern bool use_register_for_decl (const_tree);
extern gimple_seq gimplify_parameters (gimple_seq *);
extern void locate_and_pad_parm (machine_mode, tree, int, int, int,
				 tree, struct args_size *,
				 struct locate_and_pad_arg_data *);
extern void generate_setjmp_warnings (void);

/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
extern void reorder_blocks (void);
extern void clear_block_marks (tree);
extern tree blocks_nreverse (tree);
extern tree block_chainon (tree, tree);

/* Set BLOCK_NUMBER for all the blocks in FN.  */
extern void number_blocks (tree);

/* cfun shouldn't be set directly; use one of these functions instead.  */
extern void set_cfun (struct function *new_cfun, bool force = false);
extern void push_cfun (struct function *new_cfun);
extern void pop_cfun (void);
extern void push_function_decl (tree, bool = false);
extern void pop_function_decl (void);

extern int get_next_funcdef_no (void);
extern int get_last_funcdef_no (void);
extern void allocate_struct_function (tree, bool);
extern void push_struct_function (tree fndecl, bool = false);
extern void push_dummy_function (bool);
extern void pop_dummy_function (void);
extern void init_dummy_function_start (void);
extern void init_function_start (tree);
extern void stack_protect_epilogue (void);
extern void expand_function_start (tree);
extern void expand_dummy_function_end (void);

extern void thread_prologue_and_epilogue_insns (void);
extern void diddle_return_value (void (*)(rtx, void*), void*);
extern void clobber_return_register (void);
extern void expand_function_end (void);
extern rtx get_arg_pointer_save_area (void);
extern void maybe_copy_prologue_epilogue_insn (rtx, rtx);
extern bool prologue_contains (const rtx_insn *);
extern bool epilogue_contains (const rtx_insn *);
extern bool prologue_epilogue_contains (const rtx_insn *);
extern void record_prologue_seq (rtx_insn *);
extern void record_epilogue_seq (rtx_insn *);
extern void emit_return_into_block (bool simple_p, basic_block bb);
extern void set_return_jump_label (rtx_insn *);
extern bool active_insn_between (rtx_insn *head, rtx_insn *tail);
extern vec<edge> convert_jumps_to_returns (basic_block last_bb, bool simple_p,
					   vec<edge> unconverted);
extern basic_block emit_return_for_exit (edge exit_fallthru_edge,
					 bool simple_p);
extern void reposition_prologue_and_epilogue_notes (void);
extern poly_int64 get_stack_dynamic_offset ();

/* Returns the name of the current function.  */
extern const char *fndecl_name (tree);
extern const char *function_name (const function *);
extern const char *current_function_name (void);

extern void used_types_insert (tree);

extern bool currently_expanding_function_start;

#endif /* GCC_FUNCTION_H */