/* Definitions for code generation pass of GNU compiler.
   Copyright (C) 1987-2025 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_EXPR_H
#define GCC_EXPR_H

/* This is the 4th arg to `expand_expr'.
   EXPAND_STACK_PARM means we are possibly expanding a call param onto
   the stack.
   EXPAND_SUM means it is ok to return a PLUS rtx or MULT rtx.
   EXPAND_INITIALIZER is similar but also records any labels on forced_labels.
   EXPAND_CONST_ADDRESS means it is ok to return a MEM whose address
   is a constant that is not a legitimate address.
   EXPAND_WRITE means we are only going to write to the resulting rtx.
   EXPAND_MEMORY means we are interested in a memory result, even if
   the memory is constant and we could have propagated a constant value,
   or the memory is unaligned on a STRICT_ALIGNMENT target.  */
enum expand_modifier {EXPAND_NORMAL = 0, EXPAND_STACK_PARM, EXPAND_SUM,
                      EXPAND_CONST_ADDRESS, EXPAND_INITIALIZER, EXPAND_WRITE,
                      EXPAND_MEMORY};
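
/* Illustrative sketch: a caller that can cope with an un-simplified address
   might ask for EXPAND_SUM, e.g.

     rtx addr = expand_expr (build_fold_addr_expr (exp), NULL_RTX,
                             ptr_mode, EXPAND_SUM);

   and may then be handed a PLUS or MULT rtx rather than a single register;
   the exact form depends on the expression and the target.  EXP here is a
   hypothetical tree.  */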
37 | ||
38 | /* Prevent the compiler from deferring stack pops. See | |
39 | inhibit_defer_pop for more information. */ | |
40 | #define NO_DEFER_POP (inhibit_defer_pop += 1) | |
41 | ||
42 | /* Allow the compiler to defer stack pops. See inhibit_defer_pop for | |
43 | more information. */ | |
44 | #define OK_DEFER_POP (inhibit_defer_pop -= 1) | |
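
/* Illustrative sketch: the two macros are used as a nesting pair around code
   during which deferred stack pops must not be emitted, e.g.

     NO_DEFER_POP;
     ... emit insns that need an up-to-date stack pointer ...
     OK_DEFER_POP;

   Since inhibit_defer_pop is a counter, every NO_DEFER_POP must be matched
   by an OK_DEFER_POP.  */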

/* This structure is used to pass around information about exploded
   unary, binary and ternary expressions between expand_expr_real_1 and
   friends.  */
typedef struct separate_ops
{
  enum tree_code code;
  location_t location;
  tree type;
  tree op0, op1, op2;
} *sepops;

typedef const struct separate_ops *const_sepops;
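
/* Illustrative sketch: expand_expr_real_2 consumes one of these exploded
   expressions, e.g. for a binary operation:

     struct separate_ops ops;
     ops.code = PLUS_EXPR;
     ops.location = loc;
     ops.type = type;
     ops.op0 = lhs;
     ops.op1 = rhs;
     ops.op2 = NULL_TREE;
     rtx res = expand_expr_real_2 (&ops, target, mode, EXPAND_NORMAL);

   The field names are real; loc, type, lhs, rhs, target and mode are
   hypothetical local variables.  */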

/* This is run during target initialization to set up which modes can be
   used directly in memory and to initialize the block move optab.  */
extern void init_expr_target (void);

/* This is run at the start of compiling a function.  */
extern void init_expr (void);

/* Emit some rtl insns to move data between rtx's, converting machine modes.
   Both modes must be floating or both fixed.  */
extern void convert_move (rtx, rtx, int);

/* Convert an rtx to the specified machine mode and return the result.  */
extern rtx convert_to_mode (machine_mode, rtx, int);

/* Convert an rtx to MODE from OLDMODE and return the result.  */
extern rtx convert_modes (machine_mode mode, machine_mode oldmode,
                          rtx x, int unsignedp);

/* Variant of convert_modes for ABI parameter passing/return.  */
extern rtx convert_float_to_wider_int (machine_mode mode, machine_mode fmode,
                                       rtx x);

/* Variant of convert_modes for ABI parameter passing/return.  */
extern rtx convert_wider_int_to_float (machine_mode mode, machine_mode imode,
                                       rtx x);

/* Expand a call to memcpy or memmove or memcmp, and return the result.  */
extern rtx emit_block_op_via_libcall (enum built_in_function, rtx, rtx, rtx,
                                      bool);

inline rtx
emit_block_copy_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMCPY, dst, src, size, tailcall);
}

inline rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMMOVE, dst, src, size, tailcall);
}

inline rtx
emit_block_comp_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMCMP, dst, src, size, tailcall);
}
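
/* Illustrative sketch: the wrappers take MEM rtxes for the destination and
   source blocks and an rtx byte count, e.g.

     rtx ret = emit_block_copy_via_libcall (dst_mem, src_mem,
                                            GEN_INT (len), true);

   which emits a call to memcpy (as a tail call because the last argument is
   true) and returns the libcall's result rtx.  dst_mem, src_mem and len are
   hypothetical.  */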
106 | ||
107 | /* Emit code to move a block Y to a block X. */ | |
108 | enum block_op_methods | |
109 | { | |
110 | BLOCK_OP_NORMAL, | |
111 | BLOCK_OP_NO_LIBCALL, | |
112 | BLOCK_OP_CALL_PARM, | |
113 | /* Like BLOCK_OP_NORMAL, but the libcall can be tail call optimized. */ | |
114 | BLOCK_OP_TAILCALL, | |
115 | /* Like BLOCK_OP_NO_LIBCALL, but instead of emitting a libcall return | |
116 | pc_rtx to indicate nothing has been emitted and let the caller handle | |
117 | it. */ | |
118 | BLOCK_OP_NO_LIBCALL_RET | |
119 | }; | |
120 | ||
121 | typedef rtx (*by_pieces_constfn) (void *, void *, HOST_WIDE_INT, | |
122 | fixed_size_mode); | |
123 | ||
124 | /* The second pointer passed to by_pieces_constfn. */ | |
125 | struct by_pieces_prev | |
126 | { | |
127 | rtx data; | |
128 | fixed_size_mode mode; | |
129 | }; | |
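
/* Illustrative sketch: a by_pieces_constfn receives private data, a
   by_pieces_prev describing the previously generated constant (or NULL), a
   byte offset and the piece mode, and returns an rtx constant of that mode.
   A hypothetical callback describing an all-zero block could be:

     static rtx
     zero_read_str (void *, void *, HOST_WIDE_INT, fixed_size_mode mode)
     {
       return CONST0_RTX (mode);
     }

   Such a callback can then be handed to can_store_by_pieces and
   store_by_pieces below.  */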
130 | ||
131 | extern rtx emit_block_move (rtx, rtx, rtx, enum block_op_methods, | |
132 | unsigned ctz_size = 0); | |
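
/* Illustrative sketch: a plain copy of LEN bytes between two MEM rtxes uses
   the default method, e.g.

     emit_block_move (dst_mem, src_mem, GEN_INT (len), BLOCK_OP_NORMAL);

   where dst_mem, src_mem and len are hypothetical and the BLOCK_OP_* value
   selects how (and whether) a library call may be emitted.  */
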
extern rtx emit_block_move_hints (rtx, rtx, rtx, enum block_op_methods,
                                  unsigned int, HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  bool bail_out_libcall = false,
                                  bool *is_move_done = NULL,
                                  bool might_overlap = false,
                                  unsigned ctz_size = 0);
extern rtx emit_block_cmp_hints (rtx, rtx, rtx, tree, rtx, bool,
                                 by_pieces_constfn, void *,
                                 unsigned ctz_len = 0);
extern bool emit_storent_insn (rtx to, rtx from);
146 | ||
147 | /* Copy all or part of a value X into registers starting at REGNO. | |
148 | The number of registers to be filled is NREGS. */ | |
149 | extern void move_block_to_reg (int, rtx, int, machine_mode); | |
150 | ||
151 | /* Copy all or part of a BLKmode value X out of registers starting at REGNO. | |
152 | The number of registers to be filled is NREGS. */ | |
153 | extern void move_block_from_reg (int, rtx, int); | |
154 | ||
155 | /* Generate a non-consecutive group of registers represented by a PARALLEL. */ | |
156 | extern rtx gen_group_rtx (rtx); | |
157 | ||
158 | /* Load a BLKmode value into non-consecutive registers represented by a | |
159 | PARALLEL. */ | |
160 | extern void emit_group_load (rtx, rtx, tree, poly_int64); | |
161 | ||
162 | /* Similarly, but load into new temporaries. */ | |
163 | extern rtx emit_group_load_into_temps (rtx, rtx, tree, poly_int64); | |
164 | ||
165 | /* Move a non-consecutive group of registers represented by a PARALLEL into | |
166 | a non-consecutive group of registers represented by a PARALLEL. */ | |
167 | extern void emit_group_move (rtx, rtx); | |
168 | ||
169 | /* Move a group of registers represented by a PARALLEL into pseudos. */ | |
170 | extern rtx emit_group_move_into_temps (rtx); | |
171 | ||
172 | /* Store a BLKmode value from non-consecutive registers represented by a | |
173 | PARALLEL. */ | |
174 | extern void emit_group_store (rtx, rtx, tree, poly_int64); | |
175 | ||
176 | extern rtx maybe_emit_group_store (rtx, tree); | |
177 | ||
/* Mark REG as holding a parameter for the next CALL_INSN.
   Mode is TYPE_MODE of the non-promoted parameter, or VOIDmode.  */
extern void use_reg_mode (rtx *, rtx, machine_mode);
extern void clobber_reg_mode (rtx *, rtx, machine_mode);

extern rtx copy_blkmode_to_reg (machine_mode, tree);

/* Mark REG as holding a parameter for the next CALL_INSN.  */
inline void
use_reg (rtx *fusage, rtx reg)
{
  use_reg_mode (fusage, reg, VOIDmode);
}

/* Mark REG as clobbered by the call with FUSAGE as CALL_INSN_FUNCTION_USAGE.  */
inline void
clobber_reg (rtx *fusage, rtx reg)
{
  clobber_reg_mode (fusage, reg, VOIDmode);
}
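
/* Illustrative sketch: the call expander builds up a CALL_INSN_FUNCTION_USAGE
   list by marking each argument register before the call is emitted, e.g.

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (DImode, arg_regno));

   and later attaches call_fusage to the CALL_INSN.  arg_regno is
   hypothetical and DImode is just an example mode.  */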
198 | ||
199 | /* Mark NREGS consecutive regs, starting at REGNO, as holding parameters | |
200 | for the next CALL_INSN. */ | |
201 | extern void use_regs (rtx *, int, int); | |
202 | ||
203 | /* Mark a PARALLEL as holding a parameter for the next CALL_INSN. */ | |
204 | extern void use_group_regs (rtx *, rtx); | |
205 | ||
206 | #ifdef GCC_INSN_CODES_H | |
207 | extern rtx expand_cmpstrn_or_cmpmem (insn_code, rtx, rtx, rtx, tree, rtx, | |
208 | HOST_WIDE_INT); | |
209 | #endif | |
210 | ||
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */
extern rtx clear_storage (rtx, rtx, enum block_op_methods);
extern rtx clear_storage_hints (rtx, rtx, enum block_op_methods,
                                unsigned int, HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned);
/* The same, but always output a library call.  */
extern rtx set_storage_via_libcall (rtx, rtx, rtx, bool = false);

/* Expand a setmem pattern; return true if successful.  */
extern bool set_storage_via_setmem (rtx, rtx, rtx, unsigned int,
                                    unsigned int, HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT);
229 | ||
230 | /* Return true if it is desirable to store LEN bytes generated by | |
231 | CONSTFUN with several move instructions by store_by_pieces | |
232 | function. CONSTFUNDATA is a pointer which will be passed as argument | |
233 | in every CONSTFUN call. | |
234 | ALIGN is maximum alignment we can assume. | |
235 | MEMSETP is true if this is a real memset/bzero, not a copy | |
236 | of a const string. */ | |
237 | extern bool can_store_by_pieces (unsigned HOST_WIDE_INT, | |
238 | by_pieces_constfn, | |
239 | void *, unsigned int, bool); | |
240 | ||
241 | /* Generate several move instructions to store LEN bytes generated by | |
242 | CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a | |
243 | pointer which will be passed as argument in every CONSTFUN call. | |
244 | ALIGN is maximum alignment we can assume. | |
245 | MEMSETP is true if this is a real memset/bzero, not a copy. | |
246 | Returns TO + LEN. */ | |
247 | extern rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT, by_pieces_constfn, | |
248 | void *, unsigned int, bool, memop_ret); | |
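
/* Illustrative sketch: callers normally pair the two, first asking whether a
   by-pieces expansion is worthwhile and then performing it, e.g.

     if (can_store_by_pieces (len, zero_read_str, NULL, align, true))
       store_by_pieces (to_mem, len, zero_read_str, NULL, align, true,
                        RETURN_BEGIN);

   zero_read_str is the hypothetical callback sketched above; to_mem, len and
   align are hypothetical as well.  */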
249 | ||
250 | /* Generate several move instructions to clear LEN bytes of block TO. (A MEM | |
251 | rtx with BLKmode). ALIGN is maximum alignment we can assume. */ | |
252 | ||
253 | extern void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int); | |
254 | ||
255 | /* If can_store_by_pieces passes for worst-case values near MAX_LEN, call | |
256 | store_by_pieces within conditionals so as to handle variable LEN efficiently, | |
257 | storing VAL, if non-NULL_RTX, or valc instead. */ | |
258 | extern bool try_store_by_multiple_pieces (rtx to, rtx len, | |
259 | unsigned int ctz_len, | |
260 | unsigned HOST_WIDE_INT min_len, | |
261 | unsigned HOST_WIDE_INT max_len, | |
262 | rtx val, char valc, | |
263 | unsigned int align); | |
264 | ||
265 | /* Emit insns to set X from Y. */ | |
266 | extern rtx_insn *emit_move_insn (rtx, rtx); | |
267 | extern rtx_insn *gen_move_insn (rtx, rtx); | |
268 | ||
269 | /* Emit insns to set X from Y, with no frills. */ | |
270 | extern rtx_insn *emit_move_insn_1 (rtx, rtx); | |
271 | ||
272 | extern rtx_insn *emit_move_complex_push (machine_mode, rtx, rtx); | |
273 | extern rtx_insn *emit_move_complex_parts (rtx, rtx); | |
274 | extern rtx read_complex_part (rtx, bool); | |
275 | extern void write_complex_part (rtx, rtx, bool, bool); | |
extern rtx emit_move_resolve_push (machine_mode, rtx);

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.  */
extern rtx push_block (rtx, poly_int64, int);

/* Generate code to push something onto the stack, given its mode and type.  */
extern bool emit_push_insn (rtx, machine_mode, tree, rtx, unsigned int,
                            int, rtx, poly_int64, rtx, rtx, int, rtx, bool);

/* Extract the accessible bit-range from a COMPONENT_REF.  */
extern void get_bit_range (poly_uint64 *, poly_uint64 *, tree,
                           poly_int64 *, tree *);

/* Expand an assignment that stores the value of FROM into TO.  */
extern void expand_assignment (tree, tree, bool);

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.  */
extern rtx store_expr (tree, rtx, int, bool, bool);

/* Given an rtx that may include add and multiply operations,
   generate them as insns and return a pseudo-reg containing the value.
   Useful after calling expand_expr with EXPAND_SUM as the modifier.  */
extern rtx force_operand (rtx, rtx);

/* Workhorses for expand_expr.  */
extern rtx expand_expr_real (tree, rtx, machine_mode,
                             enum expand_modifier, rtx *, bool);
extern rtx expand_expr_real_1 (tree, rtx, machine_mode,
                               enum expand_modifier, rtx *, bool);
extern rtx expand_expr_real_2 (const_sepops, rtx, machine_mode,
                               enum expand_modifier);
extern rtx expand_expr_real_gassign (gassign *, rtx, machine_mode,
                                     enum expand_modifier modifier,
                                     rtx * = nullptr, bool = false);
315 | ||
316 | /* Generate code for computing expression EXP. | |
317 | An rtx for the computed value is returned. The value is never null. | |
318 | In the case of a void EXP, const0_rtx is returned. */ | |
319 | inline rtx | |
320 | expand_expr (tree exp, rtx target, machine_mode mode, | |
321 | enum expand_modifier modifier) | |
322 | { | |
323 | return expand_expr_real (exp, target, mode, modifier, NULL, false); | |
324 | } | |
325 | ||
326 | inline rtx | |
327 | expand_normal (tree exp) | |
328 | { | |
329 | return expand_expr_real (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL, NULL, false); | |
330 | } | |
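
/* Illustrative sketch: most callers that only need the value of a GENERIC
   expression use expand_normal, e.g.

     rtx op0 = expand_normal (treeop0);

   while callers with a preferred destination and mode pass them to
   expand_expr together with one of the EXPAND_* modifiers above:

     rtx res = expand_expr (exp, target, mode, EXPAND_NORMAL);

   treeop0, exp, target and mode are hypothetical.  */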
331 | ||
332 | ||
333 | /* Return STRING_CST and set offset, size and decl, if the first | |
334 | argument corresponds to a string constant. */ | |
335 | extern tree string_constant (tree, tree *, tree *, tree *); | |
336 | /* Similar to string_constant, return a STRING_CST corresponding | |
337 | to the value representation of the first argument if it's | |
338 | a constant. */ | |
339 | extern tree byte_representation (tree, tree *, tree *, tree *); | |
340 | ||
341 | extern enum tree_code maybe_optimize_mod_cmp (enum tree_code, tree *, tree *); | |
342 | extern void maybe_optimize_sub_cmp_0 (enum tree_code, tree *, tree *); | |
343 | ||
344 | /* Two different ways of generating switch statements. */ | |
345 | extern bool try_casesi (tree, tree, tree, tree, rtx, rtx, rtx, | |
346 | profile_probability); | |
347 | extern bool try_tablejump (tree, tree, tree, tree, rtx, rtx, | |
348 | profile_probability); | |
349 | ||
350 | extern bool safe_from_p (const_rtx, tree, int); | |
351 | ||
352 | /* Get the personality libfunc for a function decl. */ | |
353 | rtx get_personality_function (tree); | |
354 | ||
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return true if a call to move_by_pieces should
   succeed.  */
extern bool can_move_by_pieces (unsigned HOST_WIDE_INT, unsigned int);

extern unsigned HOST_WIDE_INT highest_pow2_factor (const_tree);

extern bool categorize_ctor_elements (const_tree, HOST_WIDE_INT *,
                                      HOST_WIDE_INT *, HOST_WIDE_INT *,
                                      int *);
extern bool type_has_padding_at_level_p (tree);
extern bool immediate_const_ctor_p (const_tree, unsigned int words = 1);
extern void store_constructor (tree, rtx, int, poly_int64, bool);
extern HOST_WIDE_INT int_expr_size (const_tree exp);

extern void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);

/* rtl.h and tree.h were included.  */
/* Return an rtx for the size in bytes of the value of an expr.  */
extern rtx expr_size (tree);

extern bool mem_ref_refers_to_non_mem_p (tree);
extern bool non_mem_decl_p (tree);

/* Return the quotient of the polynomial long division of x^2N by POLYNOMIAL
   in GF (2^N).  */
extern unsigned HOST_WIDE_INT
gf2n_poly_long_div_quotient (unsigned HOST_WIDE_INT, unsigned short);
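
/* Illustrative worked example, assuming the usual encoding where bit I of
   the argument is the coefficient of x^I: for N = 4 and
   POLYNOMIAL = x^4 + x + 1 (0b10011), long division of x^8 over GF(2) gives

     x^8 = (x^4 + x + 1) * (x^4 + x + 1) + (x^2 + 1),

   so the quotient returned would be x^4 + x + 1, i.e. 0b10011.  Quotients of
   this form are the precomputed constants used in Barrett-style reduction.  */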
384 | ||
385 | /* Generate table-based CRC. */ | |
386 | extern void generate_reflecting_code_standard (rtx *); | |
387 | extern void expand_crc_table_based (rtx, rtx, rtx, rtx, machine_mode); | |
388 | extern void expand_reversed_crc_table_based (rtx, rtx, rtx, rtx, machine_mode, | |
389 | void (*) (rtx *)); | |
390 | ||
391 | #endif /* GCC_EXPR_H */ |