1 | /* Definitions for code generation pass of GNU compiler. |
2 | Copyright (C) 1987-2024 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #ifndef GCC_EXPR_H |
21 | #define GCC_EXPR_H |
22 | |
/* This is the 4th arg to `expand_expr'.
   EXPAND_NORMAL (value 0) is the default, with none of the special
   constraints below.
   EXPAND_STACK_PARM means we are possibly expanding a call param onto
   the stack.
   EXPAND_SUM means it is ok to return a PLUS rtx or MULT rtx.
   EXPAND_INITIALIZER is similar but also record any labels on forced_labels.
   EXPAND_CONST_ADDRESS means it is ok to return a MEM whose address
   is a constant that is not a legitimate address.
   EXPAND_WRITE means we are only going to write to the resulting rtx.
   EXPAND_MEMORY means we are interested in a memory result, even if
   the memory is constant and we could have propagated a constant value,
   or the memory is unaligned on a STRICT_ALIGNMENT target.  */
enum expand_modifier {EXPAND_NORMAL = 0, EXPAND_STACK_PARM, EXPAND_SUM,
                      EXPAND_CONST_ADDRESS, EXPAND_INITIALIZER, EXPAND_WRITE,
                      EXPAND_MEMORY};
37 | |
/* Prevent the compiler from deferring stack pops.  See
   inhibit_defer_pop for more information.
   inhibit_defer_pop is a counter, so these may nest: each NO_DEFER_POP
   must be balanced by a matching OK_DEFER_POP.  */
#define NO_DEFER_POP (inhibit_defer_pop += 1)

/* Allow the compiler to defer stack pops.  See inhibit_defer_pop for
   more information.  Balances a preceding NO_DEFER_POP.  */
#define OK_DEFER_POP (inhibit_defer_pop -= 1)
45 | |
/* This structure is used to pass around information about exploded
   unary, binary and trinary expressions between expand_expr_real_1 and
   friends.  */
typedef struct separate_ops
{
  enum tree_code code;		/* Tree code of the operation.  */
  location_t location;		/* Source location of the expression.  */
  tree type;			/* Type of the operation's result.  */
  tree op0, op1, op2;		/* Operands; later ones are unused for
				   codes of smaller arity.  */
} *sepops;
56 | |
/* This is run during target initialization to set up which modes can be
   used directly in memory and to initialize the block move optab.  */
extern void init_expr_target (void);

/* This is run at the start of compiling a function.  */
extern void init_expr (void);

/* Emit some rtl insns to move data between rtx's, converting machine modes.
   Both modes must be floating or both fixed.  The trailing int is
   UNSIGNEDP, as in convert_modes below.  */
extern void convert_move (rtx, rtx, int);

/* Convert an rtx to specified machine mode and return the result.  */
extern rtx convert_to_mode (machine_mode, rtx, int);

/* Convert an rtx to MODE from OLDMODE and return the result.  */
extern rtx convert_modes (machine_mode mode, machine_mode oldmode,
                          rtx x, int unsignedp);

/* Variant of convert_modes for ABI parameter passing/return:
   convert X from float mode FMODE to (wider) integer mode MODE.  */
extern rtx convert_float_to_wider_int (machine_mode mode, machine_mode fmode,
                                       rtx x);

/* Variant of convert_modes for ABI parameter passing/return:
   convert X from (wider) integer mode IMODE to float mode MODE.  */
extern rtx convert_wider_int_to_float (machine_mode mode, machine_mode imode,
                                       rtx x);

/* Expand a call to memcpy or memmove or memcmp, and return the result.
   The built_in_function argument selects which of the three; the final
   bool, when true, allows the libcall to be emitted as a tail call
   (cf. the TAILCALL parameter of the inline wrappers below).  */
extern rtx emit_block_op_via_libcall (enum built_in_function, rtx, rtx, rtx,
                                      bool);
86 | |
87 | inline rtx |
88 | emit_block_copy_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false) |
89 | { |
90 | return emit_block_op_via_libcall (BUILT_IN_MEMCPY, dst, src, size, tailcall); |
91 | } |
92 | |
93 | inline rtx |
94 | emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false) |
95 | { |
96 | return emit_block_op_via_libcall (BUILT_IN_MEMMOVE, dst, src, size, tailcall); |
97 | } |
98 | |
99 | inline rtx |
100 | emit_block_comp_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false) |
101 | { |
102 | return emit_block_op_via_libcall (BUILT_IN_MEMCMP, dst, src, size, tailcall); |
103 | } |
104 | |
/* Emit code to move a block Y to a block X.
   These constants select the strategy for expanding a block move
   (and related clear/compare operations).  */
enum block_op_methods
{
  /* Use any applicable strategy, including falling back to a libcall.  */
  BLOCK_OP_NORMAL,
  /* As BLOCK_OP_NORMAL, but never fall back to a libcall.  */
  BLOCK_OP_NO_LIBCALL,
  /* The block being moved is an outgoing call parameter.
     NOTE(review): the exact extra constraints live in expr.cc; confirm
     there.  */
  BLOCK_OP_CALL_PARM,
  /* Like BLOCK_OP_NORMAL, but the libcall can be tail call optimized.  */
  BLOCK_OP_TAILCALL,
  /* Like BLOCK_OP_NO_LIBCALL, but instead of emitting a libcall return
     pc_rtx to indicate nothing has been emitted and let the caller handle
     it.  */
  BLOCK_OP_NO_LIBCALL_RET
};
118 | |
/* Callback producing the constant data for one piece of a by-pieces
   operation, in the given fixed-size mode at the given HOST_WIDE_INT
   offset.  The first pointer is private callback data; the second is a
   by_pieces_prev (below) or null.
   NOTE(review): argument roles inferred from by_pieces_prev and the
   callers declared in this header; confirm in expr.cc.  */
typedef rtx (*by_pieces_constfn) (void *, void *, HOST_WIDE_INT,
                                  fixed_size_mode);

/* The second pointer passed to by_pieces_constfn.  Describes the
   previously generated piece.  */
struct by_pieces_prev
{
  rtx data;			/* The previous piece's value.  */
  fixed_size_mode mode;		/* The previous piece's mode.  */
};
128 | |
/* Emit code to move a block Y to a block X using method METHOD.
   CTZ_SIZE, if nonzero, is a known number of trailing zero bits in the
   size (i.e. a known alignment of the length).  */
extern rtx emit_block_move (rtx, rtx, rtx, enum block_op_methods,
                            unsigned ctz_size = 0);
/* As emit_block_move, but taking extra expansion hints (expected
   alignment and size bounds).  When BAIL_OUT_LIBCALL is set, do not
   emit a libcall and report via *IS_MOVE_DONE instead.
   NOTE(review): exact meanings of the unnamed hint parameters are in
   expr.cc; confirm there.  */
extern rtx emit_block_move_hints (rtx, rtx, rtx, enum block_op_methods,
                                  unsigned int, HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  bool bail_out_libcall = false,
                                  bool *is_move_done = NULL,
                                  bool might_overlap = false,
                                  unsigned ctz_size = 0);
/* Emit code to compare two blocks (memcmp-style); CONSTFN, if given,
   supplies constant data for one side (see by_pieces_constfn).  */
extern rtx emit_block_cmp_hints (rtx, rtx, rtx, tree, rtx, bool,
                                 by_pieces_constfn, void *,
                                 unsigned ctz_len = 0);
/* Try to emit a storent (non-temporal store) insn setting TO from FROM;
   return true if successful.  */
extern bool emit_storent_insn (rtx to, rtx from);
144 | |
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_to_reg (int, rtx, int, machine_mode);

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_from_reg (int, rtx, int);

/* Generate a non-consecutive group of registers represented by a PARALLEL.  */
extern rtx gen_group_rtx (rtx);

/* Load a BLKmode value into non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_load (rtx, rtx, tree, poly_int64);

/* Similarly, but load into new temporaries.  */
extern rtx emit_group_load_into_temps (rtx, rtx, tree, poly_int64);

/* Move a non-consecutive group of registers represented by a PARALLEL into
   a non-consecutive group of registers represented by a PARALLEL.  */
extern void emit_group_move (rtx, rtx);

/* Move a group of registers represented by a PARALLEL into pseudos.  */
extern rtx emit_group_move_into_temps (rtx);

/* Store a BLKmode value from non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_store (rtx, rtx, tree, poly_int64);

/* Variant of emit_group_store.  NOTE(review): presumably only acts when
   the source is a PARALLEL group; confirm the exact contract in expr.cc.  */
extern rtx maybe_emit_group_store (rtx, tree);

/* Mark REG as holding a parameter for the next CALL_INSN.
   Mode is TYPE_MODE of the non-promoted parameter, or VOIDmode.  */
extern void use_reg_mode (rtx *, rtx, machine_mode);
/* Likewise, but mark REG as clobbered by the call (see clobber_reg).  */
extern void clobber_reg_mode (rtx *, rtx, machine_mode);

/* Copy a BLKmode value of the given tree type into a register (group) of
   mode MODE.  NOTE(review): padding/ordering details are in expr.cc.  */
extern rtx copy_blkmode_to_reg (machine_mode, tree);
182 | |
/* Mark REG as holding a parameter for the next CALL_INSN.
   FUSAGE points at the list used as CALL_INSN_FUNCTION_USAGE.
   Equivalent to use_reg_mode with VOIDmode.  */
inline void
use_reg (rtx *fusage, rtx reg)
{
  use_reg_mode (fusage, reg, VOIDmode);
}
189 | |
/* Mark REG as clobbered by the call with FUSAGE as CALL_INSN_FUNCTION_USAGE.
   Equivalent to clobber_reg_mode with VOIDmode.  */
inline void
clobber_reg (rtx *fusage, rtx reg)
{
  clobber_reg_mode (fusage, reg, VOIDmode);
}
196 | |
/* Mark NREGS consecutive regs, starting at REGNO, as holding parameters
   for the next CALL_INSN.  */
extern void use_regs (rtx *, int, int);

/* Mark a PARALLEL as holding a parameter for the next CALL_INSN.  */
extern void use_group_regs (rtx *, rtx);

/* Only available once insn codes are known (insn-codes.h included).  */
#ifdef GCC_INSN_CODES_H
/* Expand a string or memory comparison using the cmpstrn/cmpmem insn
   pattern identified by the given insn_code.  */
extern rtx expand_cmpstrn_or_cmpmem (insn_code, rtx, rtx, rtx, tree, rtx,
                                     HOST_WIDE_INT);
#endif
208 | |
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */
extern rtx clear_storage (rtx, rtx, enum block_op_methods);
/* As clear_storage, but taking extra expansion hints (expected alignment
   and size bounds); cf. emit_block_move_hints.  */
extern rtx clear_storage_hints (rtx, rtx, enum block_op_methods,
                                unsigned int, HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned);
/* The same, but always output a library call.  */
extern rtx set_storage_via_libcall (rtx, rtx, rtx, bool = false);

/* Expand a setmem pattern; return true if successful.  */
extern bool set_storage_via_setmem (rtx, rtx, rtx, unsigned int,
                                    unsigned int, HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT);
227 | |
/* Return true if it is desirable to store LEN bytes generated by
   CONSTFUN with several move instructions by store_by_pieces
   function.  CONSTFUNDATA is a pointer which will be passed as argument
   in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy
   of a const string.  */
extern bool can_store_by_pieces (unsigned HOST_WIDE_INT,
                                 by_pieces_constfn,
                                 void *, unsigned int, bool);

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy.
   Returns TO + LEN.  */
extern rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT, by_pieces_constfn,
                            void *, unsigned int, bool, memop_ret);

/* If can_store_by_pieces passes for worst-case values near MAX_LEN, call
   store_by_pieces within conditionals so as to handle variable LEN
   efficiently, storing VAL, if non-NULL_RTX, or valc instead.
   Return true if the expansion succeeded.  */
extern bool try_store_by_multiple_pieces (rtx to, rtx len,
                                          unsigned int ctz_len,
                                          unsigned HOST_WIDE_INT min_len,
                                          unsigned HOST_WIDE_INT max_len,
                                          rtx val, char valc,
                                          unsigned int align);
257 | |
258 | /* Emit insns to set X from Y. */ |
259 | extern rtx_insn *emit_move_insn (rtx, rtx); |
260 | extern rtx_insn *gen_move_insn (rtx, rtx); |
261 | |
262 | /* Emit insns to set X from Y, with no frills. */ |
263 | extern rtx_insn *emit_move_insn_1 (rtx, rtx); |
264 | |
265 | extern rtx_insn *emit_move_complex_push (machine_mode, rtx, rtx); |
266 | extern rtx_insn *emit_move_complex_parts (rtx, rtx); |
267 | extern rtx read_complex_part (rtx, bool); |
268 | extern void write_complex_part (rtx, rtx, bool, bool); |
269 | extern rtx read_complex_part (rtx, bool); |
270 | extern rtx emit_move_resolve_push (machine_mode, rtx); |
271 | |
/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.  */
extern rtx push_block (rtx, poly_int64, int);

/* Generate code to push something onto the stack, given its mode and type.
   Return value is a bool; NOTE(review): see expr.cc for the meaning of
   the many positional arguments.  */
extern bool emit_push_insn (rtx, machine_mode, tree, rtx, unsigned int,
                            int, rtx, poly_int64, rtx, rtx, int, rtx, bool);

/* Extract the accessible bit-range from a COMPONENT_REF.  */
extern void get_bit_range (poly_uint64 *, poly_uint64 *, tree,
                           poly_int64 *, tree *);

/* Expand an assignment that stores the value of FROM into TO.  */
extern void expand_assignment (tree, tree, bool);

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.  */
extern rtx store_expr (tree, rtx, int, bool, bool);

/* Given an rtx that may include add and multiply operations,
   generate them as insns and return a pseudo-reg containing the value.
   Useful after calling expand_expr with 1 as sum_ok.  */
extern rtx force_operand (rtx, rtx);

/* Workhorses for expand_expr.  */
extern rtx expand_expr_real (tree, rtx, machine_mode,
                             enum expand_modifier, rtx *, bool);
extern rtx expand_expr_real_1 (tree, rtx, machine_mode,
                               enum expand_modifier, rtx *, bool);
/* Expand an exploded unary/binary/trinary expression (see separate_ops
   above).  */
extern rtx expand_expr_real_2 (sepops, rtx, machine_mode,
                               enum expand_modifier);
/* Expand the value computed by a GIMPLE assignment statement.  */
extern rtx expand_expr_real_gassign (gassign *, rtx, machine_mode,
                                     enum expand_modifier modifier,
                                     rtx * = nullptr, bool = false);
308 | |
309 | /* Generate code for computing expression EXP. |
310 | An rtx for the computed value is returned. The value is never null. |
311 | In the case of a void EXP, const0_rtx is returned. */ |
312 | inline rtx |
313 | expand_expr (tree exp, rtx target, machine_mode mode, |
314 | enum expand_modifier modifier) |
315 | { |
316 | return expand_expr_real (exp, target, mode, modifier, NULL, false); |
317 | } |
318 | |
319 | inline rtx |
320 | expand_normal (tree exp) |
321 | { |
322 | return expand_expr_real (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL, NULL, false); |
323 | } |
324 | |
325 | |
/* Return STRING_CST and set offset, size and decl, if the first
   argument corresponds to a string constant.  */
extern tree string_constant (tree, tree *, tree *, tree *);
/* Similar to string_constant, return a STRING_CST corresponding
   to the value representation of the first argument if it's
   a constant.  */
extern tree byte_representation (tree, tree *, tree *, tree *);

/* NOTE(review): comparison-optimization hooks — presumably rewrite a
   mod-and-compare / subtract-and-compare-with-0 into cheaper forms;
   confirm in expr.cc.  */
extern enum tree_code maybe_optimize_mod_cmp (enum tree_code, tree *, tree *);
extern void maybe_optimize_sub_cmp_0 (enum tree_code, tree *, tree *);

/* Two different ways of generating switch statements.  */
extern bool try_casesi (tree, tree, tree, tree, rtx, rtx, rtx,
                        profile_probability);
extern bool try_tablejump (tree, tree, tree, tree, rtx, rtx,
                           profile_probability);

/* NOTE(review): presumably returns whether the tree expression can be
   evaluated without affecting the rtx; confirm in expr.cc.  */
extern bool safe_from_p (const_rtx, tree, int);

/* Get the personality libfunc for a function decl.  */
rtx get_personality_function (tree);

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */
extern bool can_move_by_pieces (unsigned HOST_WIDE_INT, unsigned int);

/* Return the highest power of two that the expression is known to be a
   multiple of.  */
extern unsigned HOST_WIDE_INT highest_pow2_factor (const_tree);

/* Analyze the elements of a CONSTRUCTOR.  NOTE(review): the three
   HOST_WIDE_INT outputs are element counts/categories; see expr.cc.  */
extern bool categorize_ctor_elements (const_tree, HOST_WIDE_INT *,
                                      HOST_WIDE_INT *, HOST_WIDE_INT *,
                                      bool *);
extern bool immediate_const_ctor_p (const_tree, unsigned int words = 1);
/* Expand a CONSTRUCTOR into its target.  */
extern void store_constructor (tree, rtx, int, poly_int64, bool);
/* Return the size in bytes of EXP's value as a HOST_WIDE_INT.  */
extern HOST_WIDE_INT int_expr_size (const_tree exp);

/* Expand a pair of operands, honoring MODIFIER; results are returned
   through the two rtx pointers.  */
extern void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);

/* The declarations below assume rtl.h and tree.h have been included.  */
/* Return an rtx for the size in bytes of the value of an expr.  */
extern rtx expr_size (tree);

extern bool mem_ref_refers_to_non_mem_p (tree);
extern bool non_mem_decl_p (tree);
371 | |
372 | #endif /* GCC_EXPR_H */ |
373 | |