xref: /dragonfly/contrib/gcc-8.0/gcc/expr.h (revision 8bf5b238)
1 /* Definitions for code generation pass of GNU compiler.
2    Copyright (C) 1987-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #ifndef GCC_EXPR_H
21 #define GCC_EXPR_H
22 
/* Fourth argument to `expand_expr': how the caller intends to use the
   expansion result.  */
enum expand_modifier
{
  /* No special constraints; the common case.  */
  EXPAND_NORMAL = 0,
  /* We are possibly expanding a call param onto the stack.  */
  EXPAND_STACK_PARM,
  /* It is ok to return a PLUS rtx or MULT rtx.  */
  EXPAND_SUM,
  /* It is ok to return a MEM whose address is a constant that is
     not a legitimate address.  */
  EXPAND_CONST_ADDRESS,
  /* Like EXPAND_SUM, but also record any labels on forced_labels.  */
  EXPAND_INITIALIZER,
  /* We are only going to write to the resulting rtx.  */
  EXPAND_WRITE,
  /* We are interested in a memory result, even if the memory is
     constant and we could have propagated a constant value, or the
     memory is unaligned on a STRICT_ALIGNMENT target.  */
  EXPAND_MEMORY
};
37 
/* Prevent the compiler from deferring stack pops.  See
   inhibit_defer_pop for more information.  Each NO_DEFER_POP must be
   balanced by a later OK_DEFER_POP.  */
#define NO_DEFER_POP (inhibit_defer_pop += 1)

/* Allow the compiler to defer stack pops again; undoes one earlier
   NO_DEFER_POP.  See inhibit_defer_pop for more information.  */
#define OK_DEFER_POP (inhibit_defer_pop -= 1)
45 
/* This structure is used to pass around information about exploded
   unary, binary and trinary expressions between expand_expr_real_1 and
   friends.  */
typedef struct separate_ops
{
  enum tree_code code;		/* Tree code of the operation.  */
  location_t location;		/* Source location of the expression.  */
  tree type;			/* Type of the result.  */
  tree op0, op1, op2;		/* Operands; a trinary op uses all three.  */
} *sepops;
56 
/* This is run during target initialization to set up which modes can be
   used directly in memory and to initialize the block move optab.  */
extern void init_expr_target (void);

/* This is run at the start of compiling a function.  */
extern void init_expr (void);

/* Emit some rtl insns to move data between rtx's, converting machine modes.
   Both modes must be floating or both fixed.  */
extern void convert_move (rtx, rtx, int);

/* Convert an rtx to specified machine mode and return the result.  */
extern rtx convert_to_mode (machine_mode, rtx, int);

/* Convert an rtx to MODE from OLDMODE and return the result.  */
extern rtx convert_modes (machine_mode, machine_mode, rtx, int);

/* Expand a call to memcpy or memmove or memcmp (selected by the
   built_in_function argument), and return the result.  The trailing
   bool permits emitting the libcall as a tail call.  */
extern rtx emit_block_op_via_libcall (enum built_in_function, rtx, rtx, rtx,
				      bool);
77 
/* Expand a call to memcpy copying SIZE bytes from SRC to DST and return
   the result; TAILCALL permits emitting it as a tail call.  */
static inline rtx
emit_block_copy_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMCPY, dst, src, size, tailcall);
}
83 
/* Expand a call to memmove copying SIZE bytes from SRC to DST and return
   the result; TAILCALL permits emitting it as a tail call.  */
static inline rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMMOVE, dst, src, size, tailcall);
}
89 
/* Expand a call to memcmp comparing SIZE bytes at DST and SRC and return
   the result; TAILCALL permits emitting it as a tail call.  */
static inline rtx
emit_block_comp_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMCMP, dst, src, size, tailcall);
}
95 
/* How emit_block_move and friends may implement a block copy of Y
   into X.  */
enum block_op_methods
{
  /* Any strategy, including a library call.  */
  BLOCK_OP_NORMAL,
  /* As BLOCK_OP_NORMAL, but a library call must not be emitted.  */
  BLOCK_OP_NO_LIBCALL,
  /* NOTE(review): presumably the copy is of an outgoing call
     parameter — confirm against callers in expr.c/calls.c.  */
  BLOCK_OP_CALL_PARM,
  /* Like BLOCK_OP_NORMAL, but the libcall can be tail call optimized.  */
  BLOCK_OP_TAILCALL,
  /* Like BLOCK_OP_NO_LIBCALL, but instead of emitting a libcall return
     pc_rtx to indicate nothing has been emitted and let the caller handle
     it.  */
  BLOCK_OP_NO_LIBCALL_RET
};
109 
/* Callback type for the by-pieces machinery: invoked with the caller's
   CONSTFUNDATA pointer, an offset (presumably in bytes — confirm in
   expr.c) and a mode, it returns the constant chunk to store as an rtx
   of that mode.  */
typedef rtx (*by_pieces_constfn) (void *, HOST_WIDE_INT, scalar_int_mode);

/* Emit code to move a block from Y to X using METHOD.  */
extern rtx emit_block_move (rtx, rtx, rtx, enum block_op_methods);
/* As emit_block_move, with additional alignment/size hints for the
   expander.  */
extern rtx emit_block_move_hints (rtx, rtx, rtx, enum block_op_methods,
			          unsigned int, HOST_WIDE_INT,
				  unsigned HOST_WIDE_INT,
				  unsigned HOST_WIDE_INT,
				  unsigned HOST_WIDE_INT);
/* Emit code to compare two blocks, memcmp-style.  */
extern rtx emit_block_cmp_hints (rtx, rtx, rtx, tree, rtx, bool,
				 by_pieces_constfn, void *);
/* Emit a non-temporal store of FROM into TO; return true on success.  */
extern bool emit_storent_insn (rtx to, rtx from);
121 
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_to_reg (int, rtx, int, machine_mode);

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_from_reg (int, rtx, int);

/* Generate a non-consecutive group of registers represented by a PARALLEL.  */
extern rtx gen_group_rtx (rtx);

/* Load a BLKmode value into non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_load (rtx, rtx, tree, poly_int64);

/* Similarly, but load into new temporaries.  */
extern rtx emit_group_load_into_temps (rtx, rtx, tree, poly_int64);

/* Move a non-consecutive group of registers represented by a PARALLEL into
   a non-consecutive group of registers represented by a PARALLEL.  */
extern void emit_group_move (rtx, rtx);

/* Move a group of registers represented by a PARALLEL into pseudos.  */
extern rtx emit_group_move_into_temps (rtx);

/* Store a BLKmode value from non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_store (rtx, rtx, tree, poly_int64);

/* As emit_group_store, but only when the source is actually a PARALLEL;
   returns the (possibly new) destination.  */
extern rtx maybe_emit_group_store (rtx, tree);

/* Mark REG as holding a parameter for the next CALL_INSN.
   Mode is TYPE_MODE of the non-promoted parameter, or VOIDmode.  */
extern void use_reg_mode (rtx *, rtx, machine_mode);
/* Companion of use_reg_mode: mark REG as clobbered, same mode rules.  */
extern void clobber_reg_mode (rtx *, rtx, machine_mode);

/* Copy a BLKmode value of the given tree expression into a register of
   MODE and return that register.  */
extern rtx copy_blkmode_to_reg (machine_mode, tree);
159 
/* Mark REG as holding a parameter for the next CALL_INSN, extending the
   usage list FUSAGE points to.  Convenience wrapper passing VOIDmode to
   use_reg_mode (see its comment for the mode's meaning).  */
static inline void
use_reg (rtx *fusage, rtx reg)
{
  use_reg_mode (fusage, reg, VOIDmode);
}
166 
/* Mark REG as clobbered by the call with FUSAGE as
   CALL_INSN_FUNCTION_USAGE.  Convenience wrapper passing VOIDmode to
   clobber_reg_mode.  */
static inline void
clobber_reg (rtx *fusage, rtx reg)
{
  clobber_reg_mode (fusage, reg, VOIDmode);
}
173 
/* Mark NREGS consecutive regs, starting at REGNO, as holding parameters
   for the next CALL_INSN.  */
extern void use_regs (rtx *, int, int);

/* Mark a PARALLEL as holding a parameter for the next CALL_INSN.  */
extern void use_group_regs (rtx *, rtx);

/* Only declared when insn-codes.h has been included, since the
   insn_code parameter type comes from there.  */
#ifdef GCC_INSN_CODES_H
extern rtx expand_cmpstrn_or_cmpmem (insn_code, rtx, rtx, rtx, tree, rtx,
				     HOST_WIDE_INT);
#endif
185 
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */
extern rtx clear_storage (rtx, rtx, enum block_op_methods);
/* As clear_storage, with additional alignment/size hints for the
   expander.  */
extern rtx clear_storage_hints (rtx, rtx, enum block_op_methods,
			        unsigned int, HOST_WIDE_INT,
				unsigned HOST_WIDE_INT,
				unsigned HOST_WIDE_INT,
				unsigned HOST_WIDE_INT);
/* The same, but always output a library call.  */
extern rtx set_storage_via_libcall (rtx, rtx, rtx, bool = false);

/* Expand a setmem pattern; return true if successful.  */
extern bool set_storage_via_setmem (rtx, rtx, rtx, unsigned int,
				    unsigned int, HOST_WIDE_INT,
				    unsigned HOST_WIDE_INT,
				    unsigned HOST_WIDE_INT,
				    unsigned HOST_WIDE_INT);
203 
/* Return nonzero if it is desirable to store LEN bytes generated by
   CONSTFUN with several move instructions by store_by_pieces
   function.  CONSTFUNDATA is a pointer which will be passed as argument
   in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy
   of a const string.  */
extern int can_store_by_pieces (unsigned HOST_WIDE_INT,
				by_pieces_constfn,
				void *, unsigned int, bool);

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy.
   Returns TO + LEN.
   NOTE(review): the trailing int parameter is undocumented here —
   confirm its meaning against the definition in expr.c.  */
extern rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT, by_pieces_constfn,
			    void *, unsigned int, bool, int);
223 
224 /* Emit insns to set X from Y.  */
225 extern rtx_insn *emit_move_insn (rtx, rtx);
226 extern rtx_insn *gen_move_insn (rtx, rtx);
227 
228 /* Emit insns to set X from Y, with no frills.  */
229 extern rtx_insn *emit_move_insn_1 (rtx, rtx);
230 
231 extern rtx_insn *emit_move_complex_push (machine_mode, rtx, rtx);
232 extern rtx_insn *emit_move_complex_parts (rtx, rtx);
233 extern rtx read_complex_part (rtx, bool);
234 extern void write_complex_part (rtx, rtx, bool);
235 extern rtx read_complex_part (rtx, bool);
236 extern rtx emit_move_resolve_push (machine_mode, rtx);
237 
/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.  */
extern rtx push_block (rtx, poly_int64, int);

/* Generate code to push something onto the stack, given its mode and type.  */
extern bool emit_push_insn (rtx, machine_mode, tree, rtx, unsigned int,
			    int, rtx, poly_int64, rtx, rtx, int, rtx, bool);

/* Extract the accessible bit-range from a COMPONENT_REF.  */
extern void get_bit_range (poly_uint64_pod *, poly_uint64_pod *, tree,
			   poly_int64_pod *, tree *);
249 
/* Expand an assignment that stores the value of FROM into TO.  */
extern void expand_assignment (tree, tree, bool);

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.
   NOTE(review): this comment mentions SUGGEST_REG but no such parameter
   is visible in the prototypes below — likely stale; verify against the
   definitions in expr.c.  */
extern rtx store_expr_with_bounds (tree, rtx, int, bool, bool, tree);
extern rtx store_expr (tree, rtx, int, bool, bool);

/* Given an rtx that may include add and multiply operations,
   generate them as insns and return a pseudo-reg containing the value.
   Useful after calling expand_expr with 1 as sum_ok.  */
extern rtx force_operand (rtx, rtx);

/* Work horses for expand_expr.  */
extern rtx expand_expr_real (tree, rtx, machine_mode,
			     enum expand_modifier, rtx *, bool);
extern rtx expand_expr_real_1 (tree, rtx, machine_mode,
			       enum expand_modifier, rtx *, bool);
extern rtx expand_expr_real_2 (sepops, rtx, machine_mode,
			       enum expand_modifier);
272 
/* Generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.
   Convenience wrapper around expand_expr_real with no alternate-rtl
   out-parameter and the trailing bool clear.  */
static inline rtx
expand_expr (tree exp, rtx target, machine_mode mode,
	     enum expand_modifier modifier)
{
  return expand_expr_real (exp, target, mode, modifier, NULL, false);
}
282 
/* Expand EXP with no particular target, mode, or modifier constraints
   (EXPAND_NORMAL); shorthand for the most common expand_expr call.  */
static inline rtx
expand_normal (tree exp)
{
  return expand_expr_real (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL, NULL, false);
}
288 
289 
290 /* Return the tree node and offset if a given argument corresponds to
291    a string constant.  */
292 extern tree string_constant (tree, tree *);
293 
294 /* Two different ways of generating switch statements.  */
295 extern int try_casesi (tree, tree, tree, tree, rtx, rtx, rtx, profile_probability);
296 extern int try_tablejump (tree, tree, tree, tree, rtx, rtx, profile_probability);
297 
298 extern int safe_from_p (const_rtx, tree, int);
299 
300 /* Get the personality libfunc for a function decl.  */
301 rtx get_personality_function (tree);
302 
303 /* Determine whether the LEN bytes can be moved by using several move
304    instructions.  Return nonzero if a call to move_by_pieces should
305    succeed.  */
306 extern bool can_move_by_pieces (unsigned HOST_WIDE_INT, unsigned int);
307 
308 extern unsigned HOST_WIDE_INT highest_pow2_factor (const_tree);
309 
310 extern bool categorize_ctor_elements (const_tree, HOST_WIDE_INT *,
311 				      HOST_WIDE_INT *, bool *);
312 
313 extern void expand_operands (tree, tree, rtx, rtx*, rtx*,
314 			     enum expand_modifier);
315 
316 /* rtl.h and tree.h were included.  */
317 /* Return an rtx for the size in bytes of the value of an expr.  */
318 extern rtx expr_size (tree);
319 
320 #endif /* GCC_EXPR_H */
321