1 /* Target machine subroutines for Altera Nios II.
2 Copyright (C) 2012-2016 Free Software Foundation, Inc.
3 Contributed by Jonah Graham (jgraham@altera.com),
4 Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
5 Contributed by Mentor Graphics, Inc.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 3, or (at your
12 option) any later version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "df.h"
31 #include "tm_p.h"
32 #include "optabs.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "diagnostic-core.h"
37 #include "output.h"
38 #include "insn-attr.h"
39 #include "flags.h"
40 #include "explow.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "expr.h"
44 #include "toplev.h"
45 #include "langhooks.h"
46 #include "stor-layout.h"
47 #include "builtins.h"
48
49 /* This file should be included last. */
50 #include "target-def.h"
51
/* Forward function declarations for helpers defined later in this file.  */
static bool prologue_saved_reg_p (unsigned);
static void nios2_load_pic_register (void);
static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code, int);
static const char *nios2_unspec_reloc_name (int);
static void nios2_register_builtin_fndecl (unsigned, tree);
static rtx nios2_ldst_parallel (bool, bool, bool, rtx, int,
				unsigned HOST_WIDE_INT, bool);

/* Threshold (in bytes) for data being put into the small data/bss area,
   instead of the normal data area (references to the small data/bss area
   take 1 instruction, and use the global pointer, references to the normal
   data area takes 2 instructions).  Initialized to the -G default; may be
   overridden by option processing.  */
unsigned HOST_WIDE_INT nios2_section_threshold = NIOS2_DEFAULT_GVALUE;
66
/* Per-function machine-dependent state, attached to cfun->machine and
   marked for garbage collection via GTY.  All sizes and offsets are in
   bytes.  */
struct GTY (()) machine_function
{
  /* Current frame information, to be filled in by nios2_compute_frame_layout
     with register save masks, and offsets for the current function.  */

  /* Mask of registers to save; bit N corresponds to hard register N.  */
  unsigned int save_mask;
  /* Number of bytes that the entire frame takes up.  */
  int total_size;
  /* Number of bytes that variables take up.  */
  int var_size;
  /* Number of bytes that outgoing arguments take up.  */
  int args_size;
  /* Number of bytes needed to store registers in frame.  */
  int save_reg_size;
  /* Number of bytes used to store callee-saved registers.  Subset of
     save_reg_size; the remainder covers the EH data registers.  */
  int callee_save_reg_size;
  /* Offset from new stack pointer to store registers.  */
  int save_regs_offset;
  /* Offset from save_regs_offset to store frame pointer register.  */
  int fp_save_offset;
  /* != 0 if function has a variable argument list.  */
  int uses_anonymous_args;
  /* != 0 if frame layout already calculated (set from reload_completed,
     so the layout is recomputed until reload has finished).  */
  int initialized;
};
93
/* State to track the assignment of custom codes to FPU/custom builtins.
   Custom-instruction codes are 8-bit values, hence the 256-entry tables:
   custom_code_status[N] records how code N is used, and custom_code_index[N]
   records which builtin/insn claimed it.  */
static enum nios2_ccs_code custom_code_status[256];
static int custom_code_index[256];
/* Set to true if any conflicts (re-use of a code between 0-255) are found.  */
static bool custom_code_conflict = false;
99
100
/* Definition of builtin function types for nios2.

   Each N2_FTYPE (N, ARGS) entry describes one signature: N is the number
   of types listed (return type plus N-1 argument types) and ARGS is the
   parenthesized list (RET, ARG1, ...).  The abbreviations are mapped to
   type trees in nios2_ftype: SF = float, DF = double, SI = int,
   UI = unsigned int, VOID = void, VPTR = volatile void *,
   CVPTR = const volatile void *.  */

#define N2_FTYPES \
  N2_FTYPE(1, (SF)) \
  N2_FTYPE(1, (VOID)) \
  N2_FTYPE(2, (DF, DF)) \
  N2_FTYPE(3, (DF, DF, DF)) \
  N2_FTYPE(2, (DF, SF)) \
  N2_FTYPE(2, (DF, SI)) \
  N2_FTYPE(2, (DF, UI)) \
  N2_FTYPE(2, (SF, DF)) \
  N2_FTYPE(2, (SF, SF)) \
  N2_FTYPE(3, (SF, SF, SF)) \
  N2_FTYPE(2, (SF, SI)) \
  N2_FTYPE(2, (SF, UI)) \
  N2_FTYPE(2, (SI, CVPTR)) \
  N2_FTYPE(2, (SI, DF)) \
  N2_FTYPE(3, (SI, DF, DF)) \
  N2_FTYPE(2, (SI, SF)) \
  N2_FTYPE(3, (SI, SF, SF)) \
  N2_FTYPE(2, (SI, SI)) \
  N2_FTYPE(3, (SI, SI, SI)) \
  N2_FTYPE(3, (SI, VPTR, SI)) \
  N2_FTYPE(2, (UI, CVPTR)) \
  N2_FTYPE(2, (UI, DF)) \
  N2_FTYPE(2, (UI, SF)) \
  N2_FTYPE(2, (VOID, DF)) \
  N2_FTYPE(2, (VOID, SF)) \
  N2_FTYPE(2, (VOID, SI)) \
  N2_FTYPE(3, (VOID, SI, SI)) \
  N2_FTYPE(2, (VOID, VPTR)) \
  N2_FTYPE(3, (VOID, VPTR, SI))

/* Build the name of an ftcode enumerator from a signature; e.g.
   N2_FTYPE_OP2 (SF, SI) expands to N2_FTYPE_SF_SI.  */
#define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
#define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
#define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2

/* Expand ftcode enumeration: one enumerator per N2_FTYPES entry, plus
   N2_FTYPE_MAX as the count.  */
enum nios2_ftcode {
#define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
  N2_FTYPES
#undef N2_FTYPE
  N2_FTYPE_MAX
};
145
146 /* Return the tree function type, based on the ftcode. */
/* Return the tree function type, based on the ftcode FTCODE.  Types are
   built lazily on first request and memoized in a static table.  */
static tree
nios2_ftype (enum nios2_ftcode ftcode)
{
  /* Memoization cache, indexed by ftcode.  */
  static tree types[(int) N2_FTYPE_MAX];

  /* Local names chosen so the N2_TYPE_ ## X token pasting below
     resolves each signature abbreviation to the right type tree.  */
  tree N2_TYPE_SF = float_type_node;
  tree N2_TYPE_DF = double_type_node;
  tree N2_TYPE_SI = integer_type_node;
  tree N2_TYPE_UI = unsigned_type_node;
  tree N2_TYPE_VOID = void_type_node;

  static const_tree N2_TYPE_CVPTR, N2_TYPE_VPTR;
  if (!N2_TYPE_CVPTR)
    {
      /* const volatile void *.  */
      N2_TYPE_CVPTR
	= build_pointer_type (build_qualified_type (void_type_node,
						    (TYPE_QUAL_CONST
						     | TYPE_QUAL_VOLATILE)));
      /* volatile void *.  */
      N2_TYPE_VPTR
	= build_pointer_type (build_qualified_type (void_type_node,
						    TYPE_QUAL_VOLATILE));
    }
  if (types[(int) ftcode] == NULL_TREE)
    switch (ftcode)
      {
	/* Expand one case per signature; each builds the corresponding
	   function type list ending in NULL_TREE.  */
#define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
#define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
#define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
#define N2_FTYPE(N,ARGS)						\
  case N2_FTYPE_OP ## N ARGS:						\
    types[(int) ftcode]							\
      = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE);	\
    break;
	N2_FTYPES
#undef N2_FTYPE
      default: gcc_unreachable ();
      }
  return types[(int) ftcode];
}
188
189
/* Definition of FPU instruction descriptions.  */

/* One record per FPU custom instruction.  */
struct nios2_fpu_insn_info
{
  /* Option/builtin name, e.g. "fadds".  */
  const char *name;
  /* Operand count (including the result) and a pointer to the
     -mcustom-<name>= option variable holding the assigned custom code
     (negative when not enabled).  */
  int num_operands, *optvar;
  /* Option codes for the enabling and disabling command-line options.  */
  int opt, no_opt;
#define N2F_DF 0x1		/* Double-precision operation.  */
#define N2F_DFREQ 0x2		/* Requires double-precision support.  */
#define N2F_UNSAFE 0x4		/* Only usable with -funsafe-math-optimizations.  */
#define N2F_FINITE 0x8		/* Only usable with -ffinite-math-only.  */
#define N2F_NO_ERRNO 0x10	/* Only usable with -fno-math-errno.  */
  unsigned int flags;
  enum insn_code icode;
  enum nios2_ftcode ftcode;
};

/* Base macro for defining FPU instructions.  */
#define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args)		\
  { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_,		\
    OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode,			\
    N2_FTYPE_OP ## nop args }

/* Arithmetic and math functions; 2 or 3 operand FP operations.
   M is the lower-case mode letter (s/d), capital M the upper-case one.  */
#define N2FPU_OP2(mode) (mode, mode)
#define N2FPU_OP3(mode) (mode, mode, mode)
#define N2FPU_INSN_DEF(code, icode, nop, flags, m, M)			\
  N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags,			\
		       icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
#define N2FPU_INSN_SF(code, nop, flags)					\
  N2FPU_INSN_DEF (code, code, nop, flags, s, S)
#define N2FPU_INSN_DF(code, nop, flags)					\
  N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)

/* Compare instructions, 3 operand FP operation with a SI result.  */
#define N2FPU_CMP_DEF(code, flags, m, M)				\
  N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags,			\
		       nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
#define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
#define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)

/* The order of definition needs to be maintained consistent with
   enum n2fpu_code in nios2-opts.h.  */
struct nios2_fpu_insn_info nios2_fpu_insn[] =
  {
    /* Single precision instructions.  */
    N2FPU_INSN_SF (add, 3, 0),
    N2FPU_INSN_SF (sub, 3, 0),
    N2FPU_INSN_SF (mul, 3, 0),
    N2FPU_INSN_SF (div, 3, 0),
    /* Due to textual difference between min/max and smin/smax.  */
    N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, s, S),
    N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, s, S),
    N2FPU_INSN_SF (neg, 2, 0),
    N2FPU_INSN_SF (abs, 2, 0),
    N2FPU_INSN_SF (sqrt, 2, 0),
    N2FPU_INSN_SF (sin, 2, N2F_UNSAFE),
    N2FPU_INSN_SF (cos, 2, N2F_UNSAFE),
    N2FPU_INSN_SF (tan, 2, N2F_UNSAFE),
    N2FPU_INSN_SF (atan, 2, N2F_UNSAFE),
    N2FPU_INSN_SF (exp, 2, N2F_UNSAFE),
    N2FPU_INSN_SF (log, 2, N2F_UNSAFE),
    /* Single precision compares.  */
    N2FPU_CMP_SF (eq), N2FPU_CMP_SF (ne),
    N2FPU_CMP_SF (lt), N2FPU_CMP_SF (le),
    N2FPU_CMP_SF (gt), N2FPU_CMP_SF (ge),

    /* Double precision instructions.  */
    N2FPU_INSN_DF (add, 3, 0),
    N2FPU_INSN_DF (sub, 3, 0),
    N2FPU_INSN_DF (mul, 3, 0),
    N2FPU_INSN_DF (div, 3, 0),
    /* Due to textual difference between min/max and smin/smax.  */
    N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, d, D),
    N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, d, D),
    N2FPU_INSN_DF (neg, 2, 0),
    N2FPU_INSN_DF (abs, 2, 0),
    N2FPU_INSN_DF (sqrt, 2, 0),
    N2FPU_INSN_DF (sin, 2, N2F_UNSAFE),
    N2FPU_INSN_DF (cos, 2, N2F_UNSAFE),
    N2FPU_INSN_DF (tan, 2, N2F_UNSAFE),
    N2FPU_INSN_DF (atan, 2, N2F_UNSAFE),
    N2FPU_INSN_DF (exp, 2, N2F_UNSAFE),
    N2FPU_INSN_DF (log, 2, N2F_UNSAFE),
    /* Double precision compares.  */
    N2FPU_CMP_DF (eq), N2FPU_CMP_DF (ne),
    N2FPU_CMP_DF (lt), N2FPU_CMP_DF (le),
    N2FPU_CMP_DF (gt), N2FPU_CMP_DF (ge),

    /* Conversion instructions.  */
    N2FPU_INSN_DEF_BASE (floatis,  2, 0, floatsisf2,    (SF, SI)),
    N2FPU_INSN_DEF_BASE (floatus,  2, 0, floatunssisf2, (SF, UI)),
    N2FPU_INSN_DEF_BASE (floatid,  2, 0, floatsidf2,    (DF, SI)),
    N2FPU_INSN_DEF_BASE (floatud,  2, 0, floatunssidf2, (DF, UI)),
    N2FPU_INSN_DEF_BASE (round, 2, N2F_NO_ERRNO, lroundsfsi2, (SI, SF)),
    N2FPU_INSN_DEF_BASE (fixsi,    2, 0, fix_truncsfsi2,     (SI, SF)),
    N2FPU_INSN_DEF_BASE (fixsu,    2, 0, fixuns_truncsfsi2,  (UI, SF)),
    N2FPU_INSN_DEF_BASE (fixdi,    2, 0, fix_truncdfsi2,     (SI, DF)),
    N2FPU_INSN_DEF_BASE (fixdu,    2, 0, fixuns_truncdfsi2,  (UI, DF)),
    N2FPU_INSN_DEF_BASE (fextsd,   2, 0, extendsfdf2,   (DF, SF)),
    N2FPU_INSN_DEF_BASE (ftruncds, 2, 0, truncdfsf2,    (SF, DF)),

    /* X, Y access instructions.  */
    N2FPU_INSN_DEF_BASE (fwrx,     2, N2F_DFREQ, nios2_fwrx,   (VOID, DF)),
    N2FPU_INSN_DEF_BASE (fwry,     2, N2F_DFREQ, nios2_fwry,   (VOID, SF)),
    N2FPU_INSN_DEF_BASE (frdxlo,   1, N2F_DFREQ, nios2_frdxlo, (SF)),
    N2FPU_INSN_DEF_BASE (frdxhi,   1, N2F_DFREQ, nios2_frdxhi, (SF)),
    N2FPU_INSN_DEF_BASE (frdy,     1, N2F_DFREQ, nios2_frdy,   (SF))
  };

/* Some macros for ease of access.  N2FPU_N is the assigned custom code
   (negative when the insn is not enabled).  */
#define N2FPU(code) nios2_fpu_insn[(int) code]
#define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
#define N2FPU_N(code) (*N2FPU(code).optvar)
#define N2FPU_NAME(code) (N2FPU(code).name)
#define N2FPU_ICODE(code) (N2FPU(code).icode)
#define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
#define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
#define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
#define N2FPU_NO_ERRNO_P(code) (N2FPU(code).flags & N2F_NO_ERRNO)
#define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
#define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)

/* Same as above, but for cases where using only the op part is shorter.  */
#define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
#define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
#define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
317
318 /* Export the FPU insn enabled predicate to nios2.md. */
319 bool
nios2_fpu_insn_enabled(enum n2fpu_code code)320 nios2_fpu_insn_enabled (enum n2fpu_code code)
321 {
322 return N2FPU_ENABLED_P (code);
323 }
324
325 /* Return true if COND comparison for mode MODE is enabled under current
326 settings. */
327
328 static bool
nios2_fpu_compare_enabled(enum rtx_code cond,machine_mode mode)329 nios2_fpu_compare_enabled (enum rtx_code cond, machine_mode mode)
330 {
331 if (mode == SFmode)
332 switch (cond)
333 {
334 case EQ: return N2FPU_OP_ENABLED_P (fcmpeqs);
335 case NE: return N2FPU_OP_ENABLED_P (fcmpnes);
336 case GT: return N2FPU_OP_ENABLED_P (fcmpgts);
337 case GE: return N2FPU_OP_ENABLED_P (fcmpges);
338 case LT: return N2FPU_OP_ENABLED_P (fcmplts);
339 case LE: return N2FPU_OP_ENABLED_P (fcmples);
340 default: break;
341 }
342 else if (mode == DFmode)
343 switch (cond)
344 {
345 case EQ: return N2FPU_OP_ENABLED_P (fcmpeqd);
346 case NE: return N2FPU_OP_ENABLED_P (fcmpned);
347 case GT: return N2FPU_OP_ENABLED_P (fcmpgtd);
348 case GE: return N2FPU_OP_ENABLED_P (fcmpged);
349 case LT: return N2FPU_OP_ENABLED_P (fcmpltd);
350 case LE: return N2FPU_OP_ENABLED_P (fcmpled);
351 default: break;
352 }
353 return false;
354 }
355
/* Stack layout and calling conventions.  */

/* Round LOC up to the next multiple of the preferred stack boundary,
   in bytes.  */
#define NIOS2_STACK_ALIGN(LOC) \
  (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1)) \
   & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))

/* Compute the frame layout for the current function, caching the
   results in cfun->machine, and return the total frame size in bytes.
   Once reload has completed, the cached layout is reused on subsequent
   calls.  */
static int
nios2_compute_frame_layout (void)
{
  unsigned int regno;
  unsigned int save_mask = 0;
  int total_size;
  int var_size;
  int out_args_size;
  int save_reg_size;
  int callee_save_reg_size;

  /* Reuse the cached layout if it has already been finalized.  */
  if (cfun->machine->initialized)
    return cfun->machine->total_size;

  /* Calculate space needed for gp registers: 4 bytes per saved reg.  */
  save_reg_size = 0;
  for (regno = 0; regno <= LAST_GP_REG; regno++)
    if (prologue_saved_reg_p (regno))
      {
	save_mask |= 1 << regno;
	save_reg_size += 4;
      }

  /* If we are saving any callee-save register, then assume
     push.n/pop.n should be used.  Make sure RA is saved, and
     contiguous registers starting from r16-- are all saved.  */
  if (TARGET_HAS_CDX && save_reg_size != 0)
    {
      if ((save_mask & (1 << RA_REGNO)) == 0)
	{
	  save_mask |= 1 << RA_REGNO;
	  save_reg_size += 4;
	}

      for (regno = 23; regno >= 16; regno--)
	if ((save_mask & (1 << regno)) != 0)
	  {
	    /* Starting from highest numbered callee-saved
	       register that is used, make sure all regs down
	       to r16 is saved, to maintain contiguous range
	       for push.n/pop.n.  */
	    unsigned int i;
	    for (i = regno - 1; i >= 16; i--)
	      if ((save_mask & (1 << i)) == 0)
		{
		  save_mask |= 1 << i;
		  save_reg_size += 4;
		}
	    break;
	  }
    }

  /* Record the callee-saved portion before any EH registers are added,
     so the CDX push.n/pop.n size can be distinguished later.  */
  callee_save_reg_size = save_reg_size;

  /* If we call eh_return, we need to save the EH data registers.  */
  if (crtl->calls_eh_return)
    {
      unsigned i;
      unsigned r;

      for (i = 0; (r = EH_RETURN_DATA_REGNO (i)) != INVALID_REGNUM; i++)
	if (!(save_mask & (1 << r)))
	  {
	    save_mask |= 1 << r;
	    save_reg_size += 4;
	  }
    }

  /* The FP slot's offset within the save area is the number of
     lower-numbered saved registers times 4.  */
  cfun->machine->fp_save_offset = 0;
  if (save_mask & (1 << HARD_FRAME_POINTER_REGNUM))
    {
      int fp_save_offset = 0;
      for (regno = 0; regno < HARD_FRAME_POINTER_REGNUM; regno++)
	if (save_mask & (1 << regno))
	  fp_save_offset += 4;

      cfun->machine->fp_save_offset = fp_save_offset;
    }

  /* Total frame = aligned locals + aligned outgoing args + aligned
     register save area + aligned pretend args.  */
  var_size = NIOS2_STACK_ALIGN (get_frame_size ());
  out_args_size = NIOS2_STACK_ALIGN (crtl->outgoing_args_size);
  total_size = var_size + out_args_size;

  save_reg_size = NIOS2_STACK_ALIGN (save_reg_size);
  total_size += save_reg_size;
  total_size += NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);

  /* Save other computed information.  */
  cfun->machine->save_mask = save_mask;
  cfun->machine->total_size = total_size;
  cfun->machine->var_size = var_size;
  cfun->machine->args_size = out_args_size;
  cfun->machine->save_reg_size = save_reg_size;
  cfun->machine->callee_save_reg_size = callee_save_reg_size;
  /* Only mark the layout final after reload, when the register usage
     can no longer change.  */
  cfun->machine->initialized = reload_completed;
  cfun->machine->save_regs_offset = out_args_size + var_size;

  return total_size;
}
463
464 /* Generate save/restore of register REGNO at SP + OFFSET. Used by the
465 prologue/epilogue expand routines. */
466 static void
save_reg(int regno,unsigned offset)467 save_reg (int regno, unsigned offset)
468 {
469 rtx reg = gen_rtx_REG (SImode, regno);
470 rtx addr = plus_constant (Pmode, stack_pointer_rtx, offset, false);
471 rtx_insn *insn = emit_move_insn (gen_frame_mem (Pmode, addr), reg);
472 RTX_FRAME_RELATED_P (insn) = 1;
473 }
474
475 static void
restore_reg(int regno,unsigned offset)476 restore_reg (int regno, unsigned offset)
477 {
478 rtx reg = gen_rtx_REG (SImode, regno);
479 rtx addr = plus_constant (Pmode, stack_pointer_rtx, offset, false);
480 rtx_insn *insn = emit_move_insn (reg, gen_frame_mem (Pmode, addr));
481 /* Tag epilogue unwind note. */
482 add_reg_note (insn, REG_CFA_RESTORE, reg);
483 RTX_FRAME_RELATED_P (insn) = 1;
484 }
485
486 /* This routine tests for the base register update SET in load/store
487 multiple RTL insns, used in pop_operation_p and ldstwm_operation_p. */
488 static bool
base_reg_adjustment_p(rtx set,rtx * base_reg,rtx * offset)489 base_reg_adjustment_p (rtx set, rtx *base_reg, rtx *offset)
490 {
491 if (GET_CODE (set) == SET
492 && REG_P (SET_DEST (set))
493 && GET_CODE (SET_SRC (set)) == PLUS
494 && REG_P (XEXP (SET_SRC (set), 0))
495 && rtx_equal_p (SET_DEST (set), XEXP (SET_SRC (set), 0))
496 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
497 {
498 *base_reg = XEXP (SET_SRC (set), 0);
499 *offset = XEXP (SET_SRC (set), 1);
500 return true;
501 }
502 return false;
503 }
504
/* Does the CFA note work for push/pop prologue/epilogue instructions.
   INSN is a PARALLEL whose elements are, in order: an optional RETURN
   (epilogue only), an optional SP adjustment, then the register
   loads/stores.  For the epilogue, attach REG_CFA_ADJUST_CFA /
   REG_CFA_RESTORE notes; for the prologue, just mark each element
   frame-related.  */
static void
nios2_create_cfa_notes (rtx_insn *insn, bool epilogue_p)
{
  int i = 0;
  rtx base_reg, offset, elt, pat = PATTERN (insn);
  if (epilogue_p)
    {
      /* Skip a leading RETURN element, if present.  */
      elt = XVECEXP (pat, 0, 0);
      if (GET_CODE (elt) == RETURN)
	i++;
      /* An SP increment, if present, adjusts the CFA.  */
      elt = XVECEXP (pat, 0, i);
      if (base_reg_adjustment_p (elt, &base_reg, &offset))
	{
	  add_reg_note (insn, REG_CFA_ADJUST_CFA, copy_rtx (elt));
	  i++;
	}
      /* Every remaining element is expected to be a register load;
	 note each destination register as restored.  */
      for (; i < XVECLEN (pat, 0); i++)
	{
	  elt = SET_DEST (XVECEXP (pat, 0, i));
	  gcc_assert (REG_P (elt));
	  add_reg_note (insn, REG_CFA_RESTORE, elt);
	}
    }
  else
    {
      /* Tag each of the prologue sets.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
    }
}
536
537 /* Temp regno used inside prologue/epilogue. */
538 #define TEMP_REG_NUM 8
539
540 /* Emit conditional trap for checking stack limit. SIZE is the number of
541 additional bytes required.
542
543 GDB prologue analysis depends on this generating a direct comparison
544 to the SP register, so the adjustment to add SIZE needs to be done on
545 the other operand to the comparison. Use TEMP_REG_NUM as a temporary,
546 if necessary. */
547 static void
nios2_emit_stack_limit_check(int size)548 nios2_emit_stack_limit_check (int size)
549 {
550 rtx sum = NULL_RTX;
551
552 if (GET_CODE (stack_limit_rtx) == SYMBOL_REF)
553 {
554 /* This generates a %hiadj/%lo pair with the constant size
555 add handled by the relocations. */
556 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
557 emit_move_insn (sum, plus_constant (Pmode, stack_limit_rtx, size));
558 }
559 else if (!REG_P (stack_limit_rtx))
560 sorry ("Unknown form for stack limit expression");
561 else if (size == 0)
562 sum = stack_limit_rtx;
563 else if (SMALL_INT (size))
564 {
565 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
566 emit_move_insn (sum, plus_constant (Pmode, stack_limit_rtx, size));
567 }
568 else
569 {
570 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
571 emit_move_insn (sum, gen_int_mode (size, Pmode));
572 emit_insn (gen_add2_insn (sum, stack_limit_rtx));
573 }
574
575 emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode, stack_pointer_rtx, sum),
576 stack_pointer_rtx, sum, GEN_INT (3)));
577 }
578
579 static rtx_insn *
nios2_emit_add_constant(rtx reg,HOST_WIDE_INT immed)580 nios2_emit_add_constant (rtx reg, HOST_WIDE_INT immed)
581 {
582 rtx_insn *insn;
583 if (SMALL_INT (immed))
584 insn = emit_insn (gen_add2_insn (reg, gen_int_mode (immed, Pmode)));
585 else
586 {
587 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
588 emit_move_insn (tmp, gen_int_mode (immed, Pmode));
589 insn = emit_insn (gen_add2_insn (reg, tmp));
590 }
591 return insn;
592 }
593
/* Adjust the stack pointer by SP_ADJUST bytes and attach the unwind
   notes appropriate for a prologue (!EPILOGUE_P) or epilogue
   adjustment.  Returns the emitted add insn, or NULL if SP_ADJUST is
   zero.  */
static rtx_insn *
nios2_adjust_stack (int sp_adjust, bool epilogue_p)
{
  enum reg_note note_kind = REG_NOTE_MAX;
  rtx_insn *insn = NULL;
  if (sp_adjust)
    {
      if (SMALL_INT (sp_adjust))
	insn = emit_insn (gen_add2_insn (stack_pointer_rtx,
					 gen_int_mode (sp_adjust, Pmode)));
      else
	{
	  /* Constant out of addi range; materialize it in the temp reg.
	     Since the actual adjustment is then split over two insns,
	     the prologue needs an explicit REG_FRAME_RELATED_EXPR note
	     describing the net effect.  */
	  rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
	  emit_move_insn (tmp, gen_int_mode (sp_adjust, Pmode));
	  insn = emit_insn (gen_add2_insn (stack_pointer_rtx, tmp));
	  /* Attach a note indicating what happened.  */
	  if (!epilogue_p)
	    note_kind = REG_FRAME_RELATED_EXPR;
	}
      /* Epilogue adjustments always get a CFA-adjust note.  */
      if (epilogue_p)
	note_kind = REG_CFA_ADJUST_CFA;
      if (note_kind != REG_NOTE_MAX)
	{
	  rtx cfa_adj = gen_rtx_SET (stack_pointer_rtx,
				     plus_constant (Pmode, stack_pointer_rtx,
						    sp_adjust));
	  add_reg_note (insn, note_kind, cfa_adj);
	}
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  return insn;
}
626
/* Expand RTL for the current function's prologue: allocate the stack
   frame computed by nios2_compute_frame_layout, save the registers in
   save_mask, set up the hard frame pointer and PIC register if needed,
   and emit the stack-limit check when -fstack-limit is in effect.  */
void
nios2_expand_prologue (void)
{
  unsigned int regno;
  int total_frame_size, save_offset;
  int sp_offset;      /* offset from base_reg to final stack value.  */
  int save_regs_base; /* offset from base_reg to register save area.  */
  rtx_insn *insn;

  total_frame_size = nios2_compute_frame_layout ();

  if (flag_stack_usage_info)
    current_function_static_stack_size = total_frame_size;

  /* When R2 CDX push.n/stwm is available, arrange for stack frame to be built
     using them.  */
  if (TARGET_HAS_CDX
      && (cfun->machine->save_reg_size != 0
	  || cfun->machine->uses_anonymous_args))
    {
      unsigned int regmask = cfun->machine->save_mask;
      /* r16 and above can use push.n; r15 and below (the EH data regs
	 among them) need stwm.  */
      unsigned int callee_save_regs = regmask & 0xffff0000;
      unsigned int caller_save_regs = regmask & 0x0000ffff;
      int push_immed = 0;
      int pretend_args_size = NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
      rtx stack_mem =
	gen_frame_mem (SImode, plus_constant (Pmode, stack_pointer_rtx, -4));

      /* Check that there is room for the entire stack frame before doing
	 any SP adjustments or pushes.  */
      if (crtl->limit_stack)
	nios2_emit_stack_limit_check (total_frame_size);

      if (pretend_args_size)
	{
	  if (cfun->machine->uses_anonymous_args)
	    {
	      /* Emit a stwm to push copy of argument registers onto
	         the stack for va_arg processing.  */
	      unsigned int r, mask = 0, n = pretend_args_size / 4;
	      for (r = LAST_ARG_REGNO - n + 1; r <= LAST_ARG_REGNO; r++)
		mask |= (1 << r);
	      insn = emit_insn (nios2_ldst_parallel
				(false, false, false, stack_mem,
				 -pretend_args_size, mask, false));
	      /* Tag first SP adjustment as frame-related.  */
	      RTX_FRAME_RELATED_P (XVECEXP (PATTERN (insn), 0, 0)) = 1;
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    nios2_adjust_stack (-pretend_args_size, false);
	}
      if (callee_save_regs)
	{
	  /* Emit a push.n to save registers and optionally allocate
	     push_immed extra bytes on the stack.  */
	  int sp_adjust;
	  if (caller_save_regs)
	    /* Can't allocate extra stack space yet.  */
	    push_immed = 0;
	  else if (cfun->machine->save_regs_offset <= 60)
	    /* Stack adjustment fits entirely in the push.n.  */
	    push_immed = cfun->machine->save_regs_offset;
	  else if (frame_pointer_needed
		   && cfun->machine->fp_save_offset == 0)
	    /* Deferring the entire stack adjustment until later
	       allows us to use a mov.n instead of a 32-bit addi
	       instruction to set the frame pointer.  */
	    push_immed = 0;
	  else
	    /* Splitting the stack adjustment between the push.n
	       and an explicit adjustment makes it more likely that
	       we can use spdeci.n for the explicit part.  */
	    push_immed = 60;
	  sp_adjust = -(cfun->machine->callee_save_reg_size + push_immed);
	  insn = emit_insn (nios2_ldst_parallel (false, false, false,
						 stack_mem, sp_adjust,
						 callee_save_regs, false));
	  nios2_create_cfa_notes (insn, false);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      if (caller_save_regs)
	{
	  /* Emit a stwm to save the EH data regs, r4-r7.  */
	  int caller_save_size = (cfun->machine->save_reg_size
				  - cfun->machine->callee_save_reg_size);
	  gcc_assert ((caller_save_regs & ~0xf0) == 0);
	  insn = emit_insn (nios2_ldst_parallel
			    (false, false, false, stack_mem,
			     -caller_save_size, caller_save_regs, false));
	  nios2_create_cfa_notes (insn, false);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      save_regs_base = push_immed;
      sp_offset = -(cfun->machine->save_regs_offset - push_immed);
    }
  /* The non-CDX cases decrement the stack pointer, to prepare for individual
     register saves to the stack.  */
  else if (!SMALL_INT (total_frame_size))
    {
      /* We need an intermediary point, this will point at the spill block.  */
      nios2_adjust_stack (cfun->machine->save_regs_offset - total_frame_size,
			  false);
      save_regs_base = 0;
      sp_offset = -cfun->machine->save_regs_offset;
      if (crtl->limit_stack)
	nios2_emit_stack_limit_check (cfun->machine->save_regs_offset);
    }
  else if (total_frame_size)
    {
      nios2_adjust_stack (-total_frame_size, false);
      save_regs_base = cfun->machine->save_regs_offset;
      sp_offset = 0;
      if (crtl->limit_stack)
	nios2_emit_stack_limit_check (0);
    }
  else
    save_regs_base = sp_offset = 0;

  /* Save the registers individually in the non-CDX case.  */
  if (!TARGET_HAS_CDX)
    {
      /* Walk downward from the top of the save area; registers are
	 stored in descending regno order.  */
      save_offset = save_regs_base + cfun->machine->save_reg_size;

      for (regno = LAST_GP_REG; regno > 0; regno--)
	if (cfun->machine->save_mask & (1 << regno))
	  {
	    save_offset -= 4;
	    save_reg (regno, save_offset);
	  }
    }

  /* Set the hard frame pointer.  */
  if (frame_pointer_needed)
    {
      int fp_save_offset = save_regs_base + cfun->machine->fp_save_offset;
      insn =
	(fp_save_offset == 0
	 ? emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx)
	 : emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
				     stack_pointer_rtx,
				     gen_int_mode (fp_save_offset, Pmode))));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Allocate sp_offset more bytes in the stack frame.  */
  nios2_adjust_stack (sp_offset, false);

  /* Load the PIC register if needed.  */
  if (crtl->uses_pic_offset_table)
    nios2_load_pic_register ();

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  */
  if (crtl->profile)
    emit_insn (gen_blockage ());
}
786
/* Expand RTL for the current function's epilogue: restore the saved
   registers, deallocate the frame, and emit the return.  When
   SIBCALL_P, the final return is omitted because the caller emits a
   sibling call instead.  */
void
nios2_expand_epilogue (bool sibcall_p)
{
  rtx_insn *insn;
  rtx cfa_adj;
  int total_frame_size;
  int sp_adjust, save_offset;
  unsigned int regno;

  /* Frameless leaf functions can return with a bare "ret".  */
  if (!sibcall_p && nios2_can_use_return_insn ())
    {
      emit_jump_insn (gen_return ());
      return;
    }

  /* Prevent the scheduler from moving frame accesses past this point.  */
  emit_insn (gen_blockage ());

  total_frame_size = nios2_compute_frame_layout ();
  if (frame_pointer_needed)
    {
      /* Recover the stack pointer.  */
      insn =
	(cfun->machine->fp_save_offset == 0
	 ? emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx)
	 : emit_insn (gen_add3_insn
		      (stack_pointer_rtx, hard_frame_pointer_rtx,
		       gen_int_mode (-cfun->machine->fp_save_offset, Pmode))));
      cfa_adj = plus_constant (Pmode, stack_pointer_rtx,
			       (total_frame_size
				- cfun->machine->save_regs_offset));
      add_reg_note (insn, REG_CFA_DEF_CFA, cfa_adj);
      RTX_FRAME_RELATED_P (insn) = 1;

      save_offset = 0;
      sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
    }
  else if (!SMALL_INT (total_frame_size))
    {
      /* Pop up to the register save area first, mirroring the two-step
	 prologue adjustment for large frames.  */
      nios2_adjust_stack (cfun->machine->save_regs_offset, true);
      save_offset = 0;
      sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
    }
  else
    {
      save_offset = cfun->machine->save_regs_offset;
      sp_adjust = total_frame_size;
    }

  if (!TARGET_HAS_CDX)
    {
      /* Generate individual register restores.  */
      save_offset += cfun->machine->save_reg_size;

      for (regno = LAST_GP_REG; regno > 0; regno--)
	if (cfun->machine->save_mask & (1 << regno))
	  {
	    save_offset -= 4;
	    restore_reg (regno, save_offset);
	  }
      nios2_adjust_stack (sp_adjust, true);
    }
  else if (cfun->machine->save_reg_size == 0)
    {
      /* Nothing to restore, just recover the stack position.  */
      nios2_adjust_stack (sp_adjust, true);
    }
  else
    {
      /* Emit CDX pop.n/ldwm to restore registers and optionally return.  */
      unsigned int regmask = cfun->machine->save_mask;
      unsigned int callee_save_regs = regmask & 0xffff0000;
      unsigned int caller_save_regs = regmask & 0x0000ffff;
      int callee_save_size = cfun->machine->callee_save_reg_size;
      int caller_save_size = cfun->machine->save_reg_size - callee_save_size;
      int pretend_args_size = NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
      /* pop.n can only combine the return when there is no further
	 stack adjustment or EH stack fixup needed after it.  */
      bool ret_p = (!pretend_args_size && !crtl->calls_eh_return
		    && !sibcall_p);

      if (!ret_p || caller_save_size > 0)
	sp_adjust = save_offset;
      else
	sp_adjust = (save_offset > 60 ? save_offset - 60 : 0);

      save_offset -= sp_adjust;

      nios2_adjust_stack (sp_adjust, true);

      if (caller_save_regs)
	{
	  /* Emit a ldwm to restore EH data regs.  */
	  rtx stack_mem = gen_frame_mem (SImode, stack_pointer_rtx);
	  insn = emit_insn (nios2_ldst_parallel
			    (true, true, true, stack_mem,
			     caller_save_size, caller_save_regs, false));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  nios2_create_cfa_notes (insn, true);
	}

      if (callee_save_regs)
	{
	  int sp_adjust = save_offset + callee_save_size;
	  rtx stack_mem;
	  if (ret_p)
	    {
	      /* Emit a pop.n to restore regs and return.  */
	      stack_mem =
		gen_frame_mem (SImode,
			       gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					     gen_int_mode (sp_adjust - 4,
							   Pmode)));
	      insn =
		emit_jump_insn (nios2_ldst_parallel (true, false, false,
						     stack_mem, sp_adjust,
						     callee_save_regs, ret_p));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      /* No need to attach CFA notes since we cannot step over
		 a return.  */
	      return;
	    }
	  else
	    {
	      /* If no return, we have to use the ldwm form.  */
	      stack_mem = gen_frame_mem (SImode, stack_pointer_rtx);
	      insn =
		emit_insn (nios2_ldst_parallel (true, true, true,
						stack_mem, sp_adjust,
						callee_save_regs, ret_p));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      nios2_create_cfa_notes (insn, true);
	    }
	}

      if (pretend_args_size)
	nios2_adjust_stack (pretend_args_size, true);
    }

  /* Add in the __builtin_eh_return stack adjustment.  */
  if (crtl->calls_eh_return)
    emit_insn (gen_add2_insn (stack_pointer_rtx, EH_RETURN_STACKADJ_RTX));

  if (!sibcall_p)
    emit_jump_insn (gen_simple_return ());
}
930
/* Try to expand a combined stack-pop-and-return; return true on
   success, false if the caller should fall back to the standard
   return path.  */
bool
nios2_expand_return (void)
{
  /* If CDX is available, generate a pop.n instruction to do both
     the stack pop and return.  */
  if (TARGET_HAS_CDX)
    {
      int total_frame_size = nios2_compute_frame_layout ();
      int sp_adjust = (cfun->machine->save_regs_offset
		       + cfun->machine->callee_save_reg_size);
      /* This path is only valid when the save area sits at the top of
	 the frame, i.e. the pop.n adjustment covers the whole frame.  */
      gcc_assert (sp_adjust == total_frame_size);
      if (sp_adjust != 0)
	{
	  rtx mem =
	    gen_frame_mem (SImode,
			   plus_constant (Pmode, stack_pointer_rtx,
					  sp_adjust - 4, false));
	  rtx_insn *insn =
	    emit_jump_insn (nios2_ldst_parallel (true, false, false,
						 mem, sp_adjust,
						 cfun->machine->save_mask,
						 true));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  /* No need to create CFA notes since we can't step over
	     a return.  */
	  return true;
	}
    }
  return false;
}
961
962 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
963 back to a previous frame. */
964 rtx
nios2_get_return_address(int count)965 nios2_get_return_address (int count)
966 {
967 if (count != 0)
968 return const0_rtx;
969
970 return get_hard_reg_initial_val (Pmode, RA_REGNO);
971 }
972
973 /* Emit code to change the current function's return address to
974 ADDRESS. SCRATCH is available as a scratch register, if needed.
975 ADDRESS and SCRATCH are both word-mode GPRs. */
976 void
nios2_set_return_address(rtx address,rtx scratch)977 nios2_set_return_address (rtx address, rtx scratch)
978 {
979 nios2_compute_frame_layout ();
980 if (cfun->machine->save_mask & (1 << RA_REGNO))
981 {
982 unsigned offset = cfun->machine->save_reg_size - 4;
983 rtx base;
984
985 if (frame_pointer_needed)
986 base = hard_frame_pointer_rtx;
987 else
988 {
989 base = stack_pointer_rtx;
990 offset += cfun->machine->save_regs_offset;
991
992 if (!SMALL_INT (offset))
993 {
994 emit_move_insn (scratch, gen_int_mode (offset, Pmode));
995 emit_insn (gen_add2_insn (scratch, base));
996 base = scratch;
997 offset = 0;
998 }
999 }
1000 if (offset)
1001 base = plus_constant (Pmode, base, offset);
1002 emit_move_insn (gen_rtx_MEM (Pmode, base), address);
1003 }
1004 else
1005 emit_move_insn (gen_rtx_REG (Pmode, RA_REGNO), address);
1006 }
1007
/* Implement FUNCTION_PROFILER macro.  Emit the call to _mcount at
   function entry.  The incoming return address is preserved in r8
   around the call, and restored to ra afterwards.  */
void
nios2_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
{
  fprintf (file, "\tmov\tr8, ra\n");
  if (flag_pic == 1)
    {
      /* -fpic: compute the GOT pointer pc-relatively into r2, then
         call _mcount through its GOT entry.  */
      fprintf (file, "\tnextpc\tr2\n");
      fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
      fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
      fprintf (file, "\tadd\tr2, r2, r3\n");
      fprintf (file, "\tldw\tr2, %%call(_mcount)(r2)\n");
      fprintf (file, "\tcallr\tr2\n");
    }
  else if (flag_pic == 2)
    {
      /* -fPIC: same GOT pointer setup, but the GOT offset of _mcount
         may not fit in 16 bits, so build it with a hiadj/lo pair.  */
      fprintf (file, "\tnextpc\tr2\n");
      fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
      fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
      fprintf (file, "\tadd\tr2, r2, r3\n");
      fprintf (file, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
      fprintf (file, "\taddi\tr3, r3, %%call_lo(_mcount)\n");
      fprintf (file, "\tadd\tr3, r2, r3\n");
      fprintf (file, "\tldw\tr2, 0(r3)\n");
      fprintf (file, "\tcallr\tr2\n");
    }
  else
    /* Non-PIC: a direct call suffices.  */
    fprintf (file, "\tcall\t_mcount\n");
  fprintf (file, "\tmov\tra, r8\n");
}
1038
1039 /* Dump stack layout. */
1040 static void
nios2_dump_frame_layout(FILE * file)1041 nios2_dump_frame_layout (FILE *file)
1042 {
1043 fprintf (file, "\t%s Current Frame Info\n", ASM_COMMENT_START);
1044 fprintf (file, "\t%s total_size = %d\n", ASM_COMMENT_START,
1045 cfun->machine->total_size);
1046 fprintf (file, "\t%s var_size = %d\n", ASM_COMMENT_START,
1047 cfun->machine->var_size);
1048 fprintf (file, "\t%s args_size = %d\n", ASM_COMMENT_START,
1049 cfun->machine->args_size);
1050 fprintf (file, "\t%s save_reg_size = %d\n", ASM_COMMENT_START,
1051 cfun->machine->save_reg_size);
1052 fprintf (file, "\t%s initialized = %d\n", ASM_COMMENT_START,
1053 cfun->machine->initialized);
1054 fprintf (file, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START,
1055 cfun->machine->save_regs_offset);
1056 fprintf (file, "\t%s is_leaf = %d\n", ASM_COMMENT_START,
1057 crtl->is_leaf);
1058 fprintf (file, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START,
1059 frame_pointer_needed);
1060 fprintf (file, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START,
1061 crtl->args.pretend_args_size);
1062 }
1063
1064 /* Return true if REGNO should be saved in the prologue. */
1065 static bool
prologue_saved_reg_p(unsigned regno)1066 prologue_saved_reg_p (unsigned regno)
1067 {
1068 gcc_assert (GP_REG_P (regno));
1069
1070 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
1071 return true;
1072
1073 if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
1074 return true;
1075
1076 if (regno == PIC_OFFSET_TABLE_REGNUM && crtl->uses_pic_offset_table)
1077 return true;
1078
1079 if (regno == RA_REGNO && df_regs_ever_live_p (RA_REGNO))
1080 return true;
1081
1082 return false;
1083 }
1084
1085 /* Implement TARGET_CAN_ELIMINATE. */
1086 static bool
nios2_can_eliminate(const int from ATTRIBUTE_UNUSED,const int to)1087 nios2_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1088 {
1089 if (to == STACK_POINTER_REGNUM)
1090 return !frame_pointer_needed;
1091 return true;
1092 }
1093
/* Implement INITIAL_ELIMINATION_OFFSET macro.  Return the constant
   to add when replacing register FROM with register TO.  */
int
nios2_initial_elimination_offset (int from, int to)
{
  int offset;

  /* Make sure the frame-layout fields below are up to date.  */
  nios2_compute_frame_layout ();

  /* Set OFFSET to the offset from the stack pointer.  */
  switch (from)
    {
    case FRAME_POINTER_REGNUM:
      offset = cfun->machine->args_size;
      break;

    case ARG_POINTER_REGNUM:
      /* The arg pointer sits above the whole frame, except for any
         pretend args the caller already pushed.  */
      offset = cfun->machine->total_size;
      offset -= crtl->args.pretend_args_size;
      break;

    default:
      gcc_unreachable ();
    }

  /* If we are asked for the frame pointer offset, then adjust OFFSET
     by the offset from the frame pointer to the stack pointer.  */
  if (to == HARD_FRAME_POINTER_REGNUM)
    offset -= (cfun->machine->save_regs_offset
               + cfun->machine->fp_save_offset);

  return offset;
}
1126
/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack
   was created.  */
int
nios2_can_use_return_insn (void)
{
  int total_frame_size;

  /* The frame layout is only final after reload, and a profiled
     function always needs epilogue work.  */
  if (!reload_completed || crtl->profile)
    return 0;

  total_frame_size = nios2_compute_frame_layout ();

  /* If CDX is available, check if we can return using a
     single pop.n instruction.  The conditions require the save mask
     to include RA (bit 31) and no low registers (bits 0-15), and a
     small save-area offset -- presumably matching what pop.n can
     encode (see nios2_ldst_parallel).  */
  if (TARGET_HAS_CDX
      && !frame_pointer_needed
      && cfun->machine->save_regs_offset <= 60
      && (cfun->machine->save_mask & 0x80000000) != 0
      && (cfun->machine->save_mask & 0xffff) == 0
      && crtl->args.pretend_args_size == 0)
    return true;

  /* Otherwise a bare return is only possible with no frame at all.  */
  return total_frame_size == 0;
}
1152
1153
/* Check and signal some warnings/errors on FPU insn options.  */
static void
nios2_custom_check_insns (void)
{
  unsigned int i, j;
  bool errors = false;

  /* If any double-precision custom insn is enabled, then all insns
     marked as required for double precision must be enabled as well.
     One enabled double insn is enough to trigger the full check, so
     break out of the outer loop after the first hit.  */
  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    if (N2FPU_ENABLED_P (i) && N2FPU_DOUBLE_P (i))
      {
	for (j = 0; j < ARRAY_SIZE (nios2_fpu_insn); j++)
	  if (N2FPU_DOUBLE_REQUIRED_P (j) && ! N2FPU_ENABLED_P (j))
	    {
	      error ("switch %<-mcustom-%s%> is required for double "
		     "precision floating point", N2FPU_NAME (j));
	      errors = true;
	    }
	break;
      }

  /* Warn if the user has certain exotic operations that won't get used
     without -funsafe-math-optimizations.  See expand_builtin () in
     builtins.c.  */
  if (!flag_unsafe_math_optimizations)
    for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
      if (N2FPU_ENABLED_P (i) && N2FPU_UNSAFE_P (i))
	warning (0, "switch %<-mcustom-%s%> has no effect unless "
		 "-funsafe-math-optimizations is specified", N2FPU_NAME (i));

  /* Warn if the user is trying to use -mcustom-fmins et. al, that won't
     get used without -ffinite-math-only.  See fold_builtin_fmin_fmax ()
     in builtins.c.  */
  if (!flag_finite_math_only)
    for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
      if (N2FPU_ENABLED_P (i) && N2FPU_FINITE_P (i))
	warning (0, "switch %<-mcustom-%s%> has no effect unless "
		 "-ffinite-math-only is specified", N2FPU_NAME (i));

  /* Warn if the user is trying to use a custom rounding instruction
     that won't get used without -fno-math-errno.  See
     expand_builtin_int_roundingfn_2 () in builtins.c.  */
  if (flag_errno_math)
    for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
      if (N2FPU_ENABLED_P (i) && N2FPU_NO_ERRNO_P (i))
	warning (0, "switch %<-mcustom-%s%> has no effect unless "
		 "-fno-math-errno is specified", N2FPU_NAME (i));

  /* Errors above, or conflicts recorded while registering custom
     codes, are fatal: the custom-instruction assignment is unusable.  */
  if (errors || custom_code_conflict)
    fatal_error (input_location,
		 "conflicting use of -mcustom switches, target attributes, "
		 "and/or __builtin_custom_ functions");
}
1206
1207 static void
nios2_set_fpu_custom_code(enum n2fpu_code code,int n,bool override_p)1208 nios2_set_fpu_custom_code (enum n2fpu_code code, int n, bool override_p)
1209 {
1210 if (override_p || N2FPU_N (code) == -1)
1211 N2FPU_N (code) = n;
1212 nios2_register_custom_code (n, CCS_FPU, (int) code);
1213 }
1214
/* Type to represent a standard FPU config.  */
struct nios2_fpu_config
{
  /* Name accepted by -mcustom-fpu-cfg=.  */
  const char *name;
  /* True if this configuration also enables
     -fsingle-precision-constant.  */
  bool set_sp_constants;
  /* Custom instruction code for each FPU operation, or -1 when the
     operation is not provided by this configuration.  */
  int code[n2fpu_code_num];
};

/* Table of the standard configurations, filled in lazily by
   nios2_init_fpu_configs.  */
#define NIOS2_FPU_CONFIG_NUM 3
static struct nios2_fpu_config custom_fpu_config[NIOS2_FPU_CONFIG_NUM];
1225
/* Populate the table of standard FPU configurations.  */
static void
nios2_init_fpu_configs (void)
{
  struct nios2_fpu_config* cfg;
  int i = 0;
  /* Advance CFG to the next table slot and fill it with -1 bytes:
     this marks every code[] entry as "unassigned".  The name and
     set_sp_constants fields are then overwritten explicitly.  */
#define NEXT_FPU_CONFIG				\
  do {						\
    cfg = &custom_fpu_config[i++];		\
    memset (cfg, -1, sizeof (struct nios2_fpu_config));\
  } while (0)

  NEXT_FPU_CONFIG;
  cfg->name = "60-1";
  cfg->set_sp_constants  = true;
  cfg->code[n2fpu_fmuls] = 252;
  cfg->code[n2fpu_fadds] = 253;
  cfg->code[n2fpu_fsubs] = 254;

  NEXT_FPU_CONFIG;
  cfg->name = "60-2";
  cfg->set_sp_constants  = true;
  cfg->code[n2fpu_fmuls] = 252;
  cfg->code[n2fpu_fadds] = 253;
  cfg->code[n2fpu_fsubs] = 254;
  cfg->code[n2fpu_fdivs] = 255;

  NEXT_FPU_CONFIG;
  cfg->name = "72-3";
  cfg->set_sp_constants  = true;
  cfg->code[n2fpu_floatus] = 243;
  cfg->code[n2fpu_fixsi]   = 244;
  cfg->code[n2fpu_floatis] = 245;
  cfg->code[n2fpu_fcmpgts] = 246;
  cfg->code[n2fpu_fcmples] = 249;
  cfg->code[n2fpu_fcmpeqs] = 250;
  cfg->code[n2fpu_fcmpnes] = 251;
  cfg->code[n2fpu_fmuls]   = 252;
  cfg->code[n2fpu_fadds]   = 253;
  cfg->code[n2fpu_fsubs]   = 254;
  cfg->code[n2fpu_fdivs]   = 255;

#undef NEXT_FPU_CONFIG
  gcc_assert (i == NIOS2_FPU_CONFIG_NUM);
}
1270
1271 static struct nios2_fpu_config *
nios2_match_custom_fpu_cfg(const char * cfgname,const char * endp)1272 nios2_match_custom_fpu_cfg (const char *cfgname, const char *endp)
1273 {
1274 int i;
1275 for (i = 0; i < NIOS2_FPU_CONFIG_NUM; i++)
1276 {
1277 bool match = !(endp != NULL
1278 ? strncmp (custom_fpu_config[i].name, cfgname,
1279 endp - cfgname)
1280 : strcmp (custom_fpu_config[i].name, cfgname));
1281 if (match)
1282 return &custom_fpu_config[i];
1283 }
1284 return NULL;
1285 }
1286
/* Use CFGNAME to lookup FPU config, ENDP if not NULL marks end of string.
   OVERRIDE is true if loaded config codes should overwrite current state.  */
static void
nios2_handle_custom_fpu_cfg (const char *cfgname, const char *endp,
			     bool override)
{
  struct nios2_fpu_config *cfg = nios2_match_custom_fpu_cfg (cfgname, endp);
  if (cfg)
    {
      unsigned int i;
      /* Install every custom code the configuration provides;
         entries left at -1 are untouched.  */
      for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
	if (cfg->code[i] >= 0)
	  nios2_set_fpu_custom_code ((enum n2fpu_code) i, cfg->code[i],
				     override);
      if (cfg->set_sp_constants)
	flag_single_precision_constant = 1;
    }
  else
    warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
	     "value %<%s%>", cfgname);

  /* Guard against errors in the standard configurations.  */
  nios2_custom_check_insns ();
}
1311
1312 /* Check individual FPU insn options, and register custom code. */
1313 static void
nios2_handle_custom_fpu_insn_option(int fpu_insn_index)1314 nios2_handle_custom_fpu_insn_option (int fpu_insn_index)
1315 {
1316 int param = N2FPU_N (fpu_insn_index);
1317
1318 if (0 <= param && param <= 255)
1319 nios2_register_custom_code (param, CCS_FPU, fpu_insn_index);
1320
1321 /* Valid values are 0-255, but also allow -1 so that the
1322 -mno-custom-<opt> switches work. */
1323 else if (param != -1)
1324 error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
1325 N2FPU_NAME (fpu_insn_index), param);
1326 }
1327
/* Allocate a chunk of memory for per-function machine-dependent data.
   The allocation is GC-managed and zero-initialized, so all frame
   layout fields start out as 0/false.  */
static struct machine_function *
nios2_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}
1334
/* Implement TARGET_OPTION_OVERRIDE.  Validate the command-line option
   combination and set up derived state (FPU configs, -G threshold,
   per-function data allocation).  */
static void
nios2_option_override (void)
{
  unsigned int i;

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Check for unsupported options.  */
  if (flag_pic && !TARGET_LINUX_ABI)
    sorry ("position-independent code requires the Linux ABI");
  if (flag_pic && stack_limit_rtx
      && GET_CODE (stack_limit_rtx) == SYMBOL_REF)
    sorry ("PIC support for -fstack-limit-symbol");

  /* Function to allocate machine-dependent function status.  */
  init_machine_status = &nios2_init_machine_status;

  /* Small-data threshold: honor an explicit -G, else the default.  */
  nios2_section_threshold
    = (global_options_set.x_g_switch_value
       ? g_switch_value : NIOS2_DEFAULT_GVALUE);

  if (nios2_gpopt_option == gpopt_unspecified)
    {
      /* Default to -mgpopt unless -fpic or -fPIC.  */
      if (flag_pic)
	nios2_gpopt_option = gpopt_none;
      else
	nios2_gpopt_option = gpopt_local;
    }

  /* If we don't have mul, we don't have mulx either!  */
  if (!TARGET_HAS_MUL && TARGET_HAS_MULX)
    target_flags &= ~MASK_HAS_MULX;

  /* Optional BMX and CDX instructions only make sense for R2.  */
  if (!TARGET_ARCH_R2)
    {
      if (TARGET_HAS_BMX)
	error ("BMX instructions are only supported with R2 architecture");
      if (TARGET_HAS_CDX)
	error ("CDX instructions are only supported with R2 architecture");
    }

  /* R2 is little-endian only.  */
  if (TARGET_ARCH_R2 && TARGET_BIG_ENDIAN)
    error ("R2 architecture is little-endian only");

  /* Initialize default FPU configurations.  */
  nios2_init_fpu_configs ();

  /* Set up default handling for floating point custom instructions.

     Putting things in this order means that the -mcustom-fpu-cfg=
     switch will always be overridden by individual -mcustom-fadds=
     switches, regardless of the order in which they were specified
     on the command line.

     This behavior of prioritization of individual -mcustom-<insn>=
     options before the -mcustom-fpu-cfg= switch is maintained for
     compatibility.  */
  if (nios2_custom_fpu_cfg_string && *nios2_custom_fpu_cfg_string)
    nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string, NULL, false);

  /* Handle options for individual FPU insns.  */
  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    nios2_handle_custom_fpu_insn_option (i);

  nios2_custom_check_insns ();

  /* Save the initial options in case the user does function specific
     options.  */
  target_option_default_node = target_option_current_node
    = build_target_option_node (&global_options);
}
1412
1413
/* Return true if CST is a constant within range of movi/movui/movhi,
   i.e. loadable with a single instruction: 16-bit signed, 16-bit
   unsigned, or a value with only the upper 16 bits set.  */
static bool
nios2_simple_const_p (const_rtx cst)
{
  HOST_WIDE_INT val = INTVAL (cst);
  return SMALL_INT (val) || SMALL_INT_UNSIGNED (val) || UPPER16_INT (val);
}
1421
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */
static bool
nios2_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
		 int outer_code ATTRIBUTE_UNUSED,
		 int opno ATTRIBUTE_UNUSED,
		 int *total, bool speed ATTRIBUTE_UNUSED)
{
  int code = GET_CODE (x);

  switch (code)
    {
      case CONST_INT:
	if (INTVAL (x) == 0)
	  {
	    /* Zero is free: it is always available in r0.  */
	    *total = COSTS_N_INSNS (0);
	    return true;
	  }
	else if (nios2_simple_const_p (x))
	  {
	    /* One movi/movui/movhi.  */
	    *total = COSTS_N_INSNS (2);
	    return true;
	  }
	else
	  {
	    /* Needs a movhi/addi pair.  */
	    *total = COSTS_N_INSNS (4);
	    return true;
	  }

      case LABEL_REF:
      case SYMBOL_REF:
      case CONST:
      case CONST_DOUBLE:
	{
	  *total = COSTS_N_INSNS (4);
	  return true;
	}

      case AND:
	{
	  /* Recognize 'nor' insn pattern.  */
	  if (GET_CODE (XEXP (x, 0)) == NOT
	      && GET_CODE (XEXP (x, 1)) == NOT)
	    {
	      *total = COSTS_N_INSNS (1);
	      return true;
	    }
	  return false;
	}

      case MULT:
	{
	  *total = COSTS_N_INSNS (1);
	  return false;
	}
      case SIGN_EXTEND:
	{
	  *total = COSTS_N_INSNS (3);
	  return false;
	}
      case ZERO_EXTEND:
	{
	  *total = COSTS_N_INSNS (1);
	  return false;
	}

      case ZERO_EXTRACT:
	/* With BMX, a single extract insn does the job; otherwise
	   fall through and let subexpressions be costed.  */
	if (TARGET_HAS_BMX)
	  {
	    *total = COSTS_N_INSNS (1);
	    return true;
	  }
	/* Fall through.  */

      default:
        return false;
    }
}
1500
1501 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1502 static reg_class_t
nios2_preferred_reload_class(rtx x ATTRIBUTE_UNUSED,reg_class_t regclass)1503 nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, reg_class_t regclass)
1504 {
1505 return regclass == NO_REGS ? GENERAL_REGS : regclass;
1506 }
1507
/* Emit a call to __tls_get_addr.  TI is the argument to this function.
   RET is an RTX for the return value location.  The entire insn sequence
   is returned.  */
static GTY(()) rtx nios2_tls_symbol;

static rtx
nios2_call_tls_get_addr (rtx ti)
{
  rtx arg = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);
  rtx ret = gen_rtx_REG (Pmode, FIRST_RETVAL_REGNO);
  rtx fn;
  rtx_insn *insn;

  /* Lazily create the libfunc symbol; the GTY root above keeps it
     alive across garbage collection.  */
  if (!nios2_tls_symbol)
    nios2_tls_symbol = init_one_libfunc ("__tls_get_addr");

  emit_move_insn (arg, ti);
  fn = gen_rtx_MEM (QImode, nios2_tls_symbol);
  insn = emit_call_insn (gen_call_value (ret, fn, const0_rtx));
  /* Mark the call const so the optimizers know its result depends
     only on its argument.  */
  RTL_CONST_CALL_P (insn) = 1;
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), ret);
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), arg);

  return ret;
}
1533
1534 /* Return true for large offsets requiring hiadj/lo relocation pairs. */
1535 static bool
nios2_large_offset_p(int unspec)1536 nios2_large_offset_p (int unspec)
1537 {
1538 gcc_assert (nios2_unspec_reloc_name (unspec) != NULL);
1539
1540 if (flag_pic == 2
1541 /* FIXME: TLS GOT offset relocations will eventually also get this
1542 treatment, after binutils support for those are also completed. */
1543 && (unspec == UNSPEC_PIC_SYM || unspec == UNSPEC_PIC_CALL_SYM))
1544 return true;
1545
1546 /* 'gotoff' offsets are always hiadj/lo. */
1547 if (unspec == UNSPEC_PIC_GOTOFF_SYM)
1548 return true;
1549
1550 return false;
1551 }
1552
1553 /* Return true for conforming unspec relocations. Also used in
1554 constraints.md and predicates.md. */
1555 bool
nios2_unspec_reloc_p(rtx op)1556 nios2_unspec_reloc_p (rtx op)
1557 {
1558 return (GET_CODE (op) == CONST
1559 && GET_CODE (XEXP (op, 0)) == UNSPEC
1560 && ! nios2_large_offset_p (XINT (XEXP (op, 0), 1)));
1561 }
1562
1563 static bool
nios2_large_unspec_reloc_p(rtx op)1564 nios2_large_unspec_reloc_p (rtx op)
1565 {
1566 return (GET_CODE (op) == CONST
1567 && GET_CODE (XEXP (op, 0)) == UNSPEC
1568 && nios2_large_offset_p (XINT (XEXP (op, 0), 1)));
1569 }
1570
/* Helper to generate unspec constant: wrap LOC in an UNSPEC of kind
   UNSPEC inside a CONST, so later passes treat it as a relocatable
   constant expression.  */
static rtx
nios2_unspec_offset (rtx loc, int unspec)
{
  return gen_rtx_CONST (Pmode, gen_rtx_UNSPEC (Pmode, gen_rtvec (1, loc),
					       unspec));
}
1578
/* Generate GOT pointer based address with large offset.  OFFSET is
   first loaded into TMP (a fresh pseudo is allocated if TMP is null)
   since it does not fit in an addressing-mode immediate.  */
static rtx
nios2_large_got_address (rtx offset, rtx tmp)
{
  if (!tmp)
    tmp = gen_reg_rtx (Pmode);
  emit_move_insn (tmp, offset);
  return gen_rtx_PLUS (Pmode, tmp, pic_offset_table_rtx);
}
1588
/* Generate a GOT pointer based address for LOC using relocation kind
   UNSPEC.  As a side effect, marks the function as using the GOT
   pointer so the prologue sets it up.  */
static rtx
nios2_got_address (rtx loc, int unspec)
{
  rtx offset = nios2_unspec_offset (loc, unspec);
  crtl->uses_pic_offset_table = 1;

  /* Large offsets must be materialized in a register first.  */
  if (nios2_large_offset_p (unspec))
    return force_reg (Pmode, nios2_large_got_address (offset, NULL_RTX));

  return gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
}
1601
/* Generate the code to access LOC, a thread local SYMBOL_REF.  The
   return value will be a valid address and move_operand (either a REG
   or a LO_SUM).  */
static rtx
nios2_legitimize_tls_address (rtx loc)
{
  rtx tmp, mem, tp;
  enum tls_model model = SYMBOL_REF_TLS_MODEL (loc);

  switch (model)
    {
    case TLS_MODEL_GLOBAL_DYNAMIC:
      /* Call __tls_get_addr on the GOT-based GD offset of LOC.  */
      tmp = gen_reg_rtx (Pmode);
      emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_GD));
      return nios2_call_tls_get_addr (tmp);

    case TLS_MODEL_LOCAL_DYNAMIC:
      /* Call __tls_get_addr for the module base, then add LOC's
	 module-relative (LDO) offset as a constant.  */
      tmp = gen_reg_rtx (Pmode);
      emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_LDM));
      return gen_rtx_PLUS (Pmode, nios2_call_tls_get_addr (tmp),
			   nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LDO));

    case TLS_MODEL_INITIAL_EXEC:
      /* Load LOC's thread-pointer-relative offset from the GOT and
	 add it to the thread pointer.  */
      tmp = gen_reg_rtx (Pmode);
      mem = gen_const_mem (Pmode, nios2_got_address (loc, UNSPEC_LOAD_TLS_IE));
      emit_move_insn (tmp, mem);
      tp = gen_rtx_REG (Pmode, TP_REGNO);
      return gen_rtx_PLUS (Pmode, tp, tmp);

    case TLS_MODEL_LOCAL_EXEC:
      /* The offset from the thread pointer is a link-time constant.  */
      tp = gen_rtx_REG (Pmode, TP_REGNO);
      return gen_rtx_PLUS (Pmode, tp,
			   nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LE));
    default:
      gcc_unreachable ();
    }
}
1639
/* Divide Support

   If -O3 is used, we want to output a table lookup for
   divides between small numbers (both num and den >= 0
   and < 0x10).  The overhead of this method in the worst
   case is 40 bytes in the text section (10 insns) and
   256 bytes in the data section.  Additional divides do
   not incur additional penalties in the data section.

   Code speed is improved for small divides by about 5x
   when using this method in the worse case (~9 cycles
   vs ~45).  And in the worst case divides not within the
   table are penalized by about 10% (~5 cycles vs ~45).
   However in the typical case the penalty is not as bad
   because doing the long divide in only 45 cycles is
   quite optimistic.

   ??? would be nice to have some benchmarks other
   than Dhrystone to back this up.

   This bit of expansion is to create this instruction
   sequence as rtl.
	or	$8, $4, $5
	slli	$9, $4, 4
	cmpgeui	$3, $8, 16
	beq	$3, $0, .L3
	or	$10, $9, $5
	add	$12, $11, divide_table
	ldbu	$2, 0($12)
	br	.L1
.L3:
	call	slow_div
.L1:
#	continue here with result in $2

   ??? Ideally I would like the libcall block to contain all
   of this code, but I don't know how to do that.  What it
   means is that if the divide can be eliminated, it may not
   completely disappear.

   ??? The __divsi3_table label should ideally be moved out
   of this block and into a global.  If it is placed into the
   sdata section we can save even more cycles by doing things
   gp relative.  */
void
nios2_emit_expensive_div (rtx *operands, machine_mode mode)
{
  rtx or_result, shift_left_result;
  rtx lookup_value;
  rtx_code_label *lab1, *lab3;
  rtx_insn *insns;
  rtx libfunc;
  rtx final_result;
  rtx_insn *tmp;
  rtx table;

  /* It may look a little generic, but only SImode is supported for now.  */
  gcc_assert (mode == SImode);
  libfunc = optab_libfunc (sdiv_optab, SImode);

  /* lab1 is the join point after the fast path; lab3 is the slow
     (library call) path.  */
  lab1 = gen_label_rtx ();
  lab3 = gen_label_rtx ();

  /* num|den has a bit >= 4 set iff either operand is outside the
     0..15 table range; branch to the slow path in that case.  */
  or_result = expand_simple_binop (SImode, IOR,
				   operands[1], operands[2],
				   0, 0, OPTAB_LIB_WIDEN);

  emit_cmp_and_jump_insns (or_result, GEN_INT (15), GTU, 0,
			   GET_MODE (or_result), 0, lab3);
  JUMP_LABEL (get_last_insn ()) = lab3;

  /* Fast path: index the 16x16 byte table with (num << 4) | den and
     zero-extend the loaded byte into the result.  */
  shift_left_result = expand_simple_binop (SImode, ASHIFT,
					   operands[1], GEN_INT (4),
					   0, 0, OPTAB_LIB_WIDEN);

  lookup_value = expand_simple_binop (SImode, IOR,
				      shift_left_result, operands[2],
				      0, 0, OPTAB_LIB_WIDEN);
  table = gen_rtx_PLUS (SImode, lookup_value,
			gen_rtx_SYMBOL_REF (SImode, "__divsi3_table"));
  convert_move (operands[0], gen_rtx_MEM (QImode, table), 1);

  /* Skip over the slow path.  */
  tmp = emit_jump_insn (gen_jump (lab1));
  JUMP_LABEL (tmp) = lab1;
  emit_barrier ();

  emit_label (lab3);
  LABEL_NUSES (lab3) = 1;

  /* Slow path: emit the division libcall, wrapped in a libcall block
     so it can be deleted if the result is unused.  */
  start_sequence ();
  final_result = emit_library_call_value (libfunc, NULL_RTX,
					  LCT_CONST, SImode, 2,
					  operands[1], SImode,
					  operands[2], SImode);

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], final_result,
		      gen_rtx_DIV (SImode, operands[1], operands[2]));

  emit_label (lab1);
  LABEL_NUSES (lab1) = 1;
}
1743
1744
1745 /* Branches and compares. */
1746
1747 /* Return in *ALT_CODE and *ALT_OP, an alternate equivalent constant
1748 comparison, e.g. >= 1 into > 0. */
1749 static void
nios2_alternate_compare_const(enum rtx_code code,rtx op,enum rtx_code * alt_code,rtx * alt_op,machine_mode mode)1750 nios2_alternate_compare_const (enum rtx_code code, rtx op,
1751 enum rtx_code *alt_code, rtx *alt_op,
1752 machine_mode mode)
1753 {
1754 HOST_WIDE_INT opval = INTVAL (op);
1755 enum rtx_code scode = signed_condition (code);
1756 bool dec_p = (scode == LT || scode == GE);
1757
1758 if (code == EQ || code == NE)
1759 {
1760 *alt_code = code;
1761 *alt_op = op;
1762 return;
1763 }
1764
1765 *alt_op = (dec_p
1766 ? gen_int_mode (opval - 1, mode)
1767 : gen_int_mode (opval + 1, mode));
1768
1769 /* The required conversion between [>,>=] and [<,<=] is captured
1770 by a reverse + swap of condition codes. */
1771 *alt_code = reverse_condition (swap_condition (code));
1772
1773 {
1774 /* Test if the incremented/decremented value crosses the over/underflow
1775 boundary. Supposedly, such boundary cases should already be transformed
1776 into always-true/false or EQ conditions, so use an assertion here. */
1777 unsigned HOST_WIDE_INT alt_opval = INTVAL (*alt_op);
1778 if (code == scode)
1779 alt_opval ^= (1 << (GET_MODE_BITSIZE (mode) - 1));
1780 alt_opval &= GET_MODE_MASK (mode);
1781 gcc_assert (dec_p ? alt_opval != GET_MODE_MASK (mode) : alt_opval != 0);
1782 }
1783 }
1784
1785 /* Return true if the constant comparison is supported by nios2. */
1786 static bool
nios2_valid_compare_const_p(enum rtx_code code,rtx op)1787 nios2_valid_compare_const_p (enum rtx_code code, rtx op)
1788 {
1789 switch (code)
1790 {
1791 case EQ: case NE: case GE: case LT:
1792 return SMALL_INT (INTVAL (op));
1793 case GEU: case LTU:
1794 return SMALL_INT_UNSIGNED (INTVAL (op));
1795 default:
1796 return false;
1797 }
1798 }
1799
1800 /* Checks if the FPU comparison in *CMP, *OP1, and *OP2 can be supported in
1801 the current configuration. Perform modifications if MODIFY_P is true.
1802 Returns true if FPU compare can be done. */
1803
1804 bool
nios2_validate_fpu_compare(machine_mode mode,rtx * cmp,rtx * op1,rtx * op2,bool modify_p)1805 nios2_validate_fpu_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2,
1806 bool modify_p)
1807 {
1808 bool rev_p = false;
1809 enum rtx_code code = GET_CODE (*cmp);
1810
1811 if (!nios2_fpu_compare_enabled (code, mode))
1812 {
1813 code = swap_condition (code);
1814 if (nios2_fpu_compare_enabled (code, mode))
1815 rev_p = true;
1816 else
1817 return false;
1818 }
1819
1820 if (modify_p)
1821 {
1822 if (rev_p)
1823 {
1824 rtx tmp = *op1;
1825 *op1 = *op2;
1826 *op2 = tmp;
1827 }
1828 *op1 = force_reg (mode, *op1);
1829 *op2 = force_reg (mode, *op2);
1830 *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
1831 }
1832 return true;
1833 }
1834
/* Checks and modifies the comparison in *CMP, *OP1, and *OP2 into valid
   nios2 supported form.  Returns true if success.  */
bool
nios2_validate_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2)
{
  enum rtx_code code = GET_CODE (*cmp);
  enum rtx_code alt_code;
  rtx alt_op2;

  /* Floating-point comparisons are handled separately.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
    return nios2_validate_fpu_compare (mode, cmp, op1, op2, true);

  if (!reg_or_0_operand (*op2, mode))
    {
      /* Create alternate constant compare.  */
      nios2_alternate_compare_const (code, *op2, &alt_code, &alt_op2, mode);

      /* If alterate op2 is zero(0), we can use it directly, possibly
	 swapping the compare code.  */
      if (alt_op2 == const0_rtx)
	{
	  code = alt_code;
	  *op2 = alt_op2;
	  goto check_rebuild_cmp;
	}

      /* Check if either constant compare can be used.  */
      if (nios2_valid_compare_const_p (code, *op2))
	return true;
      else if (nios2_valid_compare_const_p (alt_code, alt_op2))
	{
	  code = alt_code;
	  *op2 = alt_op2;
	  goto rebuild_cmp;
	}

      /* We have to force op2 into a register now.  Try to pick one
	 with a lower cost.  */
      if (! nios2_simple_const_p (*op2)
	  && nios2_simple_const_p (alt_op2))
	{
	  code = alt_code;
	  *op2 = alt_op2;
	}
      *op2 = force_reg (SImode, *op2);
    }
 check_rebuild_cmp:
  /* GT/GTU/LE/LEU have no direct insn; express them as the swapped
     condition with the operands exchanged.  */
  if (code == GT || code == GTU || code == LE || code == LEU)
    {
      rtx t = *op1; *op1 = *op2; *op2 = t;
      code = swap_condition (code);
    }
 rebuild_cmp:
  *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
  return true;
}
1891
1892
1893 /* Addressing Modes. */
1894
1895 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1896 static bool
nios2_legitimate_constant_p(machine_mode mode ATTRIBUTE_UNUSED,rtx x)1897 nios2_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1898 {
1899 rtx base, offset;
1900 split_const (x, &base, &offset);
1901 return GET_CODE (base) != SYMBOL_REF || !SYMBOL_REF_TLS_MODEL (base);
1902 }
1903
1904 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
1905 static bool
nios2_cannot_force_const_mem(machine_mode mode ATTRIBUTE_UNUSED,rtx x)1906 nios2_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1907 {
1908 return nios2_legitimate_constant_p (mode, x) == false;
1909 }
1910
/* Return true if register REGNO is a valid base register.
   STRICT_P is true if REG_OK_STRICT is in effect.  */

bool
nios2_regno_ok_for_base_p (int regno, bool strict_p)
{
  if (!HARD_REGISTER_NUM_P (regno))
    {
      /* Any pseudo is acceptable in non-strict mode.  */
      if (!strict_p)
	return true;

      /* In strict mode the pseudo must have been assigned a hard
	 register; check its assignment below.  */
      if (!reg_renumber)
	return false;

      regno = reg_renumber[regno];
    }

  /* The fake registers will be eliminated to either the stack or
     hard frame pointer, both of which are usually valid base registers.
     Reload deals with the cases where the eliminated form isn't valid.  */
  return (GP_REG_P (regno)
	  || regno == FRAME_POINTER_REGNUM
	  || regno == ARG_POINTER_REGNUM);
}
1935
1936 /* Return true if OFFSET is permitted in a load/store address expression.
1937 Normally any 16-bit value is permitted, but on R2 if we may be emitting
1938 the IO forms of these instructions we must restrict the offset to fit
1939 in a 12-bit field instead. */
1940
1941 static bool
nios2_valid_addr_offset_p(rtx offset)1942 nios2_valid_addr_offset_p (rtx offset)
1943 {
1944 return (CONST_INT_P (offset)
1945 && ((TARGET_ARCH_R2 && (TARGET_BYPASS_CACHE
1946 || TARGET_BYPASS_CACHE_VOLATILE))
1947 ? SMALL_INT12 (INTVAL (offset))
1948 : SMALL_INT (INTVAL (offset))));
1949 }
1950
1951 /* Return true if the address expression formed by BASE + OFFSET is
1952 valid. */
1953 static bool
nios2_valid_addr_expr_p(rtx base,rtx offset,bool strict_p)1954 nios2_valid_addr_expr_p (rtx base, rtx offset, bool strict_p)
1955 {
1956 if (!strict_p && GET_CODE (base) == SUBREG)
1957 base = SUBREG_REG (base);
1958 return (REG_P (base)
1959 && nios2_regno_ok_for_base_p (REGNO (base), strict_p)
1960 && (offset == NULL_RTX
1961 || nios2_valid_addr_offset_p (offset)
1962 || nios2_unspec_reloc_p (offset)));
1963 }
1964
1965 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
static bool
nios2_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
			    rtx operand, bool strict_p)
{
  switch (GET_CODE (operand))
    {
      /* Direct.  Symbolic addresses are only directly addressable when
	 they can be reached GP-relative; TLS symbols never are.  */
    case SYMBOL_REF:
      if (SYMBOL_REF_TLS_MODEL (operand))
	return false;

      /* Else, fall through.  */
    case CONST:
      if (gprel_constant_p (operand))
	return true;

      /* Else, fall through.  */
    case LABEL_REF:
    case CONST_INT:
    case CONST_DOUBLE:
      return false;

      /* Register indirect.  */
    case REG:
      return nios2_regno_ok_for_base_p (REGNO (operand), strict_p);

      /* Register indirect with displacement.  Accept the base/offset
	 operands in either order.  */
    case PLUS:
      {
        rtx op0 = XEXP (operand, 0);
        rtx op1 = XEXP (operand, 1);

	return (nios2_valid_addr_expr_p (op0, op1, strict_p)
		|| nios2_valid_addr_expr_p (op1, op0, strict_p));
      }

    default:
      break;
    }
  return false;
}
2007
2008 /* Return true if SECTION is a small section name. */
static bool
nios2_small_section_name_p (const char *section)
{
  /* Exact matches for the small-data sections themselves, or any
     ".sbss.*" / ".sdata.*" subsection.  */
  if (strcmp (section, ".sbss") == 0 || strcmp (section, ".sdata") == 0)
    return true;

  return (strncmp (section, ".sbss.", 6) == 0
	  || strncmp (section, ".sdata.", 7) == 0);
}
2017
2018 /* Return true if EXP should be placed in the small data section. */
2019 static bool
nios2_in_small_data_p(const_tree exp)2020 nios2_in_small_data_p (const_tree exp)
2021 {
2022 /* We want to merge strings, so we never consider them small data. */
2023 if (TREE_CODE (exp) == STRING_CST)
2024 return false;
2025
2026 if (TREE_CODE (exp) == VAR_DECL)
2027 {
2028 if (DECL_SECTION_NAME (exp))
2029 {
2030 const char *section = DECL_SECTION_NAME (exp);
2031 if (nios2_small_section_name_p (section))
2032 return true;
2033 }
2034 else
2035 {
2036 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
2037
2038 /* If this is an incomplete type with size 0, then we can't put it
2039 in sdata because it might be too big when completed. */
2040 if (size > 0
2041 && (unsigned HOST_WIDE_INT) size <= nios2_section_threshold)
2042 return true;
2043 }
2044 }
2045
2046 return false;
2047 }
2048
2049 /* Return true if symbol is in small data section. */
2050
static bool
nios2_symbol_ref_in_small_data_p (rtx sym)
{
  tree decl;

  gcc_assert (GET_CODE (sym) == SYMBOL_REF);
  /* May be NULL for compiler-generated symbols with no decl.  */
  decl = SYMBOL_REF_DECL (sym);

  /* TLS variables are not accessed through the GP.  */
  if (SYMBOL_REF_TLS_MODEL (sym) != 0)
    return false;

  /* On Nios II R2, there is no GP-relative relocation that can be
     used with "io" instructions.  So, if we are implicitly generating
     those instructions, we cannot emit GP-relative accesses.  */
  if (TARGET_ARCH_R2
      && (TARGET_BYPASS_CACHE || TARGET_BYPASS_CACHE_VOLATILE))
    return false;

  /* If the user has explicitly placed the symbol in a small data section
     via an attribute, generate gp-relative addressing even if the symbol
     is external, weak, or larger than we'd automatically put in the
     small data section.  OTOH, if the symbol is located in some
     non-small-data section, we can't use gp-relative accesses on it
     unless the user has requested gpopt_data or gpopt_all.  */

  switch (nios2_gpopt_option)
    {
    case gpopt_none:
      /* Don't generate a gp-relative addressing mode if that's been
	 disabled.  */
      return false;

    case gpopt_local:
      /* Use GP-relative addressing for small data symbols that are
	 not external or weak or uninitialized common, plus any symbols
	 that have explicitly been placed in a small data section.  */
      if (decl && DECL_SECTION_NAME (decl))
	return nios2_small_section_name_p (DECL_SECTION_NAME (decl));
      return (SYMBOL_REF_SMALL_P (sym)
	      && !SYMBOL_REF_EXTERNAL_P (sym)
	      && !(decl && DECL_WEAK (decl))
	      && !(decl && DECL_COMMON (decl)
		   && (DECL_INITIAL (decl) == NULL
		       || (!in_lto_p
			   && DECL_INITIAL (decl) == error_mark_node))));

    case gpopt_global:
      /* Use GP-relative addressing for small data symbols, even if
	 they are external or weak.  Note that SYMBOL_REF_SMALL_P
	 is also true of symbols that have explicitly been placed
	 in a small data section.  */
      return SYMBOL_REF_SMALL_P (sym);

    case gpopt_data:
      /* Use GP-relative addressing for all data symbols regardless
	 of the object size, but not for code symbols.  This option
	 is equivalent to the user asserting that the entire data
	 section is accessible from the GP.  */
      return !SYMBOL_REF_FUNCTION_P (sym);

    case gpopt_all:
      /* Use GP-relative addressing for everything, including code.
	 Effectively, the user has asserted that the entire program
	 fits within the 64K range of the GP offset.  */
      return true;

    default:
      /* We shouldn't get here.  */
      return false;
    }
}
2123
2124 /* Implement TARGET_SECTION_TYPE_FLAGS. */
2125
2126 static unsigned int
nios2_section_type_flags(tree decl,const char * name,int reloc)2127 nios2_section_type_flags (tree decl, const char *name, int reloc)
2128 {
2129 unsigned int flags;
2130
2131 flags = default_section_type_flags (decl, name, reloc);
2132
2133 if (nios2_small_section_name_p (name))
2134 flags |= SECTION_SMALL;
2135
2136 return flags;
2137 }
2138
2139 /* Return true if SYMBOL_REF X binds locally. */
2140
2141 static bool
nios2_symbol_binds_local_p(const_rtx x)2142 nios2_symbol_binds_local_p (const_rtx x)
2143 {
2144 return (SYMBOL_REF_DECL (x)
2145 ? targetm.binds_local_p (SYMBOL_REF_DECL (x))
2146 : SYMBOL_REF_LOCAL_P (x));
2147 }
2148
2149 /* Position independent code related. */
2150
2151 /* Emit code to load the PIC register. */
2152 static void
nios2_load_pic_register(void)2153 nios2_load_pic_register (void)
2154 {
2155 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
2156
2157 emit_insn (gen_load_got_register (pic_offset_table_rtx, tmp));
2158 emit_insn (gen_add3_insn (pic_offset_table_rtx, pic_offset_table_rtx, tmp));
2159 }
2160
2161 /* Generate a PIC address as a MEM rtx. */
2162 static rtx
nios2_load_pic_address(rtx sym,int unspec,rtx tmp)2163 nios2_load_pic_address (rtx sym, int unspec, rtx tmp)
2164 {
2165 if (flag_pic == 2
2166 && GET_CODE (sym) == SYMBOL_REF
2167 && nios2_symbol_binds_local_p (sym))
2168 /* Under -fPIC, generate a GOTOFF address for local symbols. */
2169 {
2170 rtx offset = nios2_unspec_offset (sym, UNSPEC_PIC_GOTOFF_SYM);
2171 crtl->uses_pic_offset_table = 1;
2172 return nios2_large_got_address (offset, tmp);
2173 }
2174
2175 return gen_const_mem (Pmode, nios2_got_address (sym, unspec));
2176 }
2177
2178 /* Nonzero if the constant value X is a legitimate general operand
2179 when generating PIC code. It is given that flag_pic is on and
2180 that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
2181 bool
nios2_legitimate_pic_operand_p(rtx x)2182 nios2_legitimate_pic_operand_p (rtx x)
2183 {
2184 if (nios2_large_unspec_reloc_p (x))
2185 return true;
2186
2187 return ! (GET_CODE (x) == SYMBOL_REF
2188 || GET_CODE (x) == LABEL_REF || GET_CODE (x) == CONST);
2189 }
2190
2191 /* Return TRUE if X is a thread-local symbol. */
2192 static bool
nios2_tls_symbol_p(rtx x)2193 nios2_tls_symbol_p (rtx x)
2194 {
2195 return (targetm.have_tls && GET_CODE (x) == SYMBOL_REF
2196 && SYMBOL_REF_TLS_MODEL (x) != 0);
2197 }
2198
2199 /* Legitimize addresses that are CONSTANT_P expressions. */
2200 static rtx
nios2_legitimize_constant_address(rtx addr)2201 nios2_legitimize_constant_address (rtx addr)
2202 {
2203 rtx base, offset;
2204 split_const (addr, &base, &offset);
2205
2206 if (nios2_tls_symbol_p (base))
2207 base = nios2_legitimize_tls_address (base);
2208 else if (flag_pic)
2209 base = nios2_load_pic_address (base, UNSPEC_PIC_SYM, NULL_RTX);
2210 else
2211 return addr;
2212
2213 if (offset != const0_rtx)
2214 {
2215 gcc_assert (can_create_pseudo_p ());
2216 return gen_rtx_PLUS (Pmode, force_reg (Pmode, base),
2217 (CONST_INT_P (offset)
2218 ? (SMALL_INT (INTVAL (offset))
2219 ? offset : force_reg (Pmode, offset))
2220 : offset));
2221 }
2222 return base;
2223 }
2224
2225 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
static rtx
nios2_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			  machine_mode mode ATTRIBUTE_UNUSED)
{
  if (CONSTANT_P (x))
    return nios2_legitimize_constant_address (x);

  /* For the TLS LE (Local Exec) model, the compiler may try to
     combine constant offsets with unspec relocs, creating address RTXs
     looking like this:
     (plus:SI (reg:SI 23 r23)
              (const:SI
                (plus:SI
                  (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
                  (const_int 48 [0x30]))))

     This usually happens when 'var' is a thread-local struct variable,
     and access of a field in var causes the addend.

     We typically want this combining, so transform the above into this
     form, which is allowed:
     (plus:SI (reg:SI 23 r23)
              (const:SI
                (unspec:SI
                  [(const:SI
                     (plus:SI (symbol_ref:SI ("var"))
                              (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))

     Which will be output as '%tls_le(var+48)(r23)' in assembly.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST)
    {
      rtx unspec, offset;
      split_const (XEXP (x, 1), &unspec, &offset);
      /* Only small-offset relocations can absorb an addend this way.  */
      if (GET_CODE (unspec) == UNSPEC
	  && !nios2_large_offset_p (XINT (unspec, 1))
	  && offset != const0_rtx)
	{
	  rtx reg = force_reg (Pmode, XEXP (x, 0));
	  /* Copy before mutating: the original unspec may be shared.  */
	  unspec = copy_rtx (unspec);
	  XVECEXP (unspec, 0, 0)
	    = plus_constant (Pmode, XVECEXP (unspec, 0, 0), INTVAL (offset));
	  x = gen_rtx_PLUS (Pmode, reg, gen_rtx_CONST (Pmode, unspec));
	}
    }

  return x;
}
2274
2275 static rtx
nios2_delegitimize_address(rtx x)2276 nios2_delegitimize_address (rtx x)
2277 {
2278 x = delegitimize_mem_from_attrs (x);
2279
2280 if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
2281 {
2282 switch (XINT (XEXP (x, 0), 1))
2283 {
2284 case UNSPEC_PIC_SYM:
2285 case UNSPEC_PIC_CALL_SYM:
2286 case UNSPEC_PIC_GOTOFF_SYM:
2287 case UNSPEC_ADD_TLS_GD:
2288 case UNSPEC_ADD_TLS_LDM:
2289 case UNSPEC_LOAD_TLS_IE:
2290 case UNSPEC_ADD_TLS_LE:
2291 x = XVECEXP (XEXP (x, 0), 0, 0);
2292 gcc_assert (CONSTANT_P (x));
2293 break;
2294 }
2295 }
2296 return x;
2297 }
2298
2299 /* Main expander function for RTL moves. */
/* Main expander function for RTL moves.
   Returns true if the caller's move has been fully expanded here, false
   if the caller should emit the (possibly adjusted) operands itself.  */
bool
nios2_emit_move_sequence (rtx *operands, machine_mode mode)
{
  rtx to = operands[0];
  rtx from = operands[1];

  /* Memory-to-memory (and similar) moves need the source in a
     register first.  */
  if (!register_operand (to, mode) && !reg_or_0_operand (from, mode))
    {
      gcc_assert (can_create_pseudo_p ());
      from = copy_to_mode_reg (mode, from);
    }

  if (CONSTANT_P (from))
    {
      if (CONST_INT_P (from))
	{
	  /* Integers loadable with a single movi/movui/movhi need no
	     help; anything else is split into a high-part load plus a
	     16-bit add of the low part.  */
	  if (!SMALL_INT (INTVAL (from))
	      && !SMALL_INT_UNSIGNED (INTVAL (from))
	      && !UPPER16_INT (INTVAL (from)))
	    {
	      /* Round the high part so the signed 16-bit low part
		 added afterwards reconstructs the exact value.  */
	      HOST_WIDE_INT high = (INTVAL (from) + 0x8000) & ~0xffff;
	      HOST_WIDE_INT low = INTVAL (from) & 0xffff;
	      emit_move_insn (to, gen_int_mode (high, SImode));
	      emit_insn (gen_add2_insn (to, gen_int_mode (low, HImode)));
	      /* Record the full constant for later optimizations.  */
	      set_unique_reg_note (get_last_insn (), REG_EQUAL,
				   copy_rtx (from));
	      return true;
	    }
	}
      else if (!gprel_constant_p (from))
	{
	  if (!nios2_large_unspec_reloc_p (from))
	    from = nios2_legitimize_constant_address (from);
	  if (CONSTANT_P (from))
	    {
	      /* Symbolic constants become a %hiadj/%lo pair.  */
	      emit_insn (gen_rtx_SET (to, gen_rtx_HIGH (Pmode, from)));
	      emit_insn (gen_rtx_SET (to, gen_rtx_LO_SUM (Pmode, to, from)));
	      set_unique_reg_note (get_last_insn (), REG_EQUAL,
				   copy_rtx (operands[1]));
	      return true;
	    }
	}
    }

  operands[0] = to;
  operands[1] = from;
  return false;
}
2348
2349 /* The function with address *ADDR is being called. If the address
2350 needs to be loaded from the GOT, emit the instruction to do so and
2351 update *ADDR to point to the rtx for the loaded value.
2352 If REG != NULL_RTX, it is used as the target/scratch register in the
2353 GOT address calculation. */
2354 void
nios2_adjust_call_address(rtx * call_op,rtx reg)2355 nios2_adjust_call_address (rtx *call_op, rtx reg)
2356 {
2357 if (MEM_P (*call_op))
2358 call_op = &XEXP (*call_op, 0);
2359
2360 rtx addr = *call_op;
2361 if (flag_pic && CONSTANT_P (addr))
2362 {
2363 rtx tmp = reg ? reg : NULL_RTX;
2364 if (!reg)
2365 reg = gen_reg_rtx (Pmode);
2366 addr = nios2_load_pic_address (addr, UNSPEC_PIC_CALL_SYM, tmp);
2367 emit_insn (gen_rtx_SET (reg, addr));
2368 *call_op = reg;
2369 }
2370 }
2371
2372
2373 /* Output assembly language related definitions. */
2374
2375 /* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
static bool
nios2_print_operand_punct_valid_p (unsigned char code)
{
  /* '.' and '!' select the 16-bit instruction suffixes.  */
  switch (code)
    {
    case '.':
    case '!':
      return true;
    default:
      return false;
    }
}
2381
2382
2383 /* Print the operand OP to file stream FILE modified by LETTER.
2384 LETTER can be one of:
2385
2386 i: print i/hi/ui suffixes (used for mov instruction variants),
2387 when OP is the appropriate immediate operand.
2388
2389 u: like 'i', except without "ui" suffix case (used for cmpgeu/cmpltu)
2390
2391 o: print "io" if OP needs volatile access (due to TARGET_BYPASS_CACHE
2392 or TARGET_BYPASS_CACHE_VOLATILE).
2393
2394 x: print i/hi/ci/chi suffixes for the and instruction,
2395 when OP is the appropriate immediate operand.
2396
2397 z: prints the third register immediate operand in assembly
2398 instructions. Outputs const0_rtx as the 'zero' register
2399 instead of '0'.
2400
2401 y: same as 'z', but for specifically for logical instructions,
2402 where the processing for immediates are slightly different.
2403
2404 H: for %hiadj
2405 L: for %lo
2406 D: for the upper 32-bits of a 64-bit double value
2407 R: prints reverse condition.
2408 A: prints (reg) operand for ld[s]ex and st[s]ex.
2409
2410 .: print .n suffix for 16-bit instructions.
2411 !: print r.n suffix for 16-bit instructions. Used for jmpr.n.
2412 */
static void
nios2_print_operand (FILE *file, rtx op, int letter)
{

  /* First take care of the format letters that just insert a string
     into the output stream.  */
  switch (letter)
    {
    case '.':
      /* ".n" suffix when this insn was selected as a 16-bit form.  */
      if (current_output_insn && get_attr_length (current_output_insn) == 2)
	fprintf (file, ".n");
      return;

    case '!':
      /* "r.n" suffix for the jmpr.n 16-bit form.  */
      if (current_output_insn && get_attr_length (current_output_insn) == 2)
	fprintf (file, "r.n");
      return;

    case 'x':
      /* Choose among the andi/andhi/andci/andchi immediate forms,
	 based on which 16-bit halves of the constant are set.  */
      if (CONST_INT_P (op))
	{
	  HOST_WIDE_INT val = INTVAL (op);
	  HOST_WIDE_INT low = val & 0xffff;
	  HOST_WIDE_INT high = (val >> 16) & 0xffff;

	  if (val != 0)
	    {
	      if (high != 0)
		{
		  if (low != 0)
		    {
		      /* Both halves nonzero: only the R2 complemented
			 forms can encode this.  */
		      gcc_assert (TARGET_ARCH_R2);
		      if (high == 0xffff)
			fprintf (file, "c");
		      else if (low == 0xffff)
			fprintf (file, "ch");
		      else
			gcc_unreachable ();
		    }
		  else
		    fprintf (file, "h");
		}
	      fprintf (file, "i");
	    }
	}
      return;

    case 'u':
    case 'i':
      /* i/hi/ui suffixes for mov-style immediates; 'u' suppresses the
	 "ui" case for unsigned-compare instructions.  */
      if (CONST_INT_P (op))
	{
	  HOST_WIDE_INT val = INTVAL (op);
	  HOST_WIDE_INT low = val & 0xffff;
	  HOST_WIDE_INT high = (val >> 16) & 0xffff;
	  if (val != 0)
	    {
	      if (low == 0 && high != 0)
		fprintf (file, "h");
	      else if (high == 0 && (low & 0x8000) != 0 && letter != 'u')
		fprintf (file, "u");
	    }
	}
      if (CONSTANT_P (op) && op != const0_rtx)
	fprintf (file, "i");
      return;

    case 'o':
      /* "io" suffix when the access must bypass the cache.  */
      if (GET_CODE (op) == MEM
	  && ((MEM_VOLATILE_P (op) && TARGET_BYPASS_CACHE_VOLATILE)
	      || TARGET_BYPASS_CACHE))
	{
	  gcc_assert (current_output_insn
		      && get_attr_length (current_output_insn) == 4);
	  fprintf (file, "io");
	}
      return;

    default:
      break;
    }

  /* Handle comparison operator names.  */
  if (comparison_operator (op, VOIDmode))
    {
      enum rtx_code cond = GET_CODE (op);
      if (letter == 0)
	{
	  fprintf (file, "%s", GET_RTX_NAME (cond));
	  return;
	}
      if (letter == 'R')
	{
	  /* 'R': the reversed condition name.  */
	  fprintf (file, "%s", GET_RTX_NAME (reverse_condition (cond)));
	  return;
	}
    }

  /* Now handle the cases where we actually need to format an operand.  */
  switch (GET_CODE (op))
    {
    case REG:
      if (letter == 0 || letter == 'z' || letter == 'y')
	{
	  fprintf (file, "%s", reg_names[REGNO (op)]);
	  return;
	}
      else if (letter == 'D')
	{
	  /* 'D': the high word of a DImode/DFmode register pair.  */
	  fprintf (file, "%s", reg_names[REGNO (op)+1]);
	  return;
	}
      break;

    case CONST_INT:
      {
	rtx int_rtx = op;
	HOST_WIDE_INT val = INTVAL (int_rtx);
	HOST_WIDE_INT low = val & 0xffff;
	HOST_WIDE_INT high = (val >> 16) & 0xffff;

	if (letter == 'y')
	  {
	    /* 'y': immediate operand of a logical instruction; emit
	       the half actually encoded by the selected and* form.  */
	    if (val == 0)
	      fprintf (file, "zero");
	    else
	      {
		if (high != 0)
		  {
		    if (low != 0)
		      {
			gcc_assert (TARGET_ARCH_R2);
			if (high == 0xffff)
			  /* andci.  */
			  int_rtx = gen_int_mode (low, SImode);
			else if (low == 0xffff)
			  /* andchi.  */
			  int_rtx = gen_int_mode (high, SImode);
			else
			  gcc_unreachable ();
		      }
		    else
		      /* andhi.  */
		      int_rtx = gen_int_mode (high, SImode);
		  }
		else
		  /* andi.  */
		  int_rtx = gen_int_mode (low, SImode);
		output_addr_const (file, int_rtx);
	      }
	    return;
	  }
	else if (letter == 'z')
	  {
	    /* 'z': third operand; print zero as the "zero" register.  */
	    if (val == 0)
	      fprintf (file, "zero");
	    else
	      {
		if (low == 0 && high != 0)
		  int_rtx = gen_int_mode (high, SImode);
		else if (low != 0)
		  {
		    gcc_assert (high == 0 || high == 0xffff);
		    int_rtx = gen_int_mode (low, high == 0 ? SImode : HImode);
		  }
		else
		  gcc_unreachable ();
		output_addr_const (file, int_rtx);
	      }
	    return;
	  }
      }

      /* Else, fall through.  */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      if (letter == 0 || letter == 'z')
	{
	  output_addr_const (file, op);
	  return;
	}
      else if (letter == 'H' || letter == 'L')
	{
	  /* 'H'/'L': %hiadj()/%lo() relocation operators, possibly
	     combined with a large-offset relocation name.  */
	  fprintf (file, "%%");
	  if (GET_CODE (op) == CONST
	      && GET_CODE (XEXP (op, 0)) == UNSPEC)
	    {
	      rtx unspec = XEXP (op, 0);
	      int unspec_reloc = XINT (unspec, 1);
	      gcc_assert (nios2_large_offset_p (unspec_reloc));
	      fprintf (file, "%s_", nios2_unspec_reloc_name (unspec_reloc));
	      op = XVECEXP (unspec, 0, 0);
	    }
	  fprintf (file, letter == 'H' ? "hiadj(" : "lo(");
	  output_addr_const (file, op);
	  fprintf (file, ")");
	  return;
	}
      break;

    case SUBREG:
    case MEM:
      if (letter == 'A')
	{
	  /* Address of '(reg)' form, with no index.  */
	  fprintf (file, "(%s)", reg_names[REGNO (XEXP (op, 0))]);
	  return;
	}
      if (letter == 0)
	{
	  output_address (VOIDmode, op);
	  return;
	}
      break;

    case CODE_LABEL:
      if (letter == 0)
	{
	  output_addr_const (file, op);
	  return;
	}
      break;

    default:
      break;
    }

  output_operand_lossage ("Unsupported operand for code '%c'", letter);
  gcc_unreachable ();
}
2645
2646 /* Return true if this is a GP-relative accessible reference. */
2647 bool
gprel_constant_p(rtx op)2648 gprel_constant_p (rtx op)
2649 {
2650 if (GET_CODE (op) == SYMBOL_REF
2651 && nios2_symbol_ref_in_small_data_p (op))
2652 return true;
2653 else if (GET_CODE (op) == CONST
2654 && GET_CODE (XEXP (op, 0)) == PLUS)
2655 return gprel_constant_p (XEXP (XEXP (op, 0), 0));
2656
2657 return false;
2658 }
2659
2660 /* Return the name string for a supported unspec reloc offset. */
2661 static const char *
nios2_unspec_reloc_name(int unspec)2662 nios2_unspec_reloc_name (int unspec)
2663 {
2664 switch (unspec)
2665 {
2666 case UNSPEC_PIC_SYM:
2667 return "got";
2668 case UNSPEC_PIC_CALL_SYM:
2669 return "call";
2670 case UNSPEC_PIC_GOTOFF_SYM:
2671 return "gotoff";
2672 case UNSPEC_LOAD_TLS_IE:
2673 return "tls_ie";
2674 case UNSPEC_ADD_TLS_LE:
2675 return "tls_le";
2676 case UNSPEC_ADD_TLS_GD:
2677 return "tls_gd";
2678 case UNSPEC_ADD_TLS_LDM:
2679 return "tls_ldm";
2680 case UNSPEC_ADD_TLS_LDO:
2681 return "tls_ldo";
2682 default:
2683 return NULL;
2684 }
2685 }
2686
2687 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
2688 static bool
nios2_output_addr_const_extra(FILE * file,rtx op)2689 nios2_output_addr_const_extra (FILE *file, rtx op)
2690 {
2691 const char *name;
2692 gcc_assert (GET_CODE (op) == UNSPEC);
2693
2694 /* Support for printing out const unspec relocations. */
2695 name = nios2_unspec_reloc_name (XINT (op, 1));
2696 if (name)
2697 {
2698 fprintf (file, "%%%s(", name);
2699 output_addr_const (file, XVECEXP (op, 0, 0));
2700 fprintf (file, ")");
2701 return true;
2702 }
2703 return false;
2704 }
2705
2706 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
static void
nios2_print_operand_address (FILE *file, machine_mode mode, rtx op)
{
  switch (GET_CODE (op))
    {
    case CONST:
    case CONST_INT:
    case LABEL_REF:
    case CONST_DOUBLE:
    case SYMBOL_REF:
      /* Small-data symbols are addressed as %gprel(sym)(gp); any other
	 bare constant falls through to the failure path below.  */
      if (gprel_constant_p (op))
	{
	  fprintf (file, "%%gprel(");
	  output_addr_const (file, op);
	  fprintf (file, ")(%s)", reg_names[GP_REGNO]);
	  return;
	}

      break;

    case PLUS:
      /* Base + displacement, in either operand order: "offset(reg)".  */
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (REG_P (op0) && CONSTANT_P (op1))
	  {
	    output_addr_const (file, op1);
	    fprintf (file, "(%s)", reg_names[REGNO (op0)]);
	    return;
	  }
	else if (REG_P (op1) && CONSTANT_P (op0))
	  {
	    output_addr_const (file, op0);
	    fprintf (file, "(%s)", reg_names[REGNO (op1)]);
	    return;
	  }
      }
      break;

    case REG:
      /* Plain register indirect prints with a zero displacement.  */
      fprintf (file, "0(%s)", reg_names[REGNO (op)]);
      return;

    case MEM:
      {
	/* Print the address inside the MEM.  */
	rtx base = XEXP (op, 0);
	nios2_print_operand_address (file, mode, base);
	return;
      }
    default:
      break;
    }

  fprintf (stderr, "Missing way to print address\n");
  debug_rtx (op);
  gcc_unreachable ();
}
2765
2766 /* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL. */
2767 static void
nios2_output_dwarf_dtprel(FILE * file,int size,rtx x)2768 nios2_output_dwarf_dtprel (FILE *file, int size, rtx x)
2769 {
2770 gcc_assert (size == 4);
2771 fprintf (file, "\t.4byte\t%%tls_ldo(");
2772 output_addr_const (file, x);
2773 fprintf (file, ")");
2774 }
2775
/* Implement TARGET_ASM_FILE_END.  */
2777
2778 static void
nios2_asm_file_end(void)2779 nios2_asm_file_end (void)
2780 {
2781 /* The Nios II Linux stack is mapped non-executable by default, so add a
2782 .note.GNU-stack section for switching to executable stacks only when
2783 trampolines are generated. */
2784 if (TARGET_LINUX_ABI && trampolines_created)
2785 file_end_indicate_exec_stack ();
2786 }
2787
2788 /* Implement TARGET_ASM_FUNCTION_PROLOGUE. */
2789 static void
nios2_asm_function_prologue(FILE * file,HOST_WIDE_INT size ATTRIBUTE_UNUSED)2790 nios2_asm_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
2791 {
2792 if (flag_verbose_asm || flag_debug_asm)
2793 {
2794 nios2_compute_frame_layout ();
2795 nios2_dump_frame_layout (file);
2796 }
2797 }
2798
2799 /* Emit assembly of custom FPU instructions. */
2800 const char *
nios2_fpu_insn_asm(enum n2fpu_code code)2801 nios2_fpu_insn_asm (enum n2fpu_code code)
2802 {
2803 static char buf[256];
2804 const char *op1, *op2, *op3;
2805 int ln = 256, n = 0;
2806
2807 int N = N2FPU_N (code);
2808 int num_operands = N2FPU (code).num_operands;
2809 const char *insn_name = N2FPU_NAME (code);
2810 tree ftype = nios2_ftype (N2FPU_FTCODE (code));
2811 machine_mode dst_mode = TYPE_MODE (TREE_TYPE (ftype));
2812 machine_mode src_mode = TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype)));
2813
2814 /* Prepare X register for DF input operands. */
2815 if (GET_MODE_SIZE (src_mode) == 8 && num_operands == 3)
2816 n = snprintf (buf, ln, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
2817 N2FPU_N (n2fpu_fwrx));
2818
2819 if (src_mode == SFmode)
2820 {
2821 if (dst_mode == VOIDmode)
2822 {
2823 /* The fwry case. */
2824 op1 = op3 = "zero";
2825 op2 = "%0";
2826 num_operands -= 1;
2827 }
2828 else
2829 {
2830 op1 = (dst_mode == DFmode ? "%D0" : "%0");
2831 op2 = "%1";
2832 op3 = (num_operands == 2 ? "zero" : "%2");
2833 }
2834 }
2835 else if (src_mode == DFmode)
2836 {
2837 if (dst_mode == VOIDmode)
2838 {
2839 /* The fwrx case. */
2840 op1 = "zero";
2841 op2 = "%0";
2842 op3 = "%D0";
2843 num_operands -= 1;
2844 }
2845 else
2846 {
2847 op1 = (dst_mode == DFmode ? "%D0" : "%0");
2848 op2 = (num_operands == 2 ? "%1" : "%2");
2849 op3 = (num_operands == 2 ? "%D1" : "%D2");
2850 }
2851 }
2852 else if (src_mode == VOIDmode)
2853 {
2854 /* frdxlo, frdxhi, frdy cases. */
2855 gcc_assert (dst_mode == SFmode);
2856 op1 = "%0";
2857 op2 = op3 = "zero";
2858 }
2859 else if (src_mode == SImode)
2860 {
2861 /* Conversion operators. */
2862 gcc_assert (num_operands == 2);
2863 op1 = (dst_mode == DFmode ? "%D0" : "%0");
2864 op2 = "%1";
2865 op3 = "zero";
2866 }
2867 else
2868 gcc_unreachable ();
2869
2870 /* Main instruction string. */
2871 n += snprintf (buf + n, ln - n, "custom\t%d, %s, %s, %s # %s %%0%s%s",
2872 N, op1, op2, op3, insn_name,
2873 (num_operands >= 2 ? ", %1" : ""),
2874 (num_operands == 3 ? ", %2" : ""));
2875
2876 /* Extraction of Y register for DF results. */
2877 if (dst_mode == DFmode)
2878 snprintf (buf + n, ln - n, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
2879 N2FPU_N (n2fpu_frdy));
2880 return buf;
2881 }
2882
2883
2884
2885 /* Function argument related. */
2886
2887 /* Define where to put the arguments to a function. Value is zero to
2888 push the argument on the stack, or a hard register in which to
2889 store the argument.
2890
2891 MODE is the argument's machine mode.
2892 TYPE is the data type of the argument (as a tree).
2893 This is null for libcalls where that information may
2894 not be available.
2895 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2896 the preceding args and about the function being called.
2897 NAMED is nonzero if this argument is a named parameter
2898 (otherwise it is an extra parameter matching an ellipsis). */
2899
2900 static rtx
nios2_function_arg(cumulative_args_t cum_v,machine_mode mode,const_tree type ATTRIBUTE_UNUSED,bool named ATTRIBUTE_UNUSED)2901 nios2_function_arg (cumulative_args_t cum_v, machine_mode mode,
2902 const_tree type ATTRIBUTE_UNUSED,
2903 bool named ATTRIBUTE_UNUSED)
2904 {
2905 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2906 rtx return_rtx = NULL_RTX;
2907
2908 if (cum->regs_used < NUM_ARG_REGS)
2909 return_rtx = gen_rtx_REG (mode, FIRST_ARG_REGNO + cum->regs_used);
2910
2911 return return_rtx;
2912 }
2913
2914 /* Return number of bytes, at the beginning of the argument, that must be
2915 put in registers. 0 is the argument is entirely in registers or entirely
2916 in memory. */
2917
2918 static int
nios2_arg_partial_bytes(cumulative_args_t cum_v,machine_mode mode,tree type ATTRIBUTE_UNUSED,bool named ATTRIBUTE_UNUSED)2919 nios2_arg_partial_bytes (cumulative_args_t cum_v,
2920 machine_mode mode, tree type ATTRIBUTE_UNUSED,
2921 bool named ATTRIBUTE_UNUSED)
2922 {
2923 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2924 HOST_WIDE_INT param_size;
2925
2926 if (mode == BLKmode)
2927 {
2928 param_size = int_size_in_bytes (type);
2929 gcc_assert (param_size >= 0);
2930 }
2931 else
2932 param_size = GET_MODE_SIZE (mode);
2933
2934 /* Convert to words (round up). */
2935 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
2936
2937 if (cum->regs_used < NUM_ARG_REGS
2938 && cum->regs_used + param_size > NUM_ARG_REGS)
2939 return (NUM_ARG_REGS - cum->regs_used) * UNITS_PER_WORD;
2940
2941 return 0;
2942 }
2943
2944 /* Update the data in CUM to advance over an argument of mode MODE
2945 and data type TYPE; TYPE is null for libcalls where that information
2946 may not be available. */
2947
2948 static void
nios2_function_arg_advance(cumulative_args_t cum_v,machine_mode mode,const_tree type ATTRIBUTE_UNUSED,bool named ATTRIBUTE_UNUSED)2949 nios2_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
2950 const_tree type ATTRIBUTE_UNUSED,
2951 bool named ATTRIBUTE_UNUSED)
2952 {
2953 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2954 HOST_WIDE_INT param_size;
2955
2956 if (mode == BLKmode)
2957 {
2958 param_size = int_size_in_bytes (type);
2959 gcc_assert (param_size >= 0);
2960 }
2961 else
2962 param_size = GET_MODE_SIZE (mode);
2963
2964 /* Convert to words (round up). */
2965 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
2966
2967 if (cum->regs_used + param_size > NUM_ARG_REGS)
2968 cum->regs_used = NUM_ARG_REGS;
2969 else
2970 cum->regs_used += param_size;
2971 }
2972
2973 enum direction
nios2_function_arg_padding(machine_mode mode,const_tree type)2974 nios2_function_arg_padding (machine_mode mode, const_tree type)
2975 {
2976 /* On little-endian targets, the first byte of every stack argument
2977 is passed in the first byte of the stack slot. */
2978 if (!BYTES_BIG_ENDIAN)
2979 return upward;
2980
2981 /* Otherwise, integral types are padded downward: the last byte of a
2982 stack argument is passed in the last byte of the stack slot. */
2983 if (type != 0
2984 ? INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
2985 : GET_MODE_CLASS (mode) == MODE_INT)
2986 return downward;
2987
2988 /* Arguments smaller than a stack slot are padded downward. */
2989 if (mode != BLKmode)
2990 return (GET_MODE_BITSIZE (mode) >= PARM_BOUNDARY) ? upward : downward;
2991
2992 return ((int_size_in_bytes (type) >= (PARM_BOUNDARY / BITS_PER_UNIT))
2993 ? upward : downward);
2994 }
2995
2996 enum direction
nios2_block_reg_padding(machine_mode mode,tree type,int first ATTRIBUTE_UNUSED)2997 nios2_block_reg_padding (machine_mode mode, tree type,
2998 int first ATTRIBUTE_UNUSED)
2999 {
3000 return nios2_function_arg_padding (mode, type);
3001 }
3002
3003 /* Emit RTL insns to initialize the variable parts of a trampoline.
3004 FNADDR is an RTX for the address of the function's pure code.
3005 CXT is an RTX for the static chain value for the function.
3006 On Nios II, we handle this by a library call. */
3007 static void
nios2_trampoline_init(rtx m_tramp,tree fndecl,rtx cxt)3008 nios2_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
3009 {
3010 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3011 rtx ctx_reg = force_reg (Pmode, cxt);
3012 rtx addr = force_reg (Pmode, XEXP (m_tramp, 0));
3013
3014 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
3015 LCT_NORMAL, VOIDmode, 3, addr, Pmode, fnaddr, Pmode,
3016 ctx_reg, Pmode);
3017 }
3018
/* Implement TARGET_FUNCTION_VALUE.  All function values are returned
   in the first return-value register, in the mode of RET_TYPE.  */
static rtx
nios2_function_value (const_tree ret_type, const_tree fn ATTRIBUTE_UNUSED,
		      bool outgoing ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (TYPE_MODE (ret_type), FIRST_RETVAL_REGNO);
}
3026
/* Implement TARGET_LIBCALL_VALUE.  Library call results use the same
   register as ordinary function values.  */
static rtx
nios2_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, FIRST_RETVAL_REGNO);
}
3033
/* Implement TARGET_FUNCTION_VALUE_REGNO_P.  Only the single
   return-value register can hold a function result.  */
static bool
nios2_function_value_regno_p (const unsigned int regno)
{
  return regno == FIRST_RETVAL_REGNO;
}
3040
3041 /* Implement TARGET_RETURN_IN_MEMORY. */
3042 static bool
nios2_return_in_memory(const_tree type,const_tree fntype ATTRIBUTE_UNUSED)3043 nios2_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3044 {
3045 return (int_size_in_bytes (type) > (2 * UNITS_PER_WORD)
3046 || int_size_in_bytes (type) == -1);
3047 }
3048
/* TODO: It may be possible to eliminate the copyback and implement
   own va_arg type.  */

/* Implement TARGET_SETUP_INCOMING_VARARGS.  Arrange for the anonymous
   argument registers of a varargs function to be saved so va_arg can
   find them, reporting the save-area size through *PRETEND_SIZE.  */
static void
nios2_setup_incoming_varargs (cumulative_args_t cum_v,
			      machine_mode mode, tree type,
			      int *pretend_size, int second_time)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  CUMULATIVE_ARGS local_cum;
  cumulative_args_t local_cum_v = pack_cumulative_args (&local_cum);
  int regs_to_push;
  int pret_size;

  cfun->machine->uses_anonymous_args = 1;
  /* Advance a local copy of CUM past the last named argument (MODE/TYPE)
     to find how many argument registers remain for anonymous ones.  */
  local_cum = *cum;
  nios2_function_arg_advance (local_cum_v, mode, type, true);

  regs_to_push = NUM_ARG_REGS - local_cum.regs_used;

  /* If we can use CDX stwm to push the arguments on the stack,
     nios2_expand_prologue will do that instead.  */
  if (!TARGET_HAS_CDX && !second_time && regs_to_push > 0)
    {
      rtx ptr = virtual_incoming_args_rtx;
      rtx mem = gen_rtx_MEM (BLKmode, ptr);
      /* Blockages keep other memory accesses from being scheduled
	 across the register-save block move.  */
      emit_insn (gen_blockage ());
      move_block_from_reg (local_cum.regs_used + FIRST_ARG_REGNO, mem,
			   regs_to_push);
      emit_insn (gen_blockage ());
    }

  /* Only report a nonzero pretend-args size; *PRETEND_SIZE is left
     untouched when no registers need saving.  */
  pret_size = regs_to_push * UNITS_PER_WORD;
  if (pret_size)
    *pretend_size = pret_size;
}
3084
3085
3086
3087 /* Init FPU builtins. */
3088 static void
nios2_init_fpu_builtins(int start_code)3089 nios2_init_fpu_builtins (int start_code)
3090 {
3091 tree fndecl;
3092 char builtin_name[64] = "__builtin_custom_";
3093 unsigned int i, n = strlen ("__builtin_custom_");
3094
3095 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
3096 {
3097 snprintf (builtin_name + n, sizeof (builtin_name) - n,
3098 "%s", N2FPU_NAME (i));
3099 fndecl =
3100 add_builtin_function (builtin_name, nios2_ftype (N2FPU_FTCODE (i)),
3101 start_code + i, BUILT_IN_MD, NULL, NULL_TREE);
3102 nios2_register_builtin_fndecl (start_code + i, fndecl);
3103 }
3104 }
3105
/* Helper function for expanding FPU builtins.  CODE indexes the FPU
   insn table; TARGET is a suggested location for the result.  Emits
   the FPU custom insn and returns its result rtx, or const0_rtx for
   builtins returning void.  */
static rtx
nios2_expand_fpu_builtin (tree exp, unsigned int code, rtx target)
{
  struct expand_operand ops[MAX_RECOG_OPERANDS];
  enum insn_code icode = N2FPU_ICODE (code);
  int nargs, argno, opno = 0;
  int num_operands = N2FPU (code).num_operands;
  machine_mode dst_mode = TYPE_MODE (TREE_TYPE (exp));
  bool has_target_p = (dst_mode != VOIDmode);

  /* The builtin is only callable when its insn has been enabled with a
     -mcustom-<insn> switch (N2FPU_N is then >= 0).  */
  if (N2FPU_N (code) < 0)
    fatal_error (input_location,
		 "Cannot call %<__builtin_custom_%s%> without specifying switch"
		 " %<-mcustom-%s%>", N2FPU_NAME (code), N2FPU_NAME (code));
  if (has_target_p)
    create_output_operand (&ops[opno++], target, dst_mode);
  else
    /* Subtract away the count of the VOID return, mainly for fwrx/fwry.  */
    num_operands -= 1;
  nargs = call_expr_nargs (exp);
  for (argno = 0; argno < nargs; argno++)
    {
      tree arg = CALL_EXPR_ARG (exp, argno);
      create_input_operand (&ops[opno++], expand_normal (arg),
			    TYPE_MODE (TREE_TYPE (arg)));
    }
  if (!maybe_expand_insn (icode, num_operands, ops))
    {
      error ("invalid argument to built-in function");
      return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
    }
  return has_target_p ? ops[0].value : const0_rtx;
}
3140
3141 /* Nios II has custom instruction built-in functions of the forms:
3142 __builtin_custom_n
3143 __builtin_custom_nX
3144 __builtin_custom_nXX
3145 __builtin_custom_Xn
3146 __builtin_custom_XnX
3147 __builtin_custom_XnXX
3148
3149 where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
3150 Therefore with 0-1 return values, and 0-2 arguments, we have a
3151 total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.
3152 */
#define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))
/* Operand-signature suffix ("n", "fnf", "pnii", ...) of each custom
   builtin, indexed by builtin-code offset; used in diagnostics.  */
static char custom_builtin_name[NUM_CUSTOM_BUILTINS][5];

/* Create all NUM_CUSTOM_BUILTINS __builtin_custom_* functions,
   numbered consecutively from START_CODE, and record each fndecl.  */
static void
nios2_init_custom_builtins (int start_code)
{
  tree builtin_ftype, ret_type, fndecl;
  char builtin_name[32] = "__builtin_custom_";
  int n = strlen ("__builtin_custom_");
  int builtin_code = 0;
  int lhs, rhs1, rhs2;

  /* The four possible operand letters and their tree types; index 0
     means "operand absent".  */
  struct { tree type; const char *c; } op[4];
  /* z */ op[0].c = ""; op[0].type = NULL_TREE;
  /* f */ op[1].c = "f"; op[1].type = float_type_node;
  /* i */ op[2].c = "i"; op[2].type = integer_type_node;
  /* p */ op[3].c = "p"; op[3].type = ptr_type_node;

  /* We enumerate through the possible operand types to create all the
     __builtin_custom_XnXX function tree types.  Note that these may slightly
     overlap with the function types created for other fixed builtins.  */

  for (lhs = 0; lhs < 4; lhs++)
    for (rhs1 = 0; rhs1 < 4; rhs1++)
      for (rhs2 = 0; rhs2 < 4; rhs2++)
	{
	  /* A second operand requires a first one; skip "nzX" shapes.  */
	  if (rhs1 == 0 && rhs2 != 0)
	    continue;
	  ret_type = (op[lhs].type ? op[lhs].type : void_type_node);
	  builtin_ftype
	    = build_function_type_list (ret_type, integer_type_node,
					op[rhs1].type, op[rhs2].type,
					NULL_TREE);
	  snprintf (builtin_name + n, 32 - n, "%sn%s%s",
		    op[lhs].c, op[rhs1].c, op[rhs2].c);
	  /* Save copy of parameter string into custom_builtin_name[].  */
	  strncpy (custom_builtin_name[builtin_code], builtin_name + n, 5);
	  fndecl =
	    add_builtin_function (builtin_name, builtin_ftype,
				  start_code + builtin_code,
				  BUILT_IN_MD, NULL, NULL_TREE);
	  nios2_register_builtin_fndecl (start_code + builtin_code, fndecl);
	  builtin_code += 1;
	}
}
3198
/* Helper function for expanding custom builtins.  INDEX is the offset
   into custom_builtin_name[] identifying the XnXX signature; TARGET is
   a suggested location for the result.  The insn is emitted as an
   UNSPEC_VOLATILE so it is never moved, combined, or deleted.  */
static rtx
nios2_expand_custom_builtin (tree exp, unsigned int index, rtx target)
{
  bool has_target_p = (TREE_TYPE (exp) != void_type_node);
  machine_mode tmode = VOIDmode;
  int nargs, argno;
  rtx value, insn, unspec_args[3];
  tree arg;

  /* XnXX form.  */
  if (has_target_p)
    {
      tmode = TYPE_MODE (TREE_TYPE (exp));
      /* Make sure the result lands in a register of the right mode.  */
      if (!target || GET_MODE (target) != tmode
	  || !REG_P (target))
	target = gen_reg_rtx (tmode);
    }

  nargs = call_expr_nargs (exp);
  for (argno = 0; argno < nargs; argno++)
    {
      arg = CALL_EXPR_ARG (exp, argno);
      value = expand_normal (arg);
      unspec_args[argno] = value;
      if (argno == 0)
	{
	  /* Argument 0 is the custom opcode N and must be constant.  */
	  if (!custom_insn_opcode (value, VOIDmode))
	    error ("custom instruction opcode must be compile time "
		   "constant in the range 0-255 for __builtin_custom_%s",
		   custom_builtin_name[index]);
	}
      else
	/* For other arguments, force into a register.  */
	unspec_args[argno] = force_reg (TYPE_MODE (TREE_TYPE (arg)),
					unspec_args[argno]);
    }
  /* Fill remaining unspec operands with zero.  */
  for (; argno < 3; argno++)
    unspec_args[argno] = const0_rtx;

  insn = (has_target_p
	  ? gen_rtx_SET (target,
			 gen_rtx_UNSPEC_VOLATILE (tmode,
						  gen_rtvec_v (3, unspec_args),
						  UNSPECV_CUSTOM_XNXX))
	  : gen_rtx_UNSPEC_VOLATILE (VOIDmode, gen_rtvec_v (3, unspec_args),
				     UNSPECV_CUSTOM_NXX));
  emit_insn (insn);
  return has_target_p ? target : const0_rtx;
}
3250
3251
3252
3253
3254 /* Main definition of built-in functions. Nios II has a small number of fixed
3255 builtins, plus a large number of FPU insn builtins, and builtins for
3256 generating custom instructions. */
3257
/* Descriptor for one fixed builtin: the insn to emit, the minimum
   architecture revision providing it, its function-type code, and its
   user-visible name.  */
struct nios2_builtin_desc
{
  enum insn_code icode;
  enum nios2_arch_type arch;
  enum nios2_ftcode ftype;
  const char *name;
};

/* X-macro list of the fixed builtins; expanded twice below so the code
   enumeration and the descriptor table stay in matching order.  */
#define N2_BUILTINS	\
  N2_BUILTIN_DEF (sync, R1, N2_FTYPE_VOID_VOID) \
  N2_BUILTIN_DEF (ldbio, R1, N2_FTYPE_SI_CVPTR) \
  N2_BUILTIN_DEF (ldbuio, R1, N2_FTYPE_UI_CVPTR) \
  N2_BUILTIN_DEF (ldhio, R1, N2_FTYPE_SI_CVPTR) \
  N2_BUILTIN_DEF (ldhuio, R1, N2_FTYPE_UI_CVPTR) \
  N2_BUILTIN_DEF (ldwio, R1, N2_FTYPE_SI_CVPTR) \
  N2_BUILTIN_DEF (stbio, R1, N2_FTYPE_VOID_VPTR_SI) \
  N2_BUILTIN_DEF (sthio, R1, N2_FTYPE_VOID_VPTR_SI) \
  N2_BUILTIN_DEF (stwio, R1, N2_FTYPE_VOID_VPTR_SI) \
  N2_BUILTIN_DEF (rdctl, R1, N2_FTYPE_SI_SI) \
  N2_BUILTIN_DEF (wrctl, R1, N2_FTYPE_VOID_SI_SI) \
  N2_BUILTIN_DEF (rdprs, R1, N2_FTYPE_SI_SI_SI) \
  N2_BUILTIN_DEF (flushd, R1, N2_FTYPE_VOID_VPTR) \
  N2_BUILTIN_DEF (flushda, R1, N2_FTYPE_VOID_VPTR) \
  N2_BUILTIN_DEF (wrpie, R2, N2_FTYPE_SI_SI) \
  N2_BUILTIN_DEF (eni, R2, N2_FTYPE_VOID_SI) \
  N2_BUILTIN_DEF (ldex, R2, N2_FTYPE_SI_CVPTR) \
  N2_BUILTIN_DEF (ldsex, R2, N2_FTYPE_SI_CVPTR) \
  N2_BUILTIN_DEF (stex, R2, N2_FTYPE_SI_VPTR_SI) \
  N2_BUILTIN_DEF (stsex, R2, N2_FTYPE_SI_VPTR_SI)

/* Function codes of the fixed builtins; NUM_FIXED_NIOS2_BUILTINS is
   their count and the start of the FPU builtin code range.  */
enum nios2_builtin_code {
#define N2_BUILTIN_DEF(name, arch, ftype) NIOS2_BUILTIN_ ## name,
  N2_BUILTINS
#undef N2_BUILTIN_DEF
  NUM_FIXED_NIOS2_BUILTINS
};

/* Descriptor table, indexed by nios2_builtin_code.  */
static const struct nios2_builtin_desc nios2_builtins[] = {
#define N2_BUILTIN_DEF(name, arch, ftype)			\
  { CODE_FOR_ ## name, ARCH_ ## arch, ftype, "__builtin_" #name },
  N2_BUILTINS
#undef N2_BUILTIN_DEF
};

/* Start/ends of FPU/custom insn builtin index ranges.  */
static unsigned int nios2_fpu_builtin_base;
static unsigned int nios2_custom_builtin_base;
static unsigned int nios2_custom_builtin_end;
3306
3307 /* Implement TARGET_INIT_BUILTINS. */
3308 static void
nios2_init_builtins(void)3309 nios2_init_builtins (void)
3310 {
3311 unsigned int i;
3312
3313 /* Initialize fixed builtins. */
3314 for (i = 0; i < ARRAY_SIZE (nios2_builtins); i++)
3315 {
3316 const struct nios2_builtin_desc *d = &nios2_builtins[i];
3317 tree fndecl =
3318 add_builtin_function (d->name, nios2_ftype (d->ftype), i,
3319 BUILT_IN_MD, NULL, NULL);
3320 nios2_register_builtin_fndecl (i, fndecl);
3321 }
3322
3323 /* Initialize FPU builtins. */
3324 nios2_fpu_builtin_base = ARRAY_SIZE (nios2_builtins);
3325 nios2_init_fpu_builtins (nios2_fpu_builtin_base);
3326
3327 /* Initialize custom insn builtins. */
3328 nios2_custom_builtin_base
3329 = nios2_fpu_builtin_base + ARRAY_SIZE (nios2_fpu_insn);
3330 nios2_custom_builtin_end
3331 = nios2_custom_builtin_base + NUM_CUSTOM_BUILTINS;
3332 nios2_init_custom_builtins (nios2_custom_builtin_base);
3333 }
3334
/* Array of fndecls for TARGET_BUILTIN_DECL, covering the fixed, FPU,
   and custom builtin code ranges; GTY so the GC sees the trees.  */
#define NIOS2_NUM_BUILTINS \
  (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
static GTY(()) tree nios2_builtin_decls[NIOS2_NUM_BUILTINS];

/* Record FNDECL as the declaration for builtin function code CODE.  */
static void
nios2_register_builtin_fndecl (unsigned code, tree fndecl)
{
  nios2_builtin_decls[code] = fndecl;
}
3345
3346 /* Implement TARGET_BUILTIN_DECL. */
3347 static tree
nios2_builtin_decl(unsigned code,bool initialize_p ATTRIBUTE_UNUSED)3348 nios2_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
3349 {
3350 gcc_assert (nios2_custom_builtin_end == ARRAY_SIZE (nios2_builtin_decls));
3351
3352 if (code >= nios2_custom_builtin_end)
3353 return error_mark_node;
3354
3355 if (code >= nios2_fpu_builtin_base
3356 && code < nios2_custom_builtin_base
3357 && ! N2FPU_ENABLED_P (code - nios2_fpu_builtin_base))
3358 return error_mark_node;
3359
3360 return nios2_builtin_decls[code];
3361 }
3362
3363
3364 /* Low-level built-in expand routine. */
3365 static rtx
nios2_expand_builtin_insn(const struct nios2_builtin_desc * d,int n,struct expand_operand * ops,bool has_target_p)3366 nios2_expand_builtin_insn (const struct nios2_builtin_desc *d, int n,
3367 struct expand_operand *ops, bool has_target_p)
3368 {
3369 if (maybe_expand_insn (d->icode, n, ops))
3370 return has_target_p ? ops[0].value : const0_rtx;
3371 else
3372 {
3373 error ("invalid argument to built-in function %s", d->name);
3374 return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
3375 }
3376 }
3377
/* Expand ldio/stio and ldex/ldsex/stex/stsex form load-store
   instruction builtins.  D describes the insn; TARGET is a suggested
   result location for the load/status value.  */
static rtx
nios2_expand_ldst_builtin (tree exp, rtx target,
			   const struct nios2_builtin_desc *d)
{
  bool has_target_p;
  rtx addr, mem, val;
  struct expand_operand ops[MAX_RECOG_OPERANDS];
  machine_mode mode = insn_data[d->icode].operand[0].mode;

  /* Argument 0 is always the address operated on.  */
  addr = expand_normal (CALL_EXPR_ARG (exp, 0));
  mem = gen_rtx_MEM (mode, addr);

  /* The store forms are recognized by operand 0 allowing memory.  */
  if (insn_data[d->icode].operand[0].allows_mem)
    {
      /* stxio/stex/stsex.  */
      val = expand_normal (CALL_EXPR_ARG (exp, 1));
      if (CONST_INT_P (val))
	val = force_reg (mode, gen_int_mode (INTVAL (val), mode));
      /* Narrow/adjust the value to the insn's operand mode.  */
      val = simplify_gen_subreg (mode, val, GET_MODE (val), 0);
      create_output_operand (&ops[0], mem, mode);
      create_input_operand (&ops[1], val, mode);
      if (insn_data[d->icode].n_operands == 3)
	{
	  /* stex/stsex status value, returned as result of function.  */
	  create_output_operand (&ops[2], target, mode);
	  has_target_p = true;
	}
      else
	has_target_p = false;
    }
  else
    {
      /* ldxio.  */
      create_output_operand (&ops[0], target, mode);
      create_input_operand (&ops[1], mem, mode);
      has_target_p = true;
    }
  return nios2_expand_builtin_insn (d, insn_data[d->icode].n_operands, ops,
				    has_target_p);
}
3420
3421 /* Expand rdctl/wrctl builtins. */
3422 static rtx
nios2_expand_rdwrctl_builtin(tree exp,rtx target,const struct nios2_builtin_desc * d)3423 nios2_expand_rdwrctl_builtin (tree exp, rtx target,
3424 const struct nios2_builtin_desc *d)
3425 {
3426 bool has_target_p = (insn_data[d->icode].operand[0].predicate
3427 == register_operand);
3428 rtx ctlcode = expand_normal (CALL_EXPR_ARG (exp, 0));
3429 struct expand_operand ops[MAX_RECOG_OPERANDS];
3430 if (!rdwrctl_operand (ctlcode, VOIDmode))
3431 {
3432 error ("Control register number must be in range 0-31 for %s",
3433 d->name);
3434 return has_target_p ? gen_reg_rtx (SImode) : const0_rtx;
3435 }
3436 if (has_target_p)
3437 {
3438 create_output_operand (&ops[0], target, SImode);
3439 create_integer_operand (&ops[1], INTVAL (ctlcode));
3440 }
3441 else
3442 {
3443 rtx val = expand_normal (CALL_EXPR_ARG (exp, 1));
3444 create_integer_operand (&ops[0], INTVAL (ctlcode));
3445 create_input_operand (&ops[1], val, SImode);
3446 }
3447 return nios2_expand_builtin_insn (d, 2, ops, has_target_p);
3448 }
3449
3450 static rtx
nios2_expand_rdprs_builtin(tree exp,rtx target,const struct nios2_builtin_desc * d)3451 nios2_expand_rdprs_builtin (tree exp, rtx target,
3452 const struct nios2_builtin_desc *d)
3453 {
3454 rtx reg = expand_normal (CALL_EXPR_ARG (exp, 0));
3455 rtx imm = expand_normal (CALL_EXPR_ARG (exp, 1));
3456 struct expand_operand ops[MAX_RECOG_OPERANDS];
3457
3458 if (!rdwrctl_operand (reg, VOIDmode))
3459 {
3460 error ("Register number must be in range 0-31 for %s",
3461 d->name);
3462 return gen_reg_rtx (SImode);
3463 }
3464
3465 if (!rdprs_dcache_operand (imm, VOIDmode))
3466 {
3467 error ("The immediate value must fit into a %d-bit integer for %s",
3468 (TARGET_ARCH_R2) ? 12 : 16, d->name);
3469 return gen_reg_rtx (SImode);
3470 }
3471
3472 create_output_operand (&ops[0], target, SImode);
3473 create_input_operand (&ops[1], reg, SImode);
3474 create_integer_operand (&ops[2], INTVAL (imm));
3475
3476 return nios2_expand_builtin_insn (d, 3, ops, true);
3477 }
3478
3479 static rtx
nios2_expand_cache_builtin(tree exp,rtx target ATTRIBUTE_UNUSED,const struct nios2_builtin_desc * d)3480 nios2_expand_cache_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
3481 const struct nios2_builtin_desc *d)
3482 {
3483 rtx mem, addr;
3484 struct expand_operand ops[MAX_RECOG_OPERANDS];
3485
3486 addr = expand_normal (CALL_EXPR_ARG (exp, 0));
3487 mem = gen_rtx_MEM (SImode, addr);
3488
3489 create_input_operand (&ops[0], mem, SImode);
3490
3491 return nios2_expand_builtin_insn (d, 1, ops, false);
3492 }
3493
3494 static rtx
nios2_expand_wrpie_builtin(tree exp,rtx target,const struct nios2_builtin_desc * d)3495 nios2_expand_wrpie_builtin (tree exp, rtx target,
3496 const struct nios2_builtin_desc *d)
3497 {
3498 rtx val;
3499 struct expand_operand ops[MAX_RECOG_OPERANDS];
3500
3501 val = expand_normal (CALL_EXPR_ARG (exp, 0));
3502 create_input_operand (&ops[1], val, SImode);
3503 create_output_operand (&ops[0], target, SImode);
3504
3505 return nios2_expand_builtin_insn (d, 2, ops, true);
3506 }
3507
3508 static rtx
nios2_expand_eni_builtin(tree exp,rtx target ATTRIBUTE_UNUSED,const struct nios2_builtin_desc * d)3509 nios2_expand_eni_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
3510 const struct nios2_builtin_desc *d)
3511 {
3512 rtx imm = expand_normal (CALL_EXPR_ARG (exp, 0));
3513 struct expand_operand ops[MAX_RECOG_OPERANDS];
3514
3515 if (INTVAL (imm) != 0 && INTVAL (imm) != 1)
3516 {
3517 error ("The ENI instruction operand must be either 0 or 1");
3518 return const0_rtx;
3519 }
3520 create_integer_operand (&ops[0], INTVAL (imm));
3521
3522 return nios2_expand_builtin_insn (d, 1, ops, false);
3523 }
3524
/* Implement TARGET_EXPAND_BUILTIN.  Expand an expression EXP that calls
   a built-in function, with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
nios2_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
		      machine_mode mode ATTRIBUTE_UNUSED,
		      int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Fixed builtin range: dispatch on the specific builtin code.  */
  if (fcode < nios2_fpu_builtin_base)
    {
      const struct nios2_builtin_desc *d = &nios2_builtins[fcode];

      /* Reject builtins that need a newer architecture revision than
	 the one selected.  */
      if (d->arch > nios2_arch_option)
	{
	  error ("Builtin function %s requires Nios II R%d",
		 d->name, (int) d->arch);
	  /* Given it is invalid, just generate a normal call.  */
	  return expand_call (exp, target, ignore);
	}

      switch (fcode)
	{
	case NIOS2_BUILTIN_sync:
	  emit_insn (gen_sync ());
	  return const0_rtx;

	case NIOS2_BUILTIN_ldbio:
	case NIOS2_BUILTIN_ldbuio:
	case NIOS2_BUILTIN_ldhio:
	case NIOS2_BUILTIN_ldhuio:
	case NIOS2_BUILTIN_ldwio:
	case NIOS2_BUILTIN_stbio:
	case NIOS2_BUILTIN_sthio:
	case NIOS2_BUILTIN_stwio:
	case NIOS2_BUILTIN_ldex:
	case NIOS2_BUILTIN_ldsex:
	case NIOS2_BUILTIN_stex:
	case NIOS2_BUILTIN_stsex:
	  return nios2_expand_ldst_builtin (exp, target, d);

	case NIOS2_BUILTIN_rdctl:
	case NIOS2_BUILTIN_wrctl:
	  return nios2_expand_rdwrctl_builtin (exp, target, d);

	case NIOS2_BUILTIN_rdprs:
	  return nios2_expand_rdprs_builtin (exp, target, d);

	case NIOS2_BUILTIN_flushd:
	case NIOS2_BUILTIN_flushda:
	  return nios2_expand_cache_builtin (exp, target, d);

	case NIOS2_BUILTIN_wrpie:
	  return nios2_expand_wrpie_builtin (exp, target, d);

	case NIOS2_BUILTIN_eni:
	  return nios2_expand_eni_builtin (exp, target, d);

	default:
	  gcc_unreachable ();
	}
    }
  else if (fcode < nios2_custom_builtin_end)
    /* FPU builtin range.  */
    return nios2_expand_fpu_builtin (exp, fcode - nios2_fpu_builtin_base,
				     target);
  else if (fcode < nios2_custom_builtin_end)
    /* Custom insn builtin range.  */
    return nios2_expand_custom_builtin (exp, fcode - nios2_custom_builtin_base,
					target);
  else
    gcc_unreachable ();
}
3603
3604 /* Implement TARGET_INIT_LIBFUNCS. */
3605 static void
nios2_init_libfuncs(void)3606 nios2_init_libfuncs (void)
3607 {
3608 /* For Linux, we have access to kernel support for atomic operations. */
3609 if (TARGET_LINUX_ABI)
3610 init_sync_libfuncs (UNITS_PER_WORD);
3611 }
3612
3613
3614
/* Register a custom code use, and signal error if a conflict was found.
   N is the custom instruction number (0-255); STATUS says whether the
   use comes from an FPU -mcustom-<insn> switch (CCS_FPU) or from a
   __builtin_custom_* call (CCS_BUILTIN_CALL); INDEX identifies the FPU
   insn or the builtin signature involved, for diagnostics.  */
static void
nios2_register_custom_code (unsigned int N, enum nios2_ccs_code status,
			    int index)
{
  gcc_assert (N <= 255);

  if (status == CCS_FPU)
    {
      /* Conflict if N is already claimed by a different FPU insn...  */
      if (custom_code_status[N] == CCS_FPU && index != custom_code_index[N])
	{
	  custom_code_conflict = true;
	  error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
		 N2FPU_NAME (custom_code_index[N]), N2FPU_NAME (index));
	}
      /* ...or by an explicit __builtin_custom_* call.  */
      else if (custom_code_status[N] == CCS_BUILTIN_CALL)
	{
	  custom_code_conflict = true;
	  error ("call to %<__builtin_custom_%s%> conflicts with switch "
		 "%<-mcustom-%s%>", custom_builtin_name[custom_code_index[N]],
		 N2FPU_NAME (index));
	}
    }
  else if (status == CCS_BUILTIN_CALL)
    {
      if (custom_code_status[N] == CCS_FPU)
	{
	  custom_code_conflict = true;
	  error ("call to %<__builtin_custom_%s%> conflicts with switch "
		 "%<-mcustom-%s%>", custom_builtin_name[index],
		 N2FPU_NAME (custom_code_index[N]));
	}
      else
	{
	  /* Note that code conflicts between different __builtin_custom_xnxx
	     calls are not checked.  */
	}
    }
  else
    gcc_unreachable ();

  /* Record the use even when a conflict was diagnosed.  */
  custom_code_status[N] = status;
  custom_code_index[N] = index;
}
3659
3660 /* Mark a custom code as not in use. */
3661 static void
nios2_deregister_custom_code(unsigned int N)3662 nios2_deregister_custom_code (unsigned int N)
3663 {
3664 if (N <= 255)
3665 {
3666 custom_code_status[N] = CCS_UNUSED;
3667 custom_code_index[N] = 0;
3668 }
3669 }
3670
3671 /* Target attributes can affect per-function option state, so we need to
3672 save/restore the custom code tracking info using the
3673 TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks. */
3674
/* Implement TARGET_OPTION_SAVE: snapshot the per-insn FPU custom code
   numbers and the custom-code tracking arrays into PTR.  */
static void
nios2_option_save (struct cl_target_option *ptr,
		   struct gcc_options *opts ATTRIBUTE_UNUSED)
{
  unsigned int i;
  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    ptr->saved_fpu_custom_code[i] = N2FPU_N (i);
  memcpy (ptr->saved_custom_code_status, custom_code_status,
	  sizeof (custom_code_status));
  memcpy (ptr->saved_custom_code_index, custom_code_index,
	  sizeof (custom_code_index));
}
3687
/* Implement TARGET_OPTION_RESTORE: reload the state saved by
   nios2_option_save from PTR back into the global tracking arrays.  */
static void
nios2_option_restore (struct gcc_options *opts ATTRIBUTE_UNUSED,
		      struct cl_target_option *ptr)
{
  unsigned int i;
  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    N2FPU_N (i) = ptr->saved_fpu_custom_code[i];
  memcpy (custom_code_status, ptr->saved_custom_code_status,
	  sizeof (custom_code_status));
  memcpy (custom_code_index, ptr->saved_custom_code_index,
	  sizeof (custom_code_index));
}
3700
/* Inner function to process the attribute((target(...))), take an argument and
   set the current options from the argument.  If we have a list, recursively
   go over the list.  Returns false (after diagnosing) on any invalid
   option string.  */

static bool
nios2_valid_target_attribute_rec (tree args)
{
  if (TREE_CODE (args) == TREE_LIST)
    {
      bool ret = true;
      /* Process every list element; remember any failure but keep
	 going so all errors get reported.  */
      for (; args; args = TREE_CHAIN (args))
	if (TREE_VALUE (args)
	    && !nios2_valid_target_attribute_rec (TREE_VALUE (args)))
	  ret = false;
      return ret;
    }
  else if (TREE_CODE (args) == STRING_CST)
    {
      /* Work on a scratch copy: the parse below writes NULs into the
	 string to split it at ',' and '='.  */
      char *argstr = ASTRDUP (TREE_STRING_POINTER (args));
      while (argstr && *argstr != '\0')
	{
	  bool no_opt = false, end_p = false;
	  char *eq = NULL, *p;
	  while (ISSPACE (*argstr))
	    argstr++;
	  p = argstr;
	  /* Find the end of this comma-separated option, noting the
	     first '=' within it.  */
	  while (*p != '\0' && *p != ',')
	    {
	      if (!eq && *p == '=')
		eq = p;
	      ++p;
	    }
	  if (*p == '\0')
	    end_p = true;
	  else
	    *p = '\0';
	  if (eq) *eq = '\0';

	  /* A "no-" prefix negates the option that follows.  */
	  if (!strncmp (argstr, "no-", 3))
	    {
	      no_opt = true;
	      argstr += 3;
	    }
	  if (!strncmp (argstr, "custom-fpu-cfg", 14))
	    {
	      char *end_eq = p;
	      if (no_opt)
		{
		  error ("custom-fpu-cfg option does not support %<no-%>");
		  return false;
		}
	      if (!eq)
		{
		  error ("custom-fpu-cfg option requires configuration"
			 " argument");
		  return false;
		}
	      /* Increment and skip whitespace.  */
	      while (ISSPACE (*(++eq))) ;
	      /* Decrement and skip to before any trailing whitespace.  */
	      while (ISSPACE (*(--end_eq))) ;

	      nios2_handle_custom_fpu_cfg (eq, end_eq + 1, true);
	    }
	  else if (!strncmp (argstr, "custom-", 7))
	    {
	      /* A per-insn custom-<insn>[=N] option: look the insn name
		 up in the FPU insn table.  */
	      int code = -1;
	      unsigned int i;
	      for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
		if (!strncmp (argstr + 7, N2FPU_NAME (i),
			      strlen (N2FPU_NAME (i))))
		  {
		    /* Found insn.  */
		    code = i;
		    break;
		  }
	      if (code >= 0)
		{
		  if (no_opt)
		    {
		      if (eq)
			{
			  error ("%<no-custom-%s%> does not accept arguments",
				 N2FPU_NAME (code));
			  return false;
			}
		      /* Disable option by setting to -1.  */
		      nios2_deregister_custom_code (N2FPU_N (code));
		      N2FPU_N (code) = -1;
		    }
		  else
		    {
		      char *t;
		      if (eq)
			while (ISSPACE (*(++eq))) ;
		      if (!eq || eq == p)
			{
			  error ("%<custom-%s=%> requires argument",
				 N2FPU_NAME (code));
			  return false;
			}
		      /* Verify the argument is all digits (whitespace
			 permitted) before handing it to atoi.  */
		      for (t = eq; t != p; ++t)
			{
			  if (ISSPACE (*t))
			    continue;
			  if (!ISDIGIT (*t))
			    {
			      error ("`custom-%s=' argument requires "
				     "numeric digits", N2FPU_NAME (code));
			      return false;
			    }
			}
		      /* Set option to argument.  */
		      N2FPU_N (code) = atoi (eq);
		      nios2_handle_custom_fpu_insn_option (code);
		    }
		}
	      else
		{
		  error ("%<custom-%s=%> is not recognised as FPU instruction",
			 argstr + 7);
		  return false;
		}
	    }
	  else
	    {
	      error ("%<%s%> is unknown", argstr);
	      return false;
	    }

	  if (end_p)
	    break;
	  else
	    argstr = p + 1;
	}
      return true;
    }
  else
    gcc_unreachable ();
}
3841
/* Return a TARGET_OPTION_NODE tree of the target options listed or NULL.
   Re-checks the combined custom insn configuration before capturing the
   resulting global option state.  */

static tree
nios2_valid_target_attribute_tree (tree args)
{
  if (!nios2_valid_target_attribute_rec (args))
    return NULL_TREE;
  nios2_custom_check_insns ();
  return build_target_option_node (&global_options);
}
3852
/* Hook to validate attribute((target("string"))).  Applies the option
   string to the global options, records the resulting target/optimize
   nodes on FNDECL, then restores the options that were in effect on
   entry.  Returns false if the option string is invalid.  */

static bool
nios2_valid_target_attribute_p (tree fndecl, tree ARG_UNUSED (name),
				tree args, int ARG_UNUSED (flags))
{
  struct cl_target_option cur_target;
  bool ret = true;
  tree old_optimize = build_optimization_node (&global_options);
  tree new_target, new_optimize;
  tree func_optimize = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl);

  /* If the function changed the optimization levels as well as setting target
     options, start with the optimizations specified.  */
  if (func_optimize && func_optimize != old_optimize)
    cl_optimization_restore (&global_options,
			     TREE_OPTIMIZATION (func_optimize));

  /* The target attributes may also change some optimization flags, so update
     the optimization options if necessary.  */
  cl_target_option_save (&cur_target, &global_options);
  new_target = nios2_valid_target_attribute_tree (args);
  new_optimize = build_optimization_node (&global_options);

  if (!new_target)
    ret = false;

  else if (fndecl)
    {
      /* Attach the validated option nodes to the declaration.  */
      DECL_FUNCTION_SPECIFIC_TARGET (fndecl) = new_target;

      if (old_optimize != new_optimize)
	DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = new_optimize;
    }

  /* Undo the temporary option changes made above.  */
  cl_target_option_restore (&global_options, &cur_target);

  if (old_optimize != new_optimize)
    cl_optimization_restore (&global_options,
			     TREE_OPTIMIZATION (old_optimize));
  return ret;
}
3895
/* Remember the last target of nios2_set_current_function.  */
static GTY(()) tree nios2_previous_fndecl;

/* Establish appropriate back-end context for processing the function
   FNDECL.  The argument might be NULL to indicate processing at top
   level, outside of any function scope.  */
static void
nios2_set_current_function (tree fndecl)
{
  /* Target-option node of the previously-processed function, if any.  */
  tree old_tree = (nios2_previous_fndecl
		   ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl)
		   : NULL_TREE);

  tree new_tree = (fndecl
		   ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
		   : NULL_TREE);

  if (fndecl && fndecl != nios2_previous_fndecl)
    {
      nios2_previous_fndecl = fndecl;
      /* Same options as before: nothing to do.  */
      if (old_tree == new_tree)
	;

      /* Switch to the new function's saved target options.  */
      else if (new_tree)
	{
	  cl_target_option_restore (&global_options,
				    TREE_TARGET_OPTION (new_tree));
	  target_reinit ();
	}

      /* New function has no specific options: revert to the current
	 defaults.  */
      else if (old_tree)
	{
	  struct cl_target_option *def
	    = TREE_TARGET_OPTION (target_option_current_node);

	  cl_target_option_restore (&global_options, def);
	  target_reinit ();
	}
    }
}
3936
3937 /* Hook to validate the current #pragma GCC target and set the FPU custom
3938 code option state. If ARGS is NULL, then POP_TARGET is used to reset
3939 the options. */
3940 static bool
nios2_pragma_target_parse(tree args,tree pop_target)3941 nios2_pragma_target_parse (tree args, tree pop_target)
3942 {
3943 tree cur_tree;
3944 if (! args)
3945 {
3946 cur_tree = ((pop_target)
3947 ? pop_target
3948 : target_option_default_node);
3949 cl_target_option_restore (&global_options,
3950 TREE_TARGET_OPTION (cur_tree));
3951 }
3952 else
3953 {
3954 cur_tree = nios2_valid_target_attribute_tree (args);
3955 if (!cur_tree)
3956 return false;
3957 }
3958
3959 target_option_current_node = cur_tree;
3960 return true;
3961 }
3962
/* Implement TARGET_MERGE_DECL_ATTRIBUTES.
   We are just using this hook to add some additional error checking to
   the default behavior.  GCC does not provide a target hook for merging
   the target options, and only correctly handles merging empty vs non-empty
   option data; see merge_decls() in c-decl.c.
   So here we require either that at least one of the decls has empty
   target options, or that the target options/data be identical.  */
static tree
nios2_merge_decl_attributes (tree olddecl, tree newdecl)
{
  tree oldopts = lookup_attribute ("target", DECL_ATTRIBUTES (olddecl));
  tree newopts = lookup_attribute ("target", DECL_ATTRIBUTES (newdecl));
  if (newopts && oldopts && newopts != oldopts)
    {
      tree oldtree = DECL_FUNCTION_SPECIFIC_TARGET (olddecl);
      tree newtree = DECL_FUNCTION_SPECIFIC_TARGET (newdecl);
      /* Diagnose only when both decls carry target data and the saved
	 option structures differ bitwise.  */
      if (oldtree && newtree && oldtree != newtree)
	{
	  struct cl_target_option *olddata = TREE_TARGET_OPTION (oldtree);
	  struct cl_target_option *newdata = TREE_TARGET_OPTION (newtree);
	  if (olddata != newdata
	      && memcmp (olddata, newdata, sizeof (struct cl_target_option)))
	    error ("%qE redeclared with conflicting %qs attributes",
		   DECL_NAME (newdecl), "target");
	}
    }
  /* Defer the actual merging to the generic routine.  */
  return merge_attributes (DECL_ATTRIBUTES (olddecl),
			   DECL_ATTRIBUTES (newdecl));
}
3992
/* Implement TARGET_ASM_OUTPUT_MI_THUNK.  Emit RTL for a thunk that
   adjusts the incoming "this" pointer by DELTA (and, if VCALL_OFFSET is
   nonzero, by a vtable-indirected offset) and then tail-calls FUNCTION.
   The insns are generated, then immediately run through final ().  */
static void
nios2_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
			   HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
			   tree function)
{
  rtx this_rtx, funexp;
  rtx_insn *insn;

  /* Pretend to be a post-reload pass while generating rtl.  */
  reload_completed = 1;

  if (flag_pic)
    nios2_load_pic_register ();

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $5.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO + 1);
  else
    this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);

  /* Add DELTA to THIS_RTX.  */
  nios2_emit_add_constant (this_rtx, delta);

  /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX.  */
  if (vcall_offset)
    {
      rtx tmp;

      /* r2 is call-clobbered, so it is safe as a temporary here.  */
      tmp = gen_rtx_REG (Pmode, 2);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
      nios2_emit_add_constant (tmp, vcall_offset);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
      emit_insn (gen_add2_insn (this_rtx, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  /* Function address needs to be constructed under PIC,
     provide r2 to use here.  */
  nios2_adjust_call_address (&funexp, gen_rtx_REG (Pmode, 2));
  insn = emit_call_insn (gen_sibcall_internal (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();

  /* Stop pretending to be a post-reload pass.  */
  reload_completed = 0;
}
4059
4060
4061 /* Utility function to break a memory address into
4062 base register + constant offset. Return false if something
4063 unexpected is seen. */
4064 static bool
split_mem_address(rtx addr,rtx * base_reg,rtx * offset)4065 split_mem_address (rtx addr, rtx *base_reg, rtx *offset)
4066 {
4067 if (REG_P (addr))
4068 {
4069 *base_reg = addr;
4070 *offset = const0_rtx;
4071 return true;
4072 }
4073 else if (GET_CODE (addr) == PLUS)
4074 {
4075 *base_reg = XEXP (addr, 0);
4076 *offset = XEXP (addr, 1);
4077 return true;
4078 }
4079 return false;
4080 }
4081
4082 /* Splits out the operands of an ALU insn, places them in *LHS, *RHS1, *RHS2. */
4083 static void
split_alu_insn(rtx_insn * insn,rtx * lhs,rtx * rhs1,rtx * rhs2)4084 split_alu_insn (rtx_insn *insn, rtx *lhs, rtx *rhs1, rtx *rhs2)
4085 {
4086 rtx pat = PATTERN (insn);
4087 gcc_assert (GET_CODE (pat) == SET);
4088 *lhs = SET_DEST (pat);
4089 *rhs1 = XEXP (SET_SRC (pat), 0);
4090 if (GET_RTX_CLASS (GET_CODE (SET_SRC (pat))) != RTX_UNARY)
4091 *rhs2 = XEXP (SET_SRC (pat), 1);
4092 return;
4093 }
4094
4095 /* Returns true if OP is a REG and assigned a CDX reg. */
4096 static bool
cdxreg(rtx op)4097 cdxreg (rtx op)
4098 {
4099 return REG_P (op) && (!reload_completed || CDX_REG_P (REGNO (op)));
4100 }
4101
4102 /* Returns true if OP is within range of CDX addi.n immediates. */
4103 static bool
cdx_add_immed(rtx op)4104 cdx_add_immed (rtx op)
4105 {
4106 if (CONST_INT_P (op))
4107 {
4108 HOST_WIDE_INT ival = INTVAL (op);
4109 return ival <= 128 && ival > 0 && (ival & (ival - 1)) == 0;
4110 }
4111 return false;
4112 }
4113
4114 /* Returns true if OP is within range of CDX andi.n immediates. */
4115 static bool
cdx_and_immed(rtx op)4116 cdx_and_immed (rtx op)
4117 {
4118 if (CONST_INT_P (op))
4119 {
4120 HOST_WIDE_INT ival = INTVAL (op);
4121 return (ival == 1 || ival == 2 || ival == 3 || ival == 4
4122 || ival == 8 || ival == 0xf || ival == 0x10
4123 || ival == 0x1f || ival == 0x20
4124 || ival == 0x3f || ival == 0x7f
4125 || ival == 0x80 || ival == 0xff || ival == 0x7ff
4126 || ival == 0xff00 || ival == 0xffff);
4127 }
4128 return false;
4129 }
4130
4131 /* Returns true if OP is within range of CDX movi.n immediates. */
4132 static bool
cdx_mov_immed(rtx op)4133 cdx_mov_immed (rtx op)
4134 {
4135 if (CONST_INT_P (op))
4136 {
4137 HOST_WIDE_INT ival = INTVAL (op);
4138 return ((ival >= 0 && ival <= 124)
4139 || ival == 0xff || ival == -2 || ival == -1);
4140 }
4141 return false;
4142 }
4143
4144 /* Returns true if OP is within range of CDX slli.n/srli.n immediates. */
4145 static bool
cdx_shift_immed(rtx op)4146 cdx_shift_immed (rtx op)
4147 {
4148 if (CONST_INT_P (op))
4149 {
4150 HOST_WIDE_INT ival = INTVAL (op);
4151 return (ival == 1 || ival == 2 || ival == 3 || ival == 8
4152 || ival == 12 || ival == 16 || ival == 24
4153 || ival == 31);
4154 }
4155 return false;
4156 }
4157
4158
4159
/* Classification of different kinds of add instructions.  */
enum nios2_add_insn_kind {
  nios2_add_n_kind,
  nios2_addi_n_kind,
  nios2_subi_n_kind,
  nios2_spaddi_n_kind,
  nios2_spinci_n_kind,
  nios2_spdeci_n_kind,
  nios2_add_kind,
  nios2_addi_kind
};

/* Mnemonic for each add-insn kind; indexed by enum nios2_add_insn_kind
   and must be kept in sync with it.  */
static const char *nios2_add_insn_names[] = {
  "add.n", "addi.n", "subi.n", "spaddi.n", "spinci.n", "spdeci.n",
  "add", "addi" };
/* Whether each kind is a narrow (2-byte CDX) form; same indexing.  */
static bool nios2_add_insn_narrow[] = {
  true, true, true, true, true, true,
  false, false};
4178
/* Function to classify kinds of add instruction patterns.
   LHS, RHS1, RHS2 are the destination and source operands; returns the
   narrowest CDX form that can encode them, or the wide add/addi kind.  */
static enum nios2_add_insn_kind
nios2_add_insn_classify (rtx_insn *insn ATTRIBUTE_UNUSED,
			 rtx lhs, rtx rhs1, rtx rhs2)
{
  if (TARGET_HAS_CDX)
    {
      if (cdxreg (lhs) && cdxreg (rhs1))
	{
	  if (cdxreg (rhs2))
	    return nios2_add_n_kind;
	  if (CONST_INT_P (rhs2))
	    {
	      HOST_WIDE_INT ival = INTVAL (rhs2);
	      /* A negative constant maps to subi.n of the negated value.  */
	      if (ival > 0 && cdx_add_immed (rhs2))
		return nios2_addi_n_kind;
	      if (ival < 0 && cdx_add_immed (GEN_INT (-ival)))
		return nios2_subi_n_kind;
	    }
	}
      else if (rhs1 == stack_pointer_rtx
	       && CONST_INT_P (rhs2))
	{
	  /* SP-relative forms encode a scaled 7-bit word offset; the
	     low two bits of the raw offset must be zero.  */
	  HOST_WIDE_INT imm7 = INTVAL (rhs2) >> 2;
	  HOST_WIDE_INT rem = INTVAL (rhs2) & 3;
	  if (rem == 0 && (imm7 & ~0x7f) == 0)
	    {
	      if (cdxreg (lhs))
		return nios2_spaddi_n_kind;
	      if (lhs == stack_pointer_rtx)
		return nios2_spinci_n_kind;
	    }
	  /* Retry with the negated offset for the SP-decrement form.  */
	  imm7 = -INTVAL(rhs2) >> 2;
	  rem = -INTVAL (rhs2) & 3;
	  if (lhs == stack_pointer_rtx
	      && rem == 0 && (imm7 & ~0x7f) == 0)
	    return nios2_spdeci_n_kind;
	}
    }
  /* Fall back to the 4-byte register or immediate form.  */
  return ((REG_P (rhs2) || rhs2 == const0_rtx)
	  ? nios2_add_kind : nios2_addi_kind);
}
4221
4222 /* Emit assembly language for the different kinds of add instructions. */
4223 const char*
nios2_add_insn_asm(rtx_insn * insn,rtx * operands)4224 nios2_add_insn_asm (rtx_insn *insn, rtx *operands)
4225 {
4226 static char buf[256];
4227 int ln = 256;
4228 enum nios2_add_insn_kind kind
4229 = nios2_add_insn_classify (insn, operands[0], operands[1], operands[2]);
4230 if (kind == nios2_subi_n_kind)
4231 snprintf (buf, ln, "subi.n\t%%0, %%1, %d", (int) -INTVAL (operands[2]));
4232 else if (kind == nios2_spaddi_n_kind)
4233 snprintf (buf, ln, "spaddi.n\t%%0, %%2");
4234 else if (kind == nios2_spinci_n_kind)
4235 snprintf (buf, ln, "spinci.n\t%%2");
4236 else if (kind == nios2_spdeci_n_kind)
4237 snprintf (buf, ln, "spdeci.n\t%d", (int) -INTVAL (operands[2]));
4238 else
4239 snprintf (buf, ln, "%s\t%%0, %%1, %%z2", nios2_add_insn_names[(int)kind]);
4240 return buf;
4241 }
4242
/* This routine, which the default "length" attribute computation is
   based on, encapsulates information about all the cases where CDX
   provides a narrow 2-byte instruction form.  */
bool
nios2_cdx_narrow_form_p (rtx_insn *insn)
{
  rtx pat, lhs, rhs1, rhs2;
  enum attr_type type;
  if (!TARGET_HAS_CDX)
    return false;
  type = get_attr_type (insn);
  pat = PATTERN (insn);
  gcc_assert (reload_completed);
  switch (type)
    {
    case TYPE_CONTROL:
      if (GET_CODE (pat) == SIMPLE_RETURN)
	return true;
      /* Unwrap PARALLEL/SET to reach the control expression.  */
      if (GET_CODE (pat) == PARALLEL)
	pat = XVECEXP (pat, 0, 0);
      if (GET_CODE (pat) == SET)
	pat = SET_SRC (pat);
      if (GET_CODE (pat) == IF_THEN_ELSE)
	{
	  /* Conditional branch patterns; for these we
	     only check the comparison to find beqz.n/bnez.n cases.
	     For the 'nios2_cbranch' pattern, we cannot also check
	     the branch range here.  That will be done at the md
	     pattern "length" attribute computation.  */
	  rtx cmp = XEXP (pat, 0);
	  return ((GET_CODE (cmp) == EQ || GET_CODE (cmp) == NE)
		  && cdxreg (XEXP (cmp, 0))
		  && XEXP (cmp, 1) == const0_rtx);
	}
      if (GET_CODE (pat) == TRAP_IF)
	/* trap.n is always usable.  */
	return true;
      if (GET_CODE (pat) == CALL)
	pat = XEXP (XEXP (pat, 0), 0);
      if (REG_P (pat))
	/* Control instructions taking a register operand are indirect
	   jumps and calls.  The CDX instructions have a 5-bit register
	   field so any reg is valid.  */
	return true;
      else
	{
	  gcc_assert (!insn_variable_length_p (insn));
	  return false;
	}
    case TYPE_ADD:
      {
	enum nios2_add_insn_kind kind;
	split_alu_insn (insn, &lhs, &rhs1, &rhs2);
	kind = nios2_add_insn_classify (insn, lhs, rhs1, rhs2);
	return nios2_add_insn_narrow[(int)kind];
      }
    case TYPE_LD:
      {
	bool ret;
	HOST_WIDE_INT offset, rem = 0;
	rtx addr, reg = SET_DEST (pat), mem = SET_SRC (pat);
	if (GET_CODE (mem) == SIGN_EXTEND)
	  /* No CDX form for sign-extended load.  */
	  return false;
	if (GET_CODE (mem) == ZERO_EXTEND)
	  /* The load alternatives in the zero_extend* patterns.  */
	  mem = XEXP (mem, 0);
	if (MEM_P (mem))
	  {
	    /* ldxio.  */
	    if ((MEM_VOLATILE_P (mem) && TARGET_BYPASS_CACHE_VOLATILE)
		|| TARGET_BYPASS_CACHE)
	      return false;
	    addr = XEXP (mem, 0);
	    /* GP-based references are never narrow.  */
	    if (gprel_constant_p (addr))
	      return false;
	    ret = split_mem_address (addr, &rhs1, &rhs2);
	    gcc_assert (ret);
	  }
	else
	  return false;

	/* The narrow forms encode a scaled unsigned offset; REM holds
	   the alignment remainder, which must come out zero.  */
	offset = INTVAL (rhs2);
	if (GET_MODE (mem) == SImode)
	  {
	    rem = offset & 3;
	    offset >>= 2;
	    /* ldwsp.n case.  */
	    if (rtx_equal_p (rhs1, stack_pointer_rtx)
		&& rem == 0 && (offset & ~0x1f) == 0)
	      return true;
	  }
	else if (GET_MODE (mem) == HImode)
	  {
	    rem = offset & 1;
	    offset >>= 1;
	  }
	/* ldbu.n, ldhu.n, ldw.n cases.  */
	return (cdxreg (reg) && cdxreg (rhs1)
		&& rem == 0 && (offset & ~0xf) == 0);
      }
    case TYPE_ST:
      if (GET_CODE (pat) == PARALLEL)
	/* stex, stsex.  */
	return false;
      else
	{
	  bool ret;
	  HOST_WIDE_INT offset, rem = 0;
	  rtx addr, reg = SET_SRC (pat), mem = SET_DEST (pat);
	  if (!MEM_P (mem))
	    return false;
	  /* stxio.  */
	  if ((MEM_VOLATILE_P (mem) && TARGET_BYPASS_CACHE_VOLATILE)
	      || TARGET_BYPASS_CACHE)
	    return false;
	  addr = XEXP (mem, 0);
	  /* GP-based references are never narrow.  */
	  if (gprel_constant_p (addr))
	    return false;
	  ret = split_mem_address (addr, &rhs1, &rhs2);
	  gcc_assert (ret);
	  offset = INTVAL (rhs2);
	  if (GET_MODE (mem) == SImode)
	    {
	      rem = offset & 3;
	      offset >>= 2;
	      /* stwsp.n case.  */
	      if (rtx_equal_p (rhs1, stack_pointer_rtx)
		  && rem == 0 && (offset & ~0x1f) == 0)
		return true;
	      /* stwz.n case.  */
	      else if (reg == const0_rtx && cdxreg (rhs1)
		       && rem == 0 && (offset & ~0x3f) == 0)
		return true;
	    }
	  else if (GET_MODE (mem) == HImode)
	    {
	      rem = offset & 1;
	      offset >>= 1;
	    }
	  else
	    {
	      gcc_assert (GET_MODE (mem) == QImode);
	      /* stbz.n case.  */
	      if (reg == const0_rtx && cdxreg (rhs1)
		  && (offset & ~0x3f) == 0)
		return true;
	    }

	  /* stbu.n, sthu.n, stw.n cases.  */
	  return (cdxreg (reg) && cdxreg (rhs1)
		  && rem == 0 && (offset & ~0xf) == 0);
	}
    case TYPE_MOV:
      lhs = SET_DEST (pat);
      rhs1 = SET_SRC (pat);
      if (CONST_INT_P (rhs1))
	return (cdxreg (lhs) && cdx_mov_immed (rhs1));
      gcc_assert (REG_P (lhs) && REG_P (rhs1));
      return true;

    case TYPE_AND:
      /* Some zero_extend* alternatives are and insns.  */
      if (GET_CODE (SET_SRC (pat)) == ZERO_EXTEND)
	return (cdxreg (SET_DEST (pat))
		&& cdxreg (XEXP (SET_SRC (pat), 0)));
      split_alu_insn (insn, &lhs, &rhs1, &rhs2);
      if (CONST_INT_P (rhs2))
	return (cdxreg (lhs) && cdxreg (rhs1) && cdx_and_immed (rhs2));
      return (cdxreg (lhs) && cdxreg (rhs2)
	      && (!reload_completed || rtx_equal_p (lhs, rhs1)));

    case TYPE_OR:
    case TYPE_XOR:
      /* Note the two-address limitation for CDX form.  */
      split_alu_insn (insn, &lhs, &rhs1, &rhs2);
      return (cdxreg (lhs) && cdxreg (rhs2)
	      && (!reload_completed || rtx_equal_p (lhs, rhs1)));

    case TYPE_SUB:
      split_alu_insn (insn, &lhs, &rhs1, &rhs2);
      return (cdxreg (lhs) && cdxreg (rhs1) && cdxreg (rhs2));

    case TYPE_NEG:
    case TYPE_NOT:
      split_alu_insn (insn, &lhs, &rhs1, NULL);
      return (cdxreg (lhs) && cdxreg (rhs1));

    case TYPE_SLL:
    case TYPE_SRL:
      /* Either a register-by-immediate shift, or a two-address
	 register-by-register shift.  */
      split_alu_insn (insn, &lhs, &rhs1, &rhs2);
      return (cdxreg (lhs)
	      && ((cdxreg (rhs1) && cdx_shift_immed (rhs2))
		  || (cdxreg (rhs2)
		      && (!reload_completed || rtx_equal_p (lhs, rhs1)))));
    case TYPE_NOP:
    case TYPE_PUSH:
    case TYPE_POP:
      return true;
    default:
      break;
    }
  return false;
}
4449
/* Main function to implement the pop_operation predicate that
   check pop.n insn pattern integrity.  The CDX pop.n patterns mostly
   hardcode the restored registers, so the main checking is for the
   SP offsets.  */
bool
pop_operation_p (rtx op)
{
  int i;
  HOST_WIDE_INT last_offset = -1, len = XVECLEN (op, 0);
  rtx base_reg, offset;

  if (len < 3 /* At least has a return, SP-update, and RA restore.  */
      || GET_CODE (XVECEXP (op, 0, 0)) != RETURN
      || !base_reg_adjustment_p (XVECEXP (op, 0, 1), &base_reg, &offset)
      || !rtx_equal_p (base_reg, stack_pointer_rtx)
      || !CONST_INT_P (offset)
      || (INTVAL (offset) & 3) != 0)
    return false;

  /* Walk the register restores from last to first; their offsets must
     form a contiguous descending sequence of word slots.  */
  for (i = len - 1; i > 1; i--)
    {
      rtx set = XVECEXP (op, 0, i);
      rtx curr_base_reg, curr_offset;

      if (GET_CODE (set) != SET || !MEM_P (SET_SRC (set))
	  || !split_mem_address (XEXP (SET_SRC (set), 0),
				 &curr_base_reg, &curr_offset)
	  || !rtx_equal_p (base_reg, curr_base_reg)
	  || !CONST_INT_P (curr_offset))
	return false;
      if (i == len - 1)
	{
	  /* Lowest-addressed restore: must be word-aligned and within
	     the pop.n encodable range.  */
	  last_offset = INTVAL (curr_offset);
	  if ((last_offset & 3) != 0 || last_offset > 60)
	    return false;
	}
      else
	{
	  /* Each earlier element must sit exactly one word higher.  */
	  last_offset += 4;
	  if (INTVAL (curr_offset) != last_offset)
	    return false;
	}
    }
  /* The SP adjustment must step just past the topmost restored slot.  */
  if (last_offset < 0 || last_offset + 4 != INTVAL (offset))
    return false;

  return true;
}
4498
4499
4500 /* Masks of registers that are valid for CDX ldwm/stwm instructions.
4501 The instruction can encode subsets drawn from either R2-R13 or
4502 R14-R23 + FP + RA. */
4503 #define CDX_LDSTWM_VALID_REGS_0 0x00003ffc
4504 #define CDX_LDSTWM_VALID_REGS_1 0x90ffc000
4505
4506 static bool
nios2_ldstwm_regset_p(unsigned int regno,unsigned int * regset)4507 nios2_ldstwm_regset_p (unsigned int regno, unsigned int *regset)
4508 {
4509 if (*regset == 0)
4510 {
4511 if (CDX_LDSTWM_VALID_REGS_0 & (1 << regno))
4512 *regset = CDX_LDSTWM_VALID_REGS_0;
4513 else if (CDX_LDSTWM_VALID_REGS_1 & (1 << regno))
4514 *regset = CDX_LDSTWM_VALID_REGS_1;
4515 else
4516 return false;
4517 return true;
4518 }
4519 else
4520 return (*regset & (1 << regno)) != 0;
4521 }
4522
4523 /* Main function to implement ldwm_operation/stwm_operation
4524 predicates that check ldwm/stwm insn pattern integrity. */
4525 bool
ldstwm_operation_p(rtx op,bool load_p)4526 ldstwm_operation_p (rtx op, bool load_p)
4527 {
4528 int start, i, end = XVECLEN (op, 0) - 1, last_regno = -1;
4529 unsigned int regset = 0;
4530 rtx base_reg, offset;
4531 rtx first_elt = XVECEXP (op, 0, 0);
4532 bool inc_p = true;
4533 bool wb_p = base_reg_adjustment_p (first_elt, &base_reg, &offset);
4534 if (GET_CODE (XVECEXP (op, 0, end)) == RETURN)
4535 end--;
4536 start = wb_p ? 1 : 0;
4537 for (i = start; i <= end; i++)
4538 {
4539 int regno;
4540 rtx reg, mem, elt = XVECEXP (op, 0, i);
4541 /* Return early if not a SET at all. */
4542 if (GET_CODE (elt) != SET)
4543 return false;
4544 reg = load_p ? SET_DEST (elt) : SET_SRC (elt);
4545 mem = load_p ? SET_SRC (elt) : SET_DEST (elt);
4546 if (!REG_P (reg) || !MEM_P (mem))
4547 return false;
4548 regno = REGNO (reg);
4549 if (!nios2_ldstwm_regset_p (regno, ®set))
4550 return false;
4551 /* If no writeback to determine direction, use offset of first MEM. */
4552 if (wb_p)
4553 inc_p = INTVAL (offset) > 0;
4554 else if (i == start)
4555 {
4556 rtx first_base, first_offset;
4557 if (!split_mem_address (XEXP (mem, 0),
4558 &first_base, &first_offset))
4559 return false;
4560 base_reg = first_base;
4561 inc_p = INTVAL (first_offset) >= 0;
4562 }
4563 /* Ensure that the base register is not loaded into. */
4564 if (load_p && regno == (int) REGNO (base_reg))
4565 return false;
4566 /* Check for register order inc/dec integrity. */
4567 if (last_regno >= 0)
4568 {
4569 if (inc_p && last_regno >= regno)
4570 return false;
4571 if (!inc_p && last_regno <= regno)
4572 return false;
4573 }
4574 last_regno = regno;
4575 }
4576 return true;
4577 }
4578
4579 /* Helper for nios2_ldst_parallel, for generating a parallel vector
4580 SET element. */
4581 static rtx
gen_ldst(bool load_p,int regno,rtx base_mem,int offset)4582 gen_ldst (bool load_p, int regno, rtx base_mem, int offset)
4583 {
4584 rtx reg = gen_rtx_REG (SImode, regno);
4585 rtx mem = adjust_address_nv (base_mem, SImode, offset);
4586 return gen_rtx_SET (load_p ? reg : mem,
4587 load_p ? mem : reg);
4588 }
4589
4590 /* A general routine for creating the body RTL pattern of
4591 ldwm/stwm/push.n/pop.n insns.
4592 LOAD_P: true/false for load/store direction.
4593 REG_INC_P: whether registers are incrementing/decrementing in the
4594 *RTL vector* (not necessarily the order defined in the ISA specification).
4595 OFFSET_INC_P: Same as REG_INC_P, but for the memory offset order.
4596 BASE_MEM: starting MEM.
4597 BASE_UPDATE: amount to update base register; zero means no writeback.
4598 REGMASK: register mask to load/store.
4599 RET_P: true if to tag a (return) element at the end.
4600
4601 Note that this routine does not do any checking. It's the job of the
4602 caller to do the right thing, and the insn patterns to do the
4603 safe-guarding. */
4604 static rtx
nios2_ldst_parallel(bool load_p,bool reg_inc_p,bool offset_inc_p,rtx base_mem,int base_update,unsigned HOST_WIDE_INT regmask,bool ret_p)4605 nios2_ldst_parallel (bool load_p, bool reg_inc_p, bool offset_inc_p,
4606 rtx base_mem, int base_update,
4607 unsigned HOST_WIDE_INT regmask, bool ret_p)
4608 {
4609 rtvec p;
4610 int regno, b = 0, i = 0, n = 0, len = popcount_hwi (regmask);
4611 if (ret_p) len++, i++, b++;
4612 if (base_update != 0) len++, i++;
4613 p = rtvec_alloc (len);
4614 for (regno = (reg_inc_p ? 0 : 31);
4615 regno != (reg_inc_p ? 32 : -1);
4616 regno += (reg_inc_p ? 1 : -1))
4617 if ((regmask & (1 << regno)) != 0)
4618 {
4619 int offset = (offset_inc_p ? 4 : -4) * n++;
4620 RTVEC_ELT (p, i++) = gen_ldst (load_p, regno, base_mem, offset);
4621 }
4622 if (ret_p)
4623 RTVEC_ELT (p, 0) = ret_rtx;
4624 if (base_update != 0)
4625 {
4626 rtx reg, offset;
4627 if (!split_mem_address (XEXP (base_mem, 0), ®, &offset))
4628 gcc_unreachable ();
4629 RTVEC_ELT (p, b) =
4630 gen_rtx_SET (reg, plus_constant (Pmode, reg, base_update));
4631 }
4632 return gen_rtx_PARALLEL (VOIDmode, p);
4633 }
4634
/* CDX ldwm/stwm peephole optimization pattern related routines.  */

/* Data structure and sorting function for ldwm/stwm peephole optimizers.
   One entry describes a single candidate load or store.  */
struct ldstwm_operand
{
  int offset;	/* Offset from base register.  */
  rtx reg;	/* Register to store at this offset.  */
  rtx mem;	/* Original mem.  */
  bool bad;	/* True if this load/store can't be combined.  */
  bool rewrite;	/* True if we should rewrite using scratch.  */
};
4646
4647 static int
compare_ldstwm_operands(const void * arg1,const void * arg2)4648 compare_ldstwm_operands (const void *arg1, const void *arg2)
4649 {
4650 const struct ldstwm_operand *op1 = (const struct ldstwm_operand *) arg1;
4651 const struct ldstwm_operand *op2 = (const struct ldstwm_operand *) arg2;
4652 if (op1->bad)
4653 return op2->bad ? 0 : 1;
4654 else if (op2->bad)
4655 return -1;
4656 else
4657 return op1->offset - op2->offset;
4658 }
4659
4660 /* Helper function: return true if a load/store using REGNO with address
4661 BASEREG and offset OFFSET meets the constraints for a 2-byte CDX ldw.n,
4662 stw.n, ldwsp.n, or stwsp.n instruction. */
4663 static bool
can_use_cdx_ldstw(int regno,int basereg,int offset)4664 can_use_cdx_ldstw (int regno, int basereg, int offset)
4665 {
4666 if (CDX_REG_P (regno) && CDX_REG_P (basereg)
4667 && (offset & 0x3) == 0 && 0 <= offset && offset < 0x40)
4668 return true;
4669 else if (basereg == SP_REGNO
4670 && offset >= 0 && offset < 0x80 && (offset & 0x3) == 0)
4671 return true;
4672 return false;
4673 }
4674
4675 /* This function is called from peephole2 optimizers to try to merge
4676 a series of individual loads and stores into a ldwm or stwm. It
4677 can also rewrite addresses inside the individual loads and stores
4678 using a common base register using a scratch register and smaller
4679 offsets if that allows them to use CDX ldw.n or stw.n instructions
4680 instead of 4-byte loads or stores.
4681 N is the number of insns we are trying to merge. SCRATCH is non-null
4682 if there is a scratch register available. The OPERANDS array contains
4683 alternating REG (even) and MEM (odd) operands. */
4684 bool
gen_ldstwm_peep(bool load_p,int n,rtx scratch,rtx * operands)4685 gen_ldstwm_peep (bool load_p, int n, rtx scratch, rtx *operands)
4686 {
4687 /* CDX ldwm/stwm instructions allow a maximum of 12 registers to be
4688 specified. */
4689 #define MAX_LDSTWM_OPS 12
4690 struct ldstwm_operand sort[MAX_LDSTWM_OPS];
4691 int basereg = -1;
4692 int baseoffset;
4693 int i, m, lastoffset, lastreg;
4694 unsigned int regmask = 0, usemask = 0, regset;
4695 bool needscratch;
4696 int newbasereg;
4697 int nbytes;
4698
4699 if (!TARGET_HAS_CDX)
4700 return false;
4701 if (n < 2 || n > MAX_LDSTWM_OPS)
4702 return false;
4703
4704 /* Check all the operands for validity and initialize the sort array.
4705 The places where we return false here are all situations that aren't
4706 expected to ever happen -- invalid patterns, invalid registers, etc. */
4707 for (i = 0; i < n; i++)
4708 {
4709 rtx base, offset;
4710 rtx reg = operands[i];
4711 rtx mem = operands[i + n];
4712 int r, o, regno;
4713 bool bad = false;
4714
4715 if (!REG_P (reg) || !MEM_P (mem))
4716 return false;
4717
4718 regno = REGNO (reg);
4719 if (regno > 31)
4720 return false;
4721 if (load_p && (regmask & (1 << regno)) != 0)
4722 return false;
4723 regmask |= 1 << regno;
4724
4725 if (!split_mem_address (XEXP (mem, 0), &base, &offset))
4726 return false;
4727 r = REGNO (base);
4728 o = INTVAL (offset);
4729
4730 if (basereg == -1)
4731 basereg = r;
4732 else if (r != basereg)
4733 bad = true;
4734 usemask |= 1 << r;
4735
4736 sort[i].bad = bad;
4737 sort[i].rewrite = false;
4738 sort[i].offset = o;
4739 sort[i].reg = reg;
4740 sort[i].mem = mem;
4741 }
4742
4743 /* If we are doing a series of register loads, we can't safely reorder
4744 them if any of the regs used in addr expressions are also being set. */
4745 if (load_p && (regmask & usemask))
4746 return false;
4747
4748 /* Sort the array by increasing mem offset order, then check that
4749 offsets are valid and register order matches mem order. At the
4750 end of this loop, m is the number of loads/stores we will try to
4751 combine; the rest are leftovers. */
4752 qsort (sort, n, sizeof (struct ldstwm_operand), compare_ldstwm_operands);
4753
4754 baseoffset = sort[0].offset;
4755 needscratch = baseoffset != 0;
4756 if (needscratch && !scratch)
4757 return false;
4758
4759 lastreg = regmask = regset = 0;
4760 lastoffset = baseoffset;
4761 for (m = 0; m < n && !sort[m].bad; m++)
4762 {
4763 int thisreg = REGNO (sort[m].reg);
4764 if (sort[m].offset != lastoffset
4765 || (m > 0 && lastreg >= thisreg)
4766 || !nios2_ldstwm_regset_p (thisreg, ®set))
4767 break;
4768 lastoffset += 4;
4769 lastreg = thisreg;
4770 regmask |= (1 << thisreg);
4771 }
4772
4773 /* For loads, make sure we are not overwriting the scratch reg.
4774 The peephole2 pattern isn't supposed to match unless the register is
4775 unused all the way through, so this isn't supposed to happen anyway. */
4776 if (load_p
4777 && needscratch
4778 && ((1 << REGNO (scratch)) & regmask) != 0)
4779 return false;
4780 newbasereg = needscratch ? (int) REGNO (scratch) : basereg;
4781
4782 /* We may be able to combine only the first m of the n total loads/stores
4783 into a single instruction. If m < 2, there's no point in emitting
4784 a ldwm/stwm at all, but we might be able to do further optimizations
4785 if we have a scratch. We will count the instruction lengths of the
4786 old and new patterns and store the savings in nbytes. */
4787 if (m < 2)
4788 {
4789 if (!needscratch)
4790 return false;
4791 m = 0;
4792 nbytes = 0;
4793 }
4794 else
4795 nbytes = -4; /* Size of ldwm/stwm. */
4796 if (needscratch)
4797 {
4798 int bo = baseoffset > 0 ? baseoffset : -baseoffset;
4799 if (CDX_REG_P (newbasereg)
4800 && CDX_REG_P (basereg)
4801 && bo <= 128 && bo > 0 && (bo & (bo - 1)) == 0)
4802 nbytes -= 2; /* Size of addi.n/subi.n. */
4803 else
4804 nbytes -= 4; /* Size of non-CDX addi. */
4805 }
4806
4807 /* Count the size of the input load/store instructions being replaced. */
4808 for (i = 0; i < m; i++)
4809 if (can_use_cdx_ldstw (REGNO (sort[i].reg), basereg, sort[i].offset))
4810 nbytes += 2;
4811 else
4812 nbytes += 4;
4813
4814 /* We may also be able to save a bit if we can rewrite non-CDX
4815 load/stores that can't be combined into the ldwm/stwm into CDX
4816 load/stores using the scratch reg. For example, this might happen
4817 if baseoffset is large, by bringing in the offsets in the load/store
4818 instructions within the range that fits in the CDX instruction. */
4819 if (needscratch && CDX_REG_P (newbasereg))
4820 for (i = m; i < n && !sort[i].bad; i++)
4821 if (!can_use_cdx_ldstw (REGNO (sort[i].reg), basereg, sort[i].offset)
4822 && can_use_cdx_ldstw (REGNO (sort[i].reg), newbasereg,
4823 sort[i].offset - baseoffset))
4824 {
4825 sort[i].rewrite = true;
4826 nbytes += 2;
4827 }
4828
4829 /* Are we good to go? */
4830 if (nbytes <= 0)
4831 return false;
4832
4833 /* Emit the scratch load. */
4834 if (needscratch)
4835 emit_insn (gen_rtx_SET (scratch, XEXP (sort[0].mem, 0)));
4836
4837 /* Emit the ldwm/stwm insn. */
4838 if (m > 0)
4839 {
4840 rtvec p = rtvec_alloc (m);
4841 for (i = 0; i < m; i++)
4842 {
4843 int offset = sort[i].offset;
4844 rtx mem, reg = sort[i].reg;
4845 rtx base_reg = gen_rtx_REG (Pmode, newbasereg);
4846 if (needscratch)
4847 offset -= baseoffset;
4848 mem = gen_rtx_MEM (SImode, plus_constant (Pmode, base_reg, offset));
4849 if (load_p)
4850 RTVEC_ELT (p, i) = gen_rtx_SET (reg, mem);
4851 else
4852 RTVEC_ELT (p, i) = gen_rtx_SET (mem, reg);
4853 }
4854 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4855 }
4856
4857 /* Emit any leftover load/stores as individual instructions, doing
4858 the previously-noted rewrites to use the scratch reg. */
4859 for (i = m; i < n; i++)
4860 {
4861 rtx reg = sort[i].reg;
4862 rtx mem = sort[i].mem;
4863 if (sort[i].rewrite)
4864 {
4865 int offset = sort[i].offset - baseoffset;
4866 mem = gen_rtx_MEM (SImode, plus_constant (Pmode, scratch, offset));
4867 }
4868 if (load_p)
4869 emit_move_insn (reg, mem);
4870 else
4871 emit_move_insn (mem, reg);
4872 }
4873 return true;
4874 }
4875
/* Implement TARGET_MACHINE_DEPENDENT_REORG:
   We use this hook when emitting CDX code to enforce the 4-byte
   alignment requirement for labels that are used as the targets of
   jmpi instructions.  CDX code can otherwise contain a mix of 16-bit
   and 32-bit instructions aligned on any 16-bit boundary, but functions
   and jmpi labels have to be 32-bit aligned because of the way the address
   is encoded in the instruction.  */

/* Per-label alignment (log2) table, indexed by label number relative to
   MIN_LABELNO; consumed by nios2_label_align below.  */
static unsigned char *label_align;
static int min_labelno, max_labelno;

static void
nios2_reorg (void)
{
  bool changed = true;
  rtx_insn *insn;

  if (!TARGET_HAS_CDX)
    return;

  /* Initialize the data structures.  */
  if (label_align)
    free (label_align);
  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  label_align = XCNEWVEC (unsigned char, max_labelno - min_labelno + 1);

  /* Iterate on inserting alignment and adjusting branch lengths until
     no more changes.  */
  while (changed)
    {
      changed = false;
      shorten_branches (get_insns ());

      for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	if (JUMP_P (insn) && insn_variable_length_p (insn))
	  {
	    rtx label = JUMP_LABEL (insn);
	    /* We use the current fact that all cases of 'jmpi'
	       doing the actual branch in the machine description
	       has a computed length of 6 or 8.  Length 4 and below
	       are all PC-relative 'br' branches without the jump-align
	       problem.  */
	    if (label && LABEL_P (label) && get_attr_length (insn) > 4)
	      {
		int index = CODE_LABEL_NUMBER (label) - min_labelno;
		/* Request 4-byte (2^2) alignment; re-run shorten_branches
		   since the added padding can change other lengths.  */
		if (label_align[index] != 2)
		  {
		    label_align[index] = 2;
		    changed = true;
		  }
	      }
	  }
    }
}
4931
4932 /* Implement LABEL_ALIGN, using the information gathered in nios2_reorg. */
4933 int
nios2_label_align(rtx label)4934 nios2_label_align (rtx label)
4935 {
4936 int n = CODE_LABEL_NUMBER (label);
4937
4938 if (label_align && n >= min_labelno && n <= max_labelno)
4939 return MAX (label_align[n - min_labelno], align_labels_log);
4940 return align_labels_log;
4941 }
4942
4943 /* Implement ADJUST_REG_ALLOC_ORDER. We use the default ordering
4944 for R1 and non-CDX R2 code; for CDX we tweak thing to prefer
4945 the registers that can be used as operands to instructions that
4946 have 3-bit register fields. */
4947 void
nios2_adjust_reg_alloc_order(void)4948 nios2_adjust_reg_alloc_order (void)
4949 {
4950 const int cdx_reg_alloc_order[] =
4951 {
4952 /* Call-clobbered GPRs within CDX 3-bit encoded range. */
4953 2, 3, 4, 5, 6, 7,
4954 /* Call-saved GPRs within CDX 3-bit encoded range. */
4955 16, 17,
4956 /* Other call-clobbered GPRs. */
4957 8, 9, 10, 11, 12, 13, 14, 15,
4958 /* Other call-saved GPRs. RA placed first since it is always saved. */
4959 31, 18, 19, 20, 21, 22, 23, 28,
4960 /* Fixed GPRs, not used by the register allocator. */
4961 0, 1, 24, 25, 26, 27, 29, 30, 32, 33, 34, 35, 36, 37, 38, 39
4962 };
4963
4964 if (TARGET_HAS_CDX)
4965 memcpy (reg_alloc_order, cdx_reg_alloc_order,
4966 sizeof (int) * FIRST_PSEUDO_REGISTER);
4967 }
4968
4969
/* Initialize the GCC target structure.  Each hook below overrides the
   default in target-def.h with the Nios II implementation defined
   earlier in this file (or a generic hook_* helper).  */

/* Assembly output hooks.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue

#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p

#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags

/* Machine-specific builtins.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS nios2_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN nios2_expand_builtin
#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL nios2_builtin_decl

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS nios2_init_libfuncs

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE nios2_can_eliminate

/* Argument passing and return values.  */
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG nios2_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT nios2_trampoline_init

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE nios2_function_value

#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE nios2_libcall_value

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY nios2_return_in_memory

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs

#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size

/* Addressing and constants.  */
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS nios2_delegitimize_address

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS nios2_rtx_costs

/* TLS is only available with the Linux ABI.  */
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS TARGET_LINUX_ABI

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem

#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel

/* Operand printing.  */
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P nios2_print_operand_punct_valid_p

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND nios2_print_operand

#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END nios2_asm_file_end

/* Option handling and per-function target attributes.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE nios2_option_override

#undef TARGET_OPTION_SAVE
#define TARGET_OPTION_SAVE nios2_option_save

#undef TARGET_OPTION_RESTORE
#define TARGET_OPTION_RESTORE nios2_option_restore

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function

#undef TARGET_OPTION_VALID_ATTRIBUTE_P
#define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p

#undef TARGET_OPTION_PRAGMA_PARSE
#define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse

#undef TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes

/* Thunk output.  */
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK nios2_asm_output_mi_thunk

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG nios2_reorg

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-nios2.h"
5105