1*404b540aSrobert /* Subroutines for assembler code output on the TMS320C[34]x
2*404b540aSrobert Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003,
3*404b540aSrobert 2004, 2005
4*404b540aSrobert Free Software Foundation, Inc.
5*404b540aSrobert
6*404b540aSrobert Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
7*404b540aSrobert and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
8*404b540aSrobert
9*404b540aSrobert This file is part of GCC.
10*404b540aSrobert
11*404b540aSrobert GCC is free software; you can redistribute it and/or modify
12*404b540aSrobert it under the terms of the GNU General Public License as published by
13*404b540aSrobert the Free Software Foundation; either version 2, or (at your option)
14*404b540aSrobert any later version.
15*404b540aSrobert
16*404b540aSrobert GCC is distributed in the hope that it will be useful,
17*404b540aSrobert but WITHOUT ANY WARRANTY; without even the implied warranty of
18*404b540aSrobert MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19*404b540aSrobert GNU General Public License for more details.
20*404b540aSrobert
21*404b540aSrobert You should have received a copy of the GNU General Public License
22*404b540aSrobert along with GCC; see the file COPYING. If not, write to
23*404b540aSrobert the Free Software Foundation, 51 Franklin Street, Fifth Floor,
24*404b540aSrobert Boston, MA 02110-1301, USA. */
25*404b540aSrobert
26*404b540aSrobert /* Some output-actions in c4x.md need these. */
27*404b540aSrobert #include "config.h"
28*404b540aSrobert #include "system.h"
29*404b540aSrobert #include "coretypes.h"
30*404b540aSrobert #include "tm.h"
31*404b540aSrobert #include "rtl.h"
32*404b540aSrobert #include "tree.h"
33*404b540aSrobert #include "regs.h"
34*404b540aSrobert #include "hard-reg-set.h"
35*404b540aSrobert #include "basic-block.h"
36*404b540aSrobert #include "real.h"
37*404b540aSrobert #include "insn-config.h"
38*404b540aSrobert #include "insn-attr.h"
39*404b540aSrobert #include "conditions.h"
40*404b540aSrobert #include "output.h"
41*404b540aSrobert #include "function.h"
42*404b540aSrobert #include "expr.h"
43*404b540aSrobert #include "optabs.h"
44*404b540aSrobert #include "libfuncs.h"
45*404b540aSrobert #include "flags.h"
46*404b540aSrobert #include "recog.h"
47*404b540aSrobert #include "ggc.h"
48*404b540aSrobert #include "cpplib.h"
49*404b540aSrobert #include "toplev.h"
50*404b540aSrobert #include "tm_p.h"
51*404b540aSrobert #include "target.h"
52*404b540aSrobert #include "target-def.h"
53*404b540aSrobert #include "langhooks.h"
54*404b540aSrobert
/* Cached symbol_ref rtxes for out-of-line arithmetic helpers that the
   expanders in c4x.md emit calls to; presumably initialized by
   c4x_init_libfuncs (declared below) -- initializer not visible in
   this chunk.  */
rtx smulhi3_libfunc;		/* Signed HImode multiply.  */
rtx umulhi3_libfunc;		/* Unsigned HImode multiply.  */
rtx fix_truncqfhi2_libfunc;	/* QFmode -> HImode, signed.  */
rtx fixuns_truncqfhi2_libfunc;	/* QFmode -> HImode, unsigned.  */
rtx fix_trunchfhi2_libfunc;	/* HFmode -> HImode, signed.  */
rtx fixuns_trunchfhi2_libfunc;	/* HFmode -> HImode, unsigned.  */
rtx floathiqf2_libfunc;		/* HImode -> QFmode, signed.  */
rtx floatunshiqf2_libfunc;	/* HImode -> QFmode, unsigned.  */
rtx floathihf2_libfunc;		/* HImode -> HFmode, signed.  */
rtx floatunshihf2_libfunc;	/* HImode -> HFmode, unsigned.  */
65*404b540aSrobert
/* Nonzero while compiling a function determined to be a leaf function
   (NOTE(review): the setter is c4x_leaf_function_p, declared below but
   not visible in this chunk -- confirm).  */
static int c4x_leaf_function;

/* Names used when printing registers holding floating-point values;
   the initializer macro comes from the target headers.  */
static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
69*404b540aSrobert
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.
   The "Modes" column lists the modes each register can hold and
   "Saved" which mode (if any) it is call-saved in.  The entry order
   must match the hard register numbering in c4x.h.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  R0R1_REGS,			/* R0           QI, QF, HF      No.  */
  R0R1_REGS,			/* R1           QI, QF, HF      No.  */
  R2R3_REGS,			/* R2           QI, QF, HF      No.  */
  R2R3_REGS,			/* R3           QI, QF, HF      No.  */
  EXT_LOW_REGS,			/* R4           QI, QF, HF      QI.  */
  EXT_LOW_REGS,			/* R5           QI, QF, HF      QI.  */
  EXT_LOW_REGS,			/* R6           QI, QF, HF      QF.  */
  EXT_LOW_REGS,			/* R7           QI, QF, HF      QF.  */
  ADDR_REGS,			/* AR0          QI              No.  */
  ADDR_REGS,			/* AR1          QI              No.  */
  ADDR_REGS,			/* AR2          QI              No.  */
  ADDR_REGS,			/* AR3          QI              QI.  */
  ADDR_REGS,			/* AR4          QI              QI.  */
  ADDR_REGS,			/* AR5          QI              QI.  */
  ADDR_REGS,			/* AR6          QI              QI.  */
  ADDR_REGS,			/* AR7          QI              QI.  */
  DP_REG,			/* DP           QI              No.  */
  INDEX_REGS,			/* IR0          QI              No.  */
  INDEX_REGS,			/* IR1          QI              No.  */
  BK_REG,			/* BK           QI              QI.  */
  SP_REG,			/* SP           QI              No.  */
  ST_REG,			/* ST           CC              No.  */
  NO_REGS,			/* DIE/IE                       No.  */
  NO_REGS,			/* IIE/IF                       No.  */
  NO_REGS,			/* IIF/IOF                      No.  */
  INT_REGS,			/* RS           QI              No.  */
  INT_REGS,			/* RE           QI              No.  */
  RC_REG,			/* RC           QI              No.  */
  EXT_REGS,			/* R8           QI, QF, HF      QI.  */
  EXT_REGS,			/* R9           QI, QF, HF      No.  */
  EXT_REGS,			/* R10          QI, QF, HF      No.  */
  EXT_REGS,			/* R11          QI, QF, HF      No.  */
};
111*404b540aSrobert
/* Mode to use when caller-saving each hard register, indexed by REGNO.
   NOTE(review): the VOIDmode entries appear to mark registers that are
   never caller-saved -- confirm against the uses in c4x.h.  The entry
   order must match c4x_regclass_map above.  */

enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  HFmode,			/* R0           QI, QF, HF      No.  */
  HFmode,			/* R1           QI, QF, HF      No.  */
  HFmode,			/* R2           QI, QF, HF      No.  */
  HFmode,			/* R3           QI, QF, HF      No.  */
  QFmode,			/* R4           QI, QF, HF      QI.  */
  QFmode,			/* R5           QI, QF, HF      QI.  */
  QImode,			/* R6           QI, QF, HF      QF.  */
  QImode,			/* R7           QI, QF, HF      QF.  */
  QImode,			/* AR0          QI              No.  */
  QImode,			/* AR1          QI              No.  */
  QImode,			/* AR2          QI              No.  */
  QImode,			/* AR3          QI              QI.  */
  QImode,			/* AR4          QI              QI.  */
  QImode,			/* AR5          QI              QI.  */
  QImode,			/* AR6          QI              QI.  */
  QImode,			/* AR7          QI              QI.  */
  VOIDmode,			/* DP           QI              No.  */
  QImode,			/* IR0          QI              No.  */
  QImode,			/* IR1          QI              No.  */
  QImode,			/* BK           QI              QI.  */
  VOIDmode,			/* SP           QI              No.  */
  VOIDmode,			/* ST           CC              No.  */
  VOIDmode,			/* DIE/IE                       No.  */
  VOIDmode,			/* IIE/IF                       No.  */
  VOIDmode,			/* IIF/IOF                      No.  */
  QImode,			/* RS           QI              No.  */
  QImode,			/* RE           QI              No.  */
  VOIDmode,			/* RC           QI              No.  */
  QFmode,			/* R8           QI, QF, HF      QI.  */
  HFmode,			/* R9           QI, QF, HF      No.  */
  HFmode,			/* R10          QI, QF, HF      No.  */
  HFmode,			/* R11          QI, QF, HF      No.  */
};
148*404b540aSrobert
149*404b540aSrobert
/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

rtx c4x_compare_op0;		/* First operand of the pending compare.  */
rtx c4x_compare_op1;		/* Second operand of the pending compare.  */

int c4x_cpu_version = 40;	/* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  Presumably each tree accumulates the function
   names given to the corresponding #pragma -- the pragma handlers are
   not visible in this chunk; see c4x_check_attribute below for how the
   lists are consumed.  */

tree code_tree = NULL_TREE;
tree data_tree = NULL_TREE;
tree pure_tree = NULL_TREE;
tree noreturn_tree = NULL_TREE;
tree interrupt_tree = NULL_TREE;
tree naked_tree = NULL_TREE;
166*404b540aSrobert
/* Forward declarations of file-local helpers and the static functions
   installed as target hooks in the TARGET_* macro block below.  */
static bool c4x_handle_option (size_t, const char *, int);
static int c4x_isr_reg_used_p (unsigned int);
static int c4x_leaf_function_p (void);
static int c4x_naked_function_p (void);
static int c4x_immed_int_constant (rtx);
static int c4x_immed_float_constant (rtx);
static int c4x_R_indirect (rtx);
static void c4x_S_address_parse (rtx , int *, int *, int *, int *);
static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
static void c4x_file_start (void);
static void c4x_file_end (void);
static void c4x_check_attribute (const char *, tree, tree, tree *);
static int c4x_r11_set_p (rtx);
static int c4x_rptb_valid_p (rtx, rtx);
static void c4x_reorg (void);
static int c4x_label_ref_used_p (rtx, rtx);
static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
/* Tentative declaration; the table itself is defined later in the file.  */
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes (tree, tree *);
static void c4x_asm_named_section (const char *, unsigned int, tree);
static int c4x_adjust_cost (rtx, rtx, rtx, int);
static void c4x_globalize_label (FILE *, const char *);
static bool c4x_rtx_costs (rtx, int, int, int *);
static int c4x_address_cost (rtx);
static void c4x_init_libfuncs (void);
static void c4x_external_libcall (rtx);
static rtx c4x_struct_value_rtx (tree, int);
static tree c4x_gimplify_va_arg_expr (tree, tree, tree *, tree *);
198*404b540aSrobert
/* Initialize the GCC target structure.  */

/* Emit "bytes" with .word and disable the aligned HI/SI directives
   (NOTE(review): presumably because the smallest addressable unit on
   this target is a 32-bit word -- confirm against BITS_PER_UNIT in
   c4x.h).  */
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START c4x_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END c4x_file_end

#undef TARGET_ASM_EXTERNAL_LIBCALL
#define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall

/* Play safe, not the fastest code.  */
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS	(MASK_ALIASES | MASK_PARALLEL \
					 | MASK_PARALLEL_MPY | MASK_RPTB)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION c4x_handle_option

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

/* NOTE(review): c4x_init_builtins/c4x_expand_builtin have no forward
   declarations above -- presumably declared in c4x-protos.h.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS c4x_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST c4x_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR c4x_gimplify_va_arg_expr

/* The single instance of the target hook vector, built from the
   TARGET_* overrides above plus the defaults in target-def.h.  */
struct gcc_target targetm = TARGET_INITIALIZER;
259*404b540aSrobert
260*404b540aSrobert /* Implement TARGET_HANDLE_OPTION. */
261*404b540aSrobert
262*404b540aSrobert static bool
c4x_handle_option(size_t code,const char * arg,int value)263*404b540aSrobert c4x_handle_option (size_t code, const char *arg, int value)
264*404b540aSrobert {
265*404b540aSrobert switch (code)
266*404b540aSrobert {
267*404b540aSrobert case OPT_m30: c4x_cpu_version = 30; return true;
268*404b540aSrobert case OPT_m31: c4x_cpu_version = 31; return true;
269*404b540aSrobert case OPT_m32: c4x_cpu_version = 32; return true;
270*404b540aSrobert case OPT_m33: c4x_cpu_version = 33; return true;
271*404b540aSrobert case OPT_m40: c4x_cpu_version = 40; return true;
272*404b540aSrobert case OPT_m44: c4x_cpu_version = 44; return true;
273*404b540aSrobert
274*404b540aSrobert case OPT_mcpu_:
275*404b540aSrobert if (arg[0] == 'c' || arg[0] == 'C')
276*404b540aSrobert arg++;
277*404b540aSrobert value = atoi (arg);
278*404b540aSrobert switch (value)
279*404b540aSrobert {
280*404b540aSrobert case 30: case 31: case 32: case 33: case 40: case 44:
281*404b540aSrobert c4x_cpu_version = value;
282*404b540aSrobert return true;
283*404b540aSrobert }
284*404b540aSrobert return false;
285*404b540aSrobert
286*404b540aSrobert default:
287*404b540aSrobert return true;
288*404b540aSrobert }
289*404b540aSrobert }
290*404b540aSrobert
291*404b540aSrobert /* Override command line options.
292*404b540aSrobert Called once after all options have been parsed.
293*404b540aSrobert Mostly we process the processor
294*404b540aSrobert type and sometimes adjust other TARGET_ options. */
295*404b540aSrobert
296*404b540aSrobert void
c4x_override_options(void)297*404b540aSrobert c4x_override_options (void)
298*404b540aSrobert {
299*404b540aSrobert /* Convert foo / 8.0 into foo * 0.125, etc. */
300*404b540aSrobert set_fast_math_flags (1);
301*404b540aSrobert
302*404b540aSrobert /* We should phase out the following at some stage.
303*404b540aSrobert This provides compatibility with the old -mno-aliases option. */
304*404b540aSrobert if (! TARGET_ALIASES && ! flag_argument_noalias)
305*404b540aSrobert flag_argument_noalias = 1;
306*404b540aSrobert
307*404b540aSrobert if (!TARGET_C3X)
308*404b540aSrobert target_flags |= MASK_MPYI | MASK_DB;
309*404b540aSrobert
310*404b540aSrobert if (optimize < 2)
311*404b540aSrobert target_flags &= ~(MASK_RPTB | MASK_PARALLEL);
312*404b540aSrobert
313*404b540aSrobert if (!TARGET_PARALLEL)
314*404b540aSrobert target_flags &= ~MASK_PARALLEL_MPY;
315*404b540aSrobert }
316*404b540aSrobert
317*404b540aSrobert
318*404b540aSrobert /* This is called before c4x_override_options. */
319*404b540aSrobert
320*404b540aSrobert void
c4x_optimization_options(int level ATTRIBUTE_UNUSED,int size ATTRIBUTE_UNUSED)321*404b540aSrobert c4x_optimization_options (int level ATTRIBUTE_UNUSED,
322*404b540aSrobert int size ATTRIBUTE_UNUSED)
323*404b540aSrobert {
324*404b540aSrobert /* Scheduling before register allocation can screw up global
325*404b540aSrobert register allocation, especially for functions that use MPY||ADD
326*404b540aSrobert instructions. The benefit we gain we get by scheduling before
327*404b540aSrobert register allocation is probably marginal anyhow. */
328*404b540aSrobert flag_schedule_insns = 0;
329*404b540aSrobert }
330*404b540aSrobert
331*404b540aSrobert
/* Write an ASCII string.  */

/* Maximum number of characters batched into one quoted string.  */
#define C4X_ASCII_LIMIT 40

/* Emit LEN characters starting at PTR to STREAM as .byte directives.
   Printable characters are collected into quoted strings of at most
   C4X_ASCII_LIMIT characters; everything else is emitted as a decimal
   number.  For the TI assembler (TARGET_TI) the characters " and \
   are always emitted numerically (they cannot be escaped there), and
   output lines are broken at roughly 80 columns.

   State during the loop:
     sbuf/s  -- pending quoted-string buffer and its fill count;
     l       -- approximate length of the current output line;
     first   -- nonzero if nothing has been emitted on this line yet
                (controls the leading comma);
     onlys   -- set when the buffer filled up and only a flush, not a
                numeric emission, is wanted this iteration.  */

void
c4x_output_ascii (FILE *stream, const char *ptr, int len)
{
  char sbuf[C4X_ASCII_LIMIT + 1];
  int s, l, special, first = 1, onlys;

  if (len)
    fprintf (stream, "\t.byte\t");

  for (s = l = 0; len > 0; --len, ++ptr)
    {
      onlys = 0;

      /* Escape " and \ with a \".  */
      special = *ptr == '\"' || *ptr == '\\';

      /* If printable - add to buff.  */
      if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
	{
	  if (special)
	    sbuf[s++] = '\\';
	  sbuf[s++] = *ptr;
	  if (s < C4X_ASCII_LIMIT - 1)
	    continue;
	  /* Buffer full: fall through to flush it, but skip the
	     numeric path below.  */
	  onlys = 1;
	}
      /* Flush any pending quoted string before emitting a number or
         because the buffer filled up.  */
      if (s)
	{
	  if (first)
	    first = 0;
	  else
	    {
	      fputc (',', stream);
	      l++;
	    }

	  sbuf[s] = 0;
	  fprintf (stream, "\"%s\"", sbuf);
	  l += s + 2;
	  /* TI assembler: start a fresh .byte line when this one is
	     getting long and more data remains.  */
	  if (TARGET_TI && l >= 80 && len > 1)
	    {
	      fprintf (stream, "\n\t.byte\t");
	      first = 1;
	      l = 0;
	    }

	  s = 0;
	}
      if (onlys)
	continue;

      /* Non-printable (or TI-special) character: emit it numerically.  */
      if (first)
	first = 0;
      else
	{
	  fputc (',', stream);
	  l++;
	}

      fprintf (stream, "%d", *ptr);
      l += 3;
      if (TARGET_TI && l >= 80 && len > 1)
	{
	  fprintf (stream, "\n\t.byte\t");
	  first = 1;
	  l = 0;
	}
    }
  /* Flush any characters still buffered after the loop.  */
  if (s)
    {
      if (! first)
	fputc (',', stream);

      sbuf[s] = 0;
      fprintf (stream, "\"%s\"", sbuf);
      s = 0;
    }
  fputc ('\n', stream);
}
415*404b540aSrobert
416*404b540aSrobert
417*404b540aSrobert int
c4x_hard_regno_mode_ok(unsigned int regno,enum machine_mode mode)418*404b540aSrobert c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
419*404b540aSrobert {
420*404b540aSrobert switch (mode)
421*404b540aSrobert {
422*404b540aSrobert #if Pmode != QImode
423*404b540aSrobert case Pmode: /* Pointer (24/32 bits). */
424*404b540aSrobert #endif
425*404b540aSrobert case QImode: /* Integer (32 bits). */
426*404b540aSrobert return IS_INT_REGNO (regno);
427*404b540aSrobert
428*404b540aSrobert case QFmode: /* Float, Double (32 bits). */
429*404b540aSrobert case HFmode: /* Long Double (40 bits). */
430*404b540aSrobert return IS_EXT_REGNO (regno);
431*404b540aSrobert
432*404b540aSrobert case CCmode: /* Condition Codes. */
433*404b540aSrobert case CC_NOOVmode: /* Condition Codes. */
434*404b540aSrobert return IS_ST_REGNO (regno);
435*404b540aSrobert
436*404b540aSrobert case HImode: /* Long Long (64 bits). */
437*404b540aSrobert /* We need two registers to store long longs. Note that
438*404b540aSrobert it is much easier to constrain the first register
439*404b540aSrobert to start on an even boundary. */
440*404b540aSrobert return IS_INT_REGNO (regno)
441*404b540aSrobert && IS_INT_REGNO (regno + 1)
442*404b540aSrobert && (regno & 1) == 0;
443*404b540aSrobert
444*404b540aSrobert default:
445*404b540aSrobert return 0; /* We don't support these modes. */
446*404b540aSrobert }
447*404b540aSrobert
448*404b540aSrobert return 0;
449*404b540aSrobert }
450*404b540aSrobert
/* Return nonzero if REGNO1 can be renamed to REGNO2.  */
int
c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
{
  int int_saved1 = IS_INT_CALL_SAVED_REGNO (regno1) != 0;
  int int_saved2 = IS_INT_CALL_SAVED_REGNO (regno2) != 0;
  int flt_saved1 = IS_FLOAT_CALL_SAVED_REGNO (regno1) != 0;
  int flt_saved2 = IS_FLOAT_CALL_SAVED_REGNO (regno2) != 0;
  int ext1 = IS_EXT_REGNO (regno1) != 0;
  int ext2 = IS_EXT_REGNO (regno2) != 0;

  /* We cannot copy call saved registers from mode QI into QF or from
     mode QF into QI, so a rename between the two save classes is out.  */
  if ((flt_saved1 && int_saved2) || (int_saved1 && flt_saved2))
    return 0;

  /* We only set the condition codes for extended (40 bit) registers,
     so a rename between an extended and a standard (32 bit) register
     is not allowed in either direction.  */
  if (ext1 != ext2)
    return 0;

  return 1;
}
470*404b540aSrobert
471*404b540aSrobert /* The TI C3x C compiler register argument runtime model uses 6 registers,
472*404b540aSrobert AR2, R2, R3, RC, RS, RE.
473*404b540aSrobert
474*404b540aSrobert The first two floating point arguments (float, double, long double)
475*404b540aSrobert that are found scanning from left to right are assigned to R2 and R3.
476*404b540aSrobert
477*404b540aSrobert The remaining integer (char, short, int, long) or pointer arguments
478*404b540aSrobert are assigned to the remaining registers in the order AR2, R2, R3,
479*404b540aSrobert RC, RS, RE when scanning left to right, except for the last named
480*404b540aSrobert argument prior to an ellipsis denoting variable number of
481*404b540aSrobert arguments. We don't have to worry about the latter condition since
482*404b540aSrobert function.c treats the last named argument as anonymous (unnamed).
483*404b540aSrobert
484*404b540aSrobert All arguments that cannot be passed in registers are pushed onto
485*404b540aSrobert the stack in reverse order (right to left). GCC handles that for us.
486*404b540aSrobert
487*404b540aSrobert c4x_init_cumulative_args() is called at the start, so we can parse
488*404b540aSrobert the args to see how many floating point arguments and how many
489*404b540aSrobert integer (or pointer) arguments there are. c4x_function_arg() is
490*404b540aSrobert then called (sometimes repeatedly) for each argument (parsed left
491*404b540aSrobert to right) to obtain the register to pass the argument in, or zero
492*404b540aSrobert if the argument is to be passed on the stack. Once the compiler is
493*404b540aSrobert happy, c4x_function_arg_advance() is called.
494*404b540aSrobert
495*404b540aSrobert Don't use R0 to pass arguments in, we use 0 to indicate a stack
496*404b540aSrobert argument. */
497*404b540aSrobert
/* Registers used for integer/pointer arguments, indexed by the number
   of floating-point arguments already assigned to registers (0, 1 or
   2): each float consumes one of R2/R3, shifting the integer sequence.
   A zero entry means "pass on the stack".  */
static const int c4x_int_reglist[3][6] =
{
  {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
  {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
  {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
};

/* The first two floating-point arguments go in R2 and R3.  */
static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
506*404b540aSrobert
507*404b540aSrobert
/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.

   Pre-scans the prototype (if any) counting how many float and how
   many integer/pointer arguments will be candidates for register
   passing; c4x_function_arg later converts these counts into the
   actual register limits.  LIBNAME is only used for debug output.  */

void
c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
{
  tree param, next_param;

  cum->floats = cum->ints = 0;
  cum->init = 0;
  cum->var = 0;
  cum->args = 0;

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\nc4x_init_cumulative_args (");
      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);

	  fprintf (stderr, "fntype code = %s, ret code = %s",
		   tree_code_name[(int) TREE_CODE (fntype)],
		   tree_code_name[(int) TREE_CODE (ret_type)]);
	}
      else
	fprintf (stderr, "no fntype");

      if (libname)
	fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  /* Walk the declared parameter types, tallying register-passable
     floats and ints.  */
  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)
    {
      tree type;

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)
	{
	  enum machine_mode mode;

	  /* If the last arg doesn't have void type then we have
	     variable arguments.  */
	  if (! next_param)
	    cum->var = 1;

	  if ((mode = TYPE_MODE (type)))
	    {
	      if (! targetm.calls.must_pass_in_stack (mode, type))
		{
		  /* Look for float, double, or long double argument.  */
		  if (mode == QFmode || mode == HFmode)
		    cum->floats++;
		  /* Look for integer, enumeral, boolean, char, or pointer
		     argument.  */
		  else if (mode == QImode || mode == Pmode)
		    cum->ints++;
		}
	    }
	  cum->args++;
	}
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "%s%s, args = %d)\n",
	     cum->prototype ? ", prototype" : "",
	     cum->var ? ", variable args" : "",
	     cum->args);
}
582*404b540aSrobert
583*404b540aSrobert
584*404b540aSrobert /* Update the data in CUM to advance over an argument
585*404b540aSrobert of mode MODE and data type TYPE.
586*404b540aSrobert (TYPE is null for libcalls where that information may not be available.) */
587*404b540aSrobert
588*404b540aSrobert void
c4x_function_arg_advance(CUMULATIVE_ARGS * cum,enum machine_mode mode,tree type,int named)589*404b540aSrobert c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
590*404b540aSrobert tree type, int named)
591*404b540aSrobert {
592*404b540aSrobert if (TARGET_DEBUG)
593*404b540aSrobert fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
594*404b540aSrobert GET_MODE_NAME (mode), named);
595*404b540aSrobert if (! TARGET_MEMPARM
596*404b540aSrobert && named
597*404b540aSrobert && type
598*404b540aSrobert && ! targetm.calls.must_pass_in_stack (mode, type))
599*404b540aSrobert {
600*404b540aSrobert /* Look for float, double, or long double argument. */
601*404b540aSrobert if (mode == QFmode || mode == HFmode)
602*404b540aSrobert cum->floats++;
603*404b540aSrobert /* Look for integer, enumeral, boolean, char, or pointer argument. */
604*404b540aSrobert else if (mode == QImode || mode == Pmode)
605*404b540aSrobert cum->ints++;
606*404b540aSrobert }
607*404b540aSrobert else if (! TARGET_MEMPARM && ! type)
608*404b540aSrobert {
609*404b540aSrobert /* Handle libcall arguments. */
610*404b540aSrobert if (mode == QFmode || mode == HFmode)
611*404b540aSrobert cum->floats++;
612*404b540aSrobert else if (mode == QImode || mode == Pmode)
613*404b540aSrobert cum->ints++;
614*404b540aSrobert }
615*404b540aSrobert return;
616*404b540aSrobert }
617*404b540aSrobert
618*404b540aSrobert
/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).

   Returns a REG rtx for a register argument, or NULL_RTX for a stack
   argument.  The first call lazily converts the float/int tallies
   made by c4x_init_cumulative_args into register limits, then resets
   the tallies so they can count the arguments actually assigned.  */

struct rtx_def *
c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
	          tree type, int named)
{
  int reg = 0;			/* Default to passing argument on stack.  */

  if (! cum->init)
    {
      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus number of floats passed
	 in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
	6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)
	cum->maxints = 6;

      /* Restart the tallies: from here on floats/ints count arguments
	 already placed in registers.  */
      cum->ints = cum->floats = 0;
      cum->init = 1;
    }

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)
    return 0;

  if (! TARGET_MEMPARM
      && named
      && type
      && ! targetm.calls.must_pass_in_stack (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
	{
	  if (cum->floats < cum->maxfloats)
	    reg = c4x_fp_reglist[cum->floats];
	}
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
	{
	  if (cum->ints < cum->maxints)
	    reg = c4x_int_reglist[cum->maxfloats][cum->ints];
	}
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* We could use a different argument calling model for libcalls,
         since we're only calling functions in libgcc.  Thus we could
         pass arguments for long longs in registers rather than on the
         stack.  In the meantime, use the odd TI format.  We make the
         assumption that we won't have more than two floating point
         args, six integer args, and that all the arguments are of the
         same mode.  */
      if (mode == QFmode || mode == HFmode)
	reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
	reg = c4x_int_reglist[0][cum->ints];
    }

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
	       GET_MODE_NAME (mode), named);
      if (reg)
	fprintf (stderr, ", reg=%s", reg_names[reg]);
      else
	fprintf (stderr, ", stack");
      fprintf (stderr, ")\n");
    }
  if (reg)
    return gen_rtx_REG (mode, reg);
  else
    return NULL_RTX;
}
709*404b540aSrobert
/* C[34]x arguments grow in weird ways (downwards) that the standard
   varargs stuff can't handle.  */
712*404b540aSrobert
/* Implement the gimplify-va-arg target hook.  Because the argument
   area grows downwards, the next argument is fetched by
   pre-decrementing the va_list pointer VALIST by the size of TYPE and
   dereferencing the result.  An argument the ABI passes by reference
   yields a pointer that is dereferenced a second time.  PRE_P/POST_P
   are unused.  Returns the tree for the fetched value.  */
static tree
c4x_gimplify_va_arg_expr (tree valist, tree type,
			  tree *pre_p ATTRIBUTE_UNUSED,
			  tree *post_p ATTRIBUTE_UNUSED)
{
  tree t;
  bool indirect;

  /* When the argument is passed by reference, fetch the pointer
     instead and remember to dereference it below.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  /* valist -= size-in-bytes, then read the object at the new address.  */
  t = build2 (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
	      build_int_cst (NULL_TREE, int_size_in_bytes (type)));
  t = fold_convert (build_pointer_type (type), t);
  t = build_va_arg_indirect_ref (t);

  if (indirect)
    t = build_va_arg_indirect_ref (t);

  return t;
}
735*404b540aSrobert
736*404b540aSrobert
737*404b540aSrobert static int
c4x_isr_reg_used_p(unsigned int regno)738*404b540aSrobert c4x_isr_reg_used_p (unsigned int regno)
739*404b540aSrobert {
740*404b540aSrobert /* Don't save/restore FP or ST, we handle them separately. */
741*404b540aSrobert if (regno == FRAME_POINTER_REGNUM
742*404b540aSrobert || IS_ST_REGNO (regno))
743*404b540aSrobert return 0;
744*404b540aSrobert
745*404b540aSrobert /* We could be a little smarter abut saving/restoring DP.
746*404b540aSrobert We'll only save if for the big memory model or if
747*404b540aSrobert we're paranoid. ;-) */
748*404b540aSrobert if (IS_DP_REGNO (regno))
749*404b540aSrobert return ! TARGET_SMALL || TARGET_PARANOID;
750*404b540aSrobert
751*404b540aSrobert /* Only save/restore regs in leaf function that are used. */
752*404b540aSrobert if (c4x_leaf_function)
753*404b540aSrobert return regs_ever_live[regno] && fixed_regs[regno] == 0;
754*404b540aSrobert
755*404b540aSrobert /* Only save/restore regs that are used by the ISR and regs
756*404b540aSrobert that are likely to be used by functions the ISR calls
757*404b540aSrobert if they are not fixed. */
758*404b540aSrobert return IS_EXT_REGNO (regno)
759*404b540aSrobert || ((regs_ever_live[regno] || call_used_regs[regno])
760*404b540aSrobert && fixed_regs[regno] == 0);
761*404b540aSrobert }
762*404b540aSrobert
763*404b540aSrobert
764*404b540aSrobert static int
c4x_leaf_function_p(void)765*404b540aSrobert c4x_leaf_function_p (void)
766*404b540aSrobert {
767*404b540aSrobert /* A leaf function makes no calls, so we only need
768*404b540aSrobert to save/restore the registers we actually use.
769*404b540aSrobert For the global variable leaf_function to be set, we need
770*404b540aSrobert to define LEAF_REGISTERS and all that it entails.
771*404b540aSrobert Let's check ourselves.... */
772*404b540aSrobert
773*404b540aSrobert if (lookup_attribute ("leaf_pretend",
774*404b540aSrobert TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
775*404b540aSrobert return 1;
776*404b540aSrobert
777*404b540aSrobert /* Use the leaf_pretend attribute at your own risk. This is a hack
778*404b540aSrobert to speed up ISRs that call a function infrequently where the
779*404b540aSrobert overhead of saving and restoring the additional registers is not
780*404b540aSrobert warranted. You must save and restore the additional registers
781*404b540aSrobert required by the called function. Caveat emptor. Here's enough
782*404b540aSrobert rope... */
783*404b540aSrobert
784*404b540aSrobert if (leaf_function_p ())
785*404b540aSrobert return 1;
786*404b540aSrobert
787*404b540aSrobert return 0;
788*404b540aSrobert }
789*404b540aSrobert
790*404b540aSrobert
791*404b540aSrobert static int
c4x_naked_function_p(void)792*404b540aSrobert c4x_naked_function_p (void)
793*404b540aSrobert {
794*404b540aSrobert tree type;
795*404b540aSrobert
796*404b540aSrobert type = TREE_TYPE (current_function_decl);
797*404b540aSrobert return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
798*404b540aSrobert }
799*404b540aSrobert
800*404b540aSrobert
801*404b540aSrobert int
c4x_interrupt_function_p(void)802*404b540aSrobert c4x_interrupt_function_p (void)
803*404b540aSrobert {
804*404b540aSrobert const char *cfun_name;
805*404b540aSrobert if (lookup_attribute ("interrupt",
806*404b540aSrobert TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
807*404b540aSrobert return 1;
808*404b540aSrobert
809*404b540aSrobert /* Look for TI style c_intnn. */
810*404b540aSrobert cfun_name = current_function_name ();
811*404b540aSrobert return cfun_name[0] == 'c'
812*404b540aSrobert && cfun_name[1] == '_'
813*404b540aSrobert && cfun_name[2] == 'i'
814*404b540aSrobert && cfun_name[3] == 'n'
815*404b540aSrobert && cfun_name[4] == 't'
816*404b540aSrobert && ISDIGIT (cfun_name[5])
817*404b540aSrobert && ISDIGIT (cfun_name[6]);
818*404b540aSrobert }
819*404b540aSrobert
/* Emit RTL for the current function's prologue.  Three cases:
   - "naked" functions get no prologue at all;
   - interrupt functions save ST, set up the frame, and push every
     register that c4x_isr_reg_used_p says may need preserving;
   - ordinary functions set up AR3 as frame pointer when required,
     allocate the local frame by adding to SP, and push the live
     call-saved registers.  */
void
c4x_expand_prologue (void)
{
  unsigned int regno;
  int size = get_frame_size ();
  rtx insn;

  /* In functions where ar3 is not used but frame pointers are still
     specified, frame pointers are not adjusted (if >= -O2) and this
     is used so it won't needlessly push the frame pointer.  */
  int dont_push_ar3;

  /* For __naked__ function don't build a prologue.  */
  if (c4x_naked_function_p ())
    {
      return;
    }

  /* For __interrupt__ function build specific prologue.  */
  if (c4x_interrupt_function_p ())
    {
      c4x_leaf_function = c4x_leaf_function_p ();

      insn = emit_insn (gen_push_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      if (size)
	{
	  /* Save the old frame pointer and make AR3 point at the
	     current stack top before allocating the frame.  */
	  insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
				       gen_rtx_REG (QImode, SP_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  /* We require that an ISR uses fewer than 32768 words of
	     local variables, otherwise we have to go to lots of
	     effort to save a register, load it with the desired size,
	     adjust the stack pointer, and then restore the modified
	     register.  Frankly, I think it is a poor ISR that
	     requires more than 32767 words of local temporary
	     storage!  */
	  if (size > 32767)
	    error ("ISR %s requires %d words of local vars, max is 32767",
		   current_function_name (), size);

	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT (size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  if (c4x_isr_reg_used_p (regno))
	    {
	      if (regno == DP_REGNO)
		{
		  insn = emit_insn (gen_push_dp ());
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      else
		{
		  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		  if (IS_EXT_REGNO (regno))
		    {
		      /* Extended-precision registers also need their
			 float part saved.  */
		      insn = emit_insn (gen_pushqf
					(gen_rtx_REG (QFmode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		}
	    }
	}
      /* We need to clear the repeat mode flag if the ISR is
	 going to use a RPTB instruction or uses the RC, RS, or RE
	 registers.  */
      if (regs_ever_live[RC_REGNO]
	  || regs_ever_live[RS_REGNO]
	  || regs_ever_live[RE_REGNO])
	{
	  insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Reload DP reg if we are paranoid about some turkey
         violating small memory model rules.  */
      if (TARGET_SMALL && TARGET_PARANOID)
	{
	  insn = emit_insn (gen_set_ldp_prologue
			    (gen_rtx_REG (QImode, DP_REGNO),
			     gen_rtx_SYMBOL_REF (QImode, "data_sec")));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
  else
    {
      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
					   gen_rtx_REG (QImode, SP_REGNO)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      /* AR3 was pushed explicitly above; don't push it again
		 in the call-saved loop below.  */
	      dont_push_ar3 = 1;
	    }
	  else
	    {
	      /* Since ar3 is not used, we don't need to push it.  */
	      dont_push_ar3 = 1;
	    }
	}
      else
	{
	  /* If we use ar3, we need to push it.  */
	  dont_push_ar3 = 0;
	  if ((size != 0) || (current_function_args_size != 0))
	    {
	      /* If we are omitting the frame pointer, we still have
		 to make space for it so the offsets are correct
		 unless we don't use anything on the stack at all.  */
	      size += 1;
	    }
	}

      if (size > 32767)
	{
	  /* Local vars are too big, it will take multiple operations
	     to increment SP.  Build the frame size in R1 as high part
	     OR low part, then add R1 to SP.
	     NOTE(review): on C3x, lshrqi3 with a negative count
	     appears to act as a left shift of the high 16 bits --
	     confirm against the lshrqi3 pattern in c4x.md.  */
	  if (TARGET_C3X)
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
					   GEN_INT(size >> 16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
					     gen_rtx_REG (QImode, R1_REGNO),
					     GEN_INT(-16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
					   GEN_INT(size & ~0xffff)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
					gen_rtx_REG (QImode, R1_REGNO),
					GEN_INT(size & 0xffff)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, R1_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (size != 0)
	{
	  /* Local vars take up less than 32767 words, so we can directly
	     add the number.  */
	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT (size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Push the live call-saved registers.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  if (regs_ever_live[regno] && ! call_used_regs[regno])
	    {
	      if (IS_FLOAT_CALL_SAVED_REGNO (regno))
		{
		  /* Optionally save the integer part as well as the
		     float part of an extended-precision register.  */
		  if (TARGET_PRESERVE_FLOAT)
		    {
		      insn = emit_insn (gen_pushqi
					(gen_rtx_REG (QImode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		  insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      else if ((! dont_push_ar3) || (regno != AR3_REGNO))
		{
		  insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	    }
	}
    }
}
1008*404b540aSrobert
1009*404b540aSrobert
/* Emit RTL for the current function's epilogue, mirroring
   c4x_expand_prologue: naked functions just return; interrupt
   functions pop the saved registers in reverse order, deallocate the
   frame and return through the interrupt-return pattern; ordinary
   functions pop the call-saved registers, restore AR3 and SP, and
   return (indirectly through R2 when the word at *(AR3 - 1) was
   loaded early to enable a delayed branch).  */
void
c4x_expand_epilogue(void)
{
  int regno;
  int jump = 0;
  int dont_pop_ar3;
  rtx insn;
  int size = get_frame_size ();

  /* For __naked__ function build no epilogue.  */
  if (c4x_naked_function_p ())
    {
      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
      return;
    }

  /* For __interrupt__ function build specific epilogue.  */
  if (c4x_interrupt_function_p ())
    {
      /* Pop in the reverse order of the prologue's pushes.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
	{
	  if (! c4x_isr_reg_used_p (regno))
	    continue;
	  if (regno == DP_REGNO)
	    {
	      insn = emit_insn (gen_pop_dp ());
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      /* We have to use unspec because the compiler will delete insns
		 that are not call-saved.  */
	      if (IS_EXT_REGNO (regno))
		{
		  insn = emit_insn (gen_popqf_unspec
				    (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}
      if (size)
	{
	  /* Deallocate the frame and recover the saved frame pointer.  */
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT(size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_popqi
			    (gen_rtx_REG (QImode, AR3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Load *(AR3 - 1) into R2 now so the function can
		 return through gen_return_indirect_internal below.
		 NOTE(review): the old comment below calls this the
		 "return value" but it looks like the saved return
		 address -- confirm against the call frame layout.  */
	      insn = emit_insn
		(gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
			    gen_rtx_MEM (QImode,
					 gen_rtx_PLUS
					 (QImode, gen_rtx_REG (QImode,
							       AR3_REGNO),
					  constm1_rtx))));
	      RTX_FRAME_RELATED_P (insn) = 1;

	      /* We already have the return value and the fp,
		 so we need to add those to the stack.  */
	      size += 2;
	      jump = 1;
	      dont_pop_ar3 = 1;
	    }
	  else
	    {
	      /* Since ar3 is not used for anything, we don't need to
		 pop it.  */
	      dont_pop_ar3 = 1;
	    }
	}
      else
	{
	  dont_pop_ar3 = 0;	/* If we use ar3, we need to pop it.  */
	  if (size || current_function_args_size)
	    {
	      /* If we are omitting the frame pointer, we still have
		 to make space for it so the offsets are correct
		 unless we don't use anything on the stack at all.  */
	      size += 1;
	    }
	}

      /* Now restore the saved registers, putting in the delayed branch
         where required.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
	{
	  if (regs_ever_live[regno] && ! call_used_regs[regno])
	    {
	      if (regno == AR3_REGNO && dont_pop_ar3)
		continue;

	      if (IS_FLOAT_CALL_SAVED_REGNO (regno))
		{
		  /* Mirror of the prologue: float part first, then
		     (optionally) the integer part.  */
		  insn = emit_insn (gen_popqf_unspec
				    (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		  if (TARGET_PRESERVE_FLOAT)
		    {
		      insn = emit_insn (gen_popqi_unspec
					(gen_rtx_REG (QImode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		}
	      else
		{
		  insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	    }
	}

      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Restore the old FP.  */
	      insn = emit_insn
		(gen_movqi
		 (gen_rtx_REG (QImode, AR3_REGNO),
		  gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));

	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}

      if (size > 32767)
	{
	  /* Local vars are too big, it will take multiple operations
	     to decrement SP.  Build the size in R3 as in the prologue,
	     then subtract R3 from SP.  */
	  if (TARGET_C3X)
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
					   GEN_INT(size >> 16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
					     gen_rtx_REG (QImode, R3_REGNO),
					     GEN_INT(-16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
					   GEN_INT(size & ~0xffff)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
					gen_rtx_REG (QImode, R3_REGNO),
					GEN_INT(size & 0xffff)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, R3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (size != 0)
	{
	  /* Local vars take up less than 32768 words, so we can directly
	     subtract the number.  */
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT(size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      if (jump)
	{
	  /* Return via the address loaded into R2 above.  */
	  insn = emit_jump_insn (gen_return_indirect_internal
				 (gen_rtx_REG (QImode, R2_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else
	{
	  insn = emit_jump_insn (gen_return_from_epilogue ());
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
}
1207*404b540aSrobert
1208*404b540aSrobert
/* Predicate used by the return patterns.  When the function is an
   ordinary one after reload (not naked, not an ISR, no alloca, no
   stack arguments, optimized at -O2 or better, no frame), the result
   says whether some call-saved register other than AR3 is live and
   would need restoring; in every other situation it returns 1.
   NOTE(review): the sense looks inverted relative to the name -- the
   function returns 1 precisely when a register pop would be required.
   Confirm against the uses of c4x_null_epilogue_p in c4x.md before
   relying on the name.  */
int
c4x_null_epilogue_p (void)
{
  int regno;

  if (reload_completed
      && ! c4x_naked_function_p ()
      && ! c4x_interrupt_function_p ()
      && ! current_function_calls_alloca
      && ! current_function_args_size
      && ! (optimize < 2)
      && ! get_frame_size ())
    {
      /* Scan for a live call-saved register (AR3 excepted).  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
	if (regs_ever_live[regno] && ! call_used_regs[regno]
	    && (regno != AR3_REGNO))
	  return 1;
      return 0;
    }
  return 1;
}
1230*404b540aSrobert
1231*404b540aSrobert
/* Expander helper for the move patterns.  Massage OPERANDS so that a
   single move insn in MODE can implement the transfer.  Returns 1
   when the move has been emitted completely here (the caller emits
   nothing), 0 when the caller should emit the normal move pattern
   with the possibly-rewritten operands.  */
int
c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];

  /* Force a mem-to-mem move through a register, except for immediates
     the stik pattern can store directly.  */
  if (! reload_in_progress
      && ! REG_P (op0)
      && ! REG_P (op1)
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))
    {
      /* expand_increment will sometimes create a LO_SUM immediate
	 address.  Undo this silliness.  */
      op1 = XEXP (op1, 1);
    }

  if (symbolic_address_operand (op1, mode))
    {
      if (TARGET_LOAD_ADDRESS)
	{
	  /* Alias analysis seems to do a better job if we force
	     constant addresses to memory after reload.  */
	  emit_insn (gen_load_immed_address (op0, op1));
	  return 1;
	}
      else
	{
	  /* Stick symbol or label address into the constant pool.  */
	  op1 = force_const_mem (Pmode, op1);
	}
    }
  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
    {
      /* We could be a lot smarter about loading some of these
	 constants...  */
      op1 = force_const_mem (mode, op1);
    }

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
	emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
			    gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
    }

  /* Same DP exposure for the destination side.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
	emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
			    gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
    }

  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))
    {
      /* We should only generate these mixed mode patterns
	 during RTL generation.  If we need do it later on
	 then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
	abort ();
      if (GET_MODE (SUBREG_REG (op0)) == QImode)
	op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)
	{
	  op0 = copy_rtx (op0);
	  PUT_MODE (op0, QImode);
	}
      else
	abort ();

      if (mode == QFmode)
	emit_insn (gen_storeqf_int_clobber (op0, op1));
      else
	abort ();
      return 1;
    }

  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))
    {
      /* We should only generate these mixed mode patterns
	 during RTL generation.  If we need do it later on
	 then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
	abort ();
      if (GET_MODE (SUBREG_REG (op1)) == QImode)
	op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)
	{
	  op1 = copy_rtx (op1);
	  PUT_MODE (op1, QImode);
	}
      else
	abort ();

      if (mode == QFmode)
	emit_insn (gen_loadqf_int_clobber (op0, op1));
      else
	abort ();
      return 1;
    }

  /* Integer constants that fit neither the 16-bit immediate field nor
     the HIGH form need a dedicated two-insn load pattern.  */
  if (mode == QImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))
    {
      emit_insn (gen_loadqi_big_constant (op0, op1));
      return 1;
    }

  if (mode == HImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))
    {
      emit_insn (gen_loadhi_big_constant (op0, op1));
      return 1;
    }

  /* Adjust operands in case we have modified them.  */
  operands[0] = op0;
  operands[1] = op1;

  /* Emit normal pattern.  */
  return 0;
}
1378*404b540aSrobert
1379*404b540aSrobert
1380*404b540aSrobert void
c4x_emit_libcall(rtx libcall,enum rtx_code code,enum machine_mode dmode,enum machine_mode smode,int noperands,rtx * operands)1381*404b540aSrobert c4x_emit_libcall (rtx libcall, enum rtx_code code,
1382*404b540aSrobert enum machine_mode dmode, enum machine_mode smode,
1383*404b540aSrobert int noperands, rtx *operands)
1384*404b540aSrobert {
1385*404b540aSrobert rtx ret;
1386*404b540aSrobert rtx insns;
1387*404b540aSrobert rtx equiv;
1388*404b540aSrobert
1389*404b540aSrobert start_sequence ();
1390*404b540aSrobert switch (noperands)
1391*404b540aSrobert {
1392*404b540aSrobert case 2:
1393*404b540aSrobert ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1394*404b540aSrobert operands[1], smode);
1395*404b540aSrobert equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
1396*404b540aSrobert break;
1397*404b540aSrobert
1398*404b540aSrobert case 3:
1399*404b540aSrobert ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1400*404b540aSrobert operands[1], smode, operands[2], smode);
1401*404b540aSrobert equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
1402*404b540aSrobert break;
1403*404b540aSrobert
1404*404b540aSrobert default:
1405*404b540aSrobert abort ();
1406*404b540aSrobert }
1407*404b540aSrobert
1408*404b540aSrobert insns = get_insns ();
1409*404b540aSrobert end_sequence ();
1410*404b540aSrobert emit_libcall_block (insns, operands[0], ret, equiv);
1411*404b540aSrobert }
1412*404b540aSrobert
1413*404b540aSrobert
1414*404b540aSrobert void
c4x_emit_libcall3(rtx libcall,enum rtx_code code,enum machine_mode mode,rtx * operands)1415*404b540aSrobert c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
1416*404b540aSrobert enum machine_mode mode, rtx *operands)
1417*404b540aSrobert {
1418*404b540aSrobert c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
1419*404b540aSrobert }
1420*404b540aSrobert
1421*404b540aSrobert
1422*404b540aSrobert void
c4x_emit_libcall_mulhi(rtx libcall,enum rtx_code code,enum machine_mode mode,rtx * operands)1423*404b540aSrobert c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
1424*404b540aSrobert enum machine_mode mode, rtx *operands)
1425*404b540aSrobert {
1426*404b540aSrobert rtx ret;
1427*404b540aSrobert rtx insns;
1428*404b540aSrobert rtx equiv;
1429*404b540aSrobert
1430*404b540aSrobert start_sequence ();
1431*404b540aSrobert ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1432*404b540aSrobert operands[1], mode, operands[2], mode);
1433*404b540aSrobert equiv = gen_rtx_TRUNCATE (mode,
1434*404b540aSrobert gen_rtx_LSHIFTRT (HImode,
1435*404b540aSrobert gen_rtx_MULT (HImode,
1436*404b540aSrobert gen_rtx_fmt_e (code, HImode, operands[1]),
1437*404b540aSrobert gen_rtx_fmt_e (code, HImode, operands[2])),
1438*404b540aSrobert GEN_INT (32)));
1439*404b540aSrobert insns = get_insns ();
1440*404b540aSrobert end_sequence ();
1441*404b540aSrobert emit_libcall_block (insns, operands[0], ret, equiv);
1442*404b540aSrobert }
1443*404b540aSrobert
1444*404b540aSrobert
1445*404b540aSrobert int
c4x_legitimate_address_p(enum machine_mode mode,rtx addr,int strict)1446*404b540aSrobert c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
1447*404b540aSrobert {
1448*404b540aSrobert rtx base = NULL_RTX; /* Base register (AR0-AR7). */
1449*404b540aSrobert rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
1450*404b540aSrobert rtx disp = NULL_RTX; /* Displacement. */
1451*404b540aSrobert enum rtx_code code;
1452*404b540aSrobert
1453*404b540aSrobert code = GET_CODE (addr);
1454*404b540aSrobert switch (code)
1455*404b540aSrobert {
1456*404b540aSrobert /* Register indirect with auto increment/decrement. We don't
1457*404b540aSrobert allow SP here---push_operand should recognize an operand
1458*404b540aSrobert being pushed on the stack. */
1459*404b540aSrobert
1460*404b540aSrobert case PRE_DEC:
1461*404b540aSrobert case PRE_INC:
1462*404b540aSrobert case POST_DEC:
1463*404b540aSrobert if (mode != QImode && mode != QFmode)
1464*404b540aSrobert return 0;
1465*404b540aSrobert
1466*404b540aSrobert case POST_INC:
1467*404b540aSrobert base = XEXP (addr, 0);
1468*404b540aSrobert if (! REG_P (base))
1469*404b540aSrobert return 0;
1470*404b540aSrobert break;
1471*404b540aSrobert
1472*404b540aSrobert case PRE_MODIFY:
1473*404b540aSrobert case POST_MODIFY:
1474*404b540aSrobert {
1475*404b540aSrobert rtx op0 = XEXP (addr, 0);
1476*404b540aSrobert rtx op1 = XEXP (addr, 1);
1477*404b540aSrobert
1478*404b540aSrobert if (mode != QImode && mode != QFmode)
1479*404b540aSrobert return 0;
1480*404b540aSrobert
1481*404b540aSrobert if (! REG_P (op0)
1482*404b540aSrobert || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
1483*404b540aSrobert return 0;
1484*404b540aSrobert base = XEXP (op1, 0);
1485*404b540aSrobert if (! REG_P (base))
1486*404b540aSrobert return 0;
1487*404b540aSrobert if (REGNO (base) != REGNO (op0))
1488*404b540aSrobert return 0;
1489*404b540aSrobert if (REG_P (XEXP (op1, 1)))
1490*404b540aSrobert indx = XEXP (op1, 1);
1491*404b540aSrobert else
1492*404b540aSrobert disp = XEXP (op1, 1);
1493*404b540aSrobert }
1494*404b540aSrobert break;
1495*404b540aSrobert
1496*404b540aSrobert /* Register indirect. */
1497*404b540aSrobert case REG:
1498*404b540aSrobert base = addr;
1499*404b540aSrobert break;
1500*404b540aSrobert
1501*404b540aSrobert /* Register indirect with displacement or index. */
1502*404b540aSrobert case PLUS:
1503*404b540aSrobert {
1504*404b540aSrobert rtx op0 = XEXP (addr, 0);
1505*404b540aSrobert rtx op1 = XEXP (addr, 1);
1506*404b540aSrobert enum rtx_code code0 = GET_CODE (op0);
1507*404b540aSrobert
1508*404b540aSrobert switch (code0)
1509*404b540aSrobert {
1510*404b540aSrobert case REG:
1511*404b540aSrobert if (REG_P (op1))
1512*404b540aSrobert {
1513*404b540aSrobert base = op0; /* Base + index. */
1514*404b540aSrobert indx = op1;
1515*404b540aSrobert if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
1516*404b540aSrobert {
1517*404b540aSrobert base = op1;
1518*404b540aSrobert indx = op0;
1519*404b540aSrobert }
1520*404b540aSrobert }
1521*404b540aSrobert else
1522*404b540aSrobert {
1523*404b540aSrobert base = op0; /* Base + displacement. */
1524*404b540aSrobert disp = op1;
1525*404b540aSrobert }
1526*404b540aSrobert break;
1527*404b540aSrobert
1528*404b540aSrobert default:
1529*404b540aSrobert return 0;
1530*404b540aSrobert }
1531*404b540aSrobert }
1532*404b540aSrobert break;
1533*404b540aSrobert
1534*404b540aSrobert /* Direct addressing with DP register. */
1535*404b540aSrobert case LO_SUM:
1536*404b540aSrobert {
1537*404b540aSrobert rtx op0 = XEXP (addr, 0);
1538*404b540aSrobert rtx op1 = XEXP (addr, 1);
1539*404b540aSrobert
1540*404b540aSrobert /* HImode and HFmode direct memory references aren't truly
1541*404b540aSrobert offsettable (consider case at end of data page). We
1542*404b540aSrobert probably get better code by loading a pointer and using an
1543*404b540aSrobert indirect memory reference. */
1544*404b540aSrobert if (mode == HImode || mode == HFmode)
1545*404b540aSrobert return 0;
1546*404b540aSrobert
1547*404b540aSrobert if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
1548*404b540aSrobert return 0;
1549*404b540aSrobert
1550*404b540aSrobert if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
1551*404b540aSrobert return 1;
1552*404b540aSrobert
1553*404b540aSrobert if (GET_CODE (op1) == CONST)
1554*404b540aSrobert return 1;
1555*404b540aSrobert return 0;
1556*404b540aSrobert }
1557*404b540aSrobert break;
1558*404b540aSrobert
1559*404b540aSrobert /* Direct addressing with some work for the assembler... */
1560*404b540aSrobert case CONST:
1561*404b540aSrobert /* Direct addressing. */
1562*404b540aSrobert case LABEL_REF:
1563*404b540aSrobert case SYMBOL_REF:
1564*404b540aSrobert if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
1565*404b540aSrobert return 1;
1566*404b540aSrobert /* These need to be converted to a LO_SUM (...).
1567*404b540aSrobert LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
1568*404b540aSrobert return 0;
1569*404b540aSrobert
1570*404b540aSrobert /* Do not allow direct memory access to absolute addresses.
1571*404b540aSrobert This is more pain than it's worth, especially for the
1572*404b540aSrobert small memory model where we can't guarantee that
1573*404b540aSrobert this address is within the data page---we don't want
1574*404b540aSrobert to modify the DP register in the small memory model,
1575*404b540aSrobert even temporarily, since an interrupt can sneak in.... */
1576*404b540aSrobert case CONST_INT:
1577*404b540aSrobert return 0;
1578*404b540aSrobert
1579*404b540aSrobert /* Indirect indirect addressing. */
1580*404b540aSrobert case MEM:
1581*404b540aSrobert return 0;
1582*404b540aSrobert
1583*404b540aSrobert case CONST_DOUBLE:
1584*404b540aSrobert fatal_insn ("using CONST_DOUBLE for address", addr);
1585*404b540aSrobert
1586*404b540aSrobert default:
1587*404b540aSrobert return 0;
1588*404b540aSrobert }
1589*404b540aSrobert
1590*404b540aSrobert /* Validate the base register. */
1591*404b540aSrobert if (base)
1592*404b540aSrobert {
1593*404b540aSrobert /* Check that the address is offsettable for HImode and HFmode. */
1594*404b540aSrobert if (indx && (mode == HImode || mode == HFmode))
1595*404b540aSrobert return 0;
1596*404b540aSrobert
1597*404b540aSrobert /* Handle DP based stuff. */
1598*404b540aSrobert if (REGNO (base) == DP_REGNO)
1599*404b540aSrobert return 1;
1600*404b540aSrobert if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
1601*404b540aSrobert return 0;
1602*404b540aSrobert else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
1603*404b540aSrobert return 0;
1604*404b540aSrobert }
1605*404b540aSrobert
1606*404b540aSrobert /* Now validate the index register. */
1607*404b540aSrobert if (indx)
1608*404b540aSrobert {
1609*404b540aSrobert if (GET_CODE (indx) != REG)
1610*404b540aSrobert return 0;
1611*404b540aSrobert if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
1612*404b540aSrobert return 0;
1613*404b540aSrobert else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
1614*404b540aSrobert return 0;
1615*404b540aSrobert }
1616*404b540aSrobert
1617*404b540aSrobert /* Validate displacement. */
1618*404b540aSrobert if (disp)
1619*404b540aSrobert {
1620*404b540aSrobert if (GET_CODE (disp) != CONST_INT)
1621*404b540aSrobert return 0;
1622*404b540aSrobert if (mode == HImode || mode == HFmode)
1623*404b540aSrobert {
1624*404b540aSrobert /* The offset displacement must be legitimate. */
1625*404b540aSrobert if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
1626*404b540aSrobert return 0;
1627*404b540aSrobert }
1628*404b540aSrobert else
1629*404b540aSrobert {
1630*404b540aSrobert if (! IS_DISP8_CONST (INTVAL (disp)))
1631*404b540aSrobert return 0;
1632*404b540aSrobert }
1633*404b540aSrobert /* Can't add an index with a disp. */
1634*404b540aSrobert if (indx)
1635*404b540aSrobert return 0;
1636*404b540aSrobert }
1637*404b540aSrobert return 1;
1638*404b540aSrobert }
1639*404b540aSrobert
1640*404b540aSrobert
1641*404b540aSrobert rtx
c4x_legitimize_address(rtx orig ATTRIBUTE_UNUSED,enum machine_mode mode ATTRIBUTE_UNUSED)1642*404b540aSrobert c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
1643*404b540aSrobert enum machine_mode mode ATTRIBUTE_UNUSED)
1644*404b540aSrobert {
1645*404b540aSrobert if (GET_CODE (orig) == SYMBOL_REF
1646*404b540aSrobert || GET_CODE (orig) == LABEL_REF)
1647*404b540aSrobert {
1648*404b540aSrobert if (mode == HImode || mode == HFmode)
1649*404b540aSrobert {
1650*404b540aSrobert /* We need to force the address into
1651*404b540aSrobert a register so that it is offsettable. */
1652*404b540aSrobert rtx addr_reg = gen_reg_rtx (Pmode);
1653*404b540aSrobert emit_move_insn (addr_reg, orig);
1654*404b540aSrobert return addr_reg;
1655*404b540aSrobert }
1656*404b540aSrobert else
1657*404b540aSrobert {
1658*404b540aSrobert rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1659*404b540aSrobert
1660*404b540aSrobert if (! TARGET_SMALL)
1661*404b540aSrobert emit_insn (gen_set_ldp (dp_reg, orig));
1662*404b540aSrobert
1663*404b540aSrobert return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1664*404b540aSrobert }
1665*404b540aSrobert }
1666*404b540aSrobert
1667*404b540aSrobert return NULL_RTX;
1668*404b540aSrobert }
1669*404b540aSrobert
1670*404b540aSrobert
1671*404b540aSrobert /* Provide the costs of an addressing mode that contains ADDR.
1672*404b540aSrobert If ADDR is not a valid address, its cost is irrelevant.
1673*404b540aSrobert This is used in cse and loop optimization to determine
1674*404b540aSrobert if it is worthwhile storing a common address into a register.
1675*404b540aSrobert Unfortunately, the C4x address cost depends on other operands. */
1676*404b540aSrobert
1677*404b540aSrobert static int
c4x_address_cost(rtx addr)1678*404b540aSrobert c4x_address_cost (rtx addr)
1679*404b540aSrobert {
1680*404b540aSrobert switch (GET_CODE (addr))
1681*404b540aSrobert {
1682*404b540aSrobert case REG:
1683*404b540aSrobert return 1;
1684*404b540aSrobert
1685*404b540aSrobert case POST_INC:
1686*404b540aSrobert case POST_DEC:
1687*404b540aSrobert case PRE_INC:
1688*404b540aSrobert case PRE_DEC:
1689*404b540aSrobert return 1;
1690*404b540aSrobert
1691*404b540aSrobert /* These shouldn't be directly generated. */
1692*404b540aSrobert case SYMBOL_REF:
1693*404b540aSrobert case LABEL_REF:
1694*404b540aSrobert case CONST:
1695*404b540aSrobert return 10;
1696*404b540aSrobert
1697*404b540aSrobert case LO_SUM:
1698*404b540aSrobert {
1699*404b540aSrobert rtx op1 = XEXP (addr, 1);
1700*404b540aSrobert
1701*404b540aSrobert if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
1702*404b540aSrobert return TARGET_SMALL ? 3 : 4;
1703*404b540aSrobert
1704*404b540aSrobert if (GET_CODE (op1) == CONST)
1705*404b540aSrobert {
1706*404b540aSrobert rtx offset = const0_rtx;
1707*404b540aSrobert
1708*404b540aSrobert op1 = eliminate_constant_term (op1, &offset);
1709*404b540aSrobert
1710*404b540aSrobert /* ??? These costs need rethinking... */
1711*404b540aSrobert if (GET_CODE (op1) == LABEL_REF)
1712*404b540aSrobert return 3;
1713*404b540aSrobert
1714*404b540aSrobert if (GET_CODE (op1) != SYMBOL_REF)
1715*404b540aSrobert return 4;
1716*404b540aSrobert
1717*404b540aSrobert if (INTVAL (offset) == 0)
1718*404b540aSrobert return 3;
1719*404b540aSrobert
1720*404b540aSrobert return 4;
1721*404b540aSrobert }
1722*404b540aSrobert fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
1723*404b540aSrobert }
1724*404b540aSrobert break;
1725*404b540aSrobert
1726*404b540aSrobert case PLUS:
1727*404b540aSrobert {
1728*404b540aSrobert register rtx op0 = XEXP (addr, 0);
1729*404b540aSrobert register rtx op1 = XEXP (addr, 1);
1730*404b540aSrobert
1731*404b540aSrobert if (GET_CODE (op0) != REG)
1732*404b540aSrobert break;
1733*404b540aSrobert
1734*404b540aSrobert switch (GET_CODE (op1))
1735*404b540aSrobert {
1736*404b540aSrobert default:
1737*404b540aSrobert break;
1738*404b540aSrobert
1739*404b540aSrobert case REG:
1740*404b540aSrobert /* This cost for REG+REG must be greater than the cost
1741*404b540aSrobert for REG if we want autoincrement addressing modes. */
1742*404b540aSrobert return 2;
1743*404b540aSrobert
1744*404b540aSrobert case CONST_INT:
1745*404b540aSrobert /* The following tries to improve GIV combination
1746*404b540aSrobert in strength reduce but appears not to help. */
1747*404b540aSrobert if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
1748*404b540aSrobert return 1;
1749*404b540aSrobert
1750*404b540aSrobert if (IS_DISP1_CONST (INTVAL (op1)))
1751*404b540aSrobert return 1;
1752*404b540aSrobert
1753*404b540aSrobert if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
1754*404b540aSrobert return 2;
1755*404b540aSrobert
1756*404b540aSrobert return 3;
1757*404b540aSrobert }
1758*404b540aSrobert }
1759*404b540aSrobert default:
1760*404b540aSrobert break;
1761*404b540aSrobert }
1762*404b540aSrobert
1763*404b540aSrobert return 4;
1764*404b540aSrobert }
1765*404b540aSrobert
1766*404b540aSrobert
1767*404b540aSrobert rtx
c4x_gen_compare_reg(enum rtx_code code,rtx x,rtx y)1768*404b540aSrobert c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
1769*404b540aSrobert {
1770*404b540aSrobert enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1771*404b540aSrobert rtx cc_reg;
1772*404b540aSrobert
1773*404b540aSrobert if (mode == CC_NOOVmode
1774*404b540aSrobert && (code == LE || code == GE || code == LT || code == GT))
1775*404b540aSrobert return NULL_RTX;
1776*404b540aSrobert
1777*404b540aSrobert cc_reg = gen_rtx_REG (mode, ST_REGNO);
1778*404b540aSrobert emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1779*404b540aSrobert gen_rtx_COMPARE (mode, x, y)));
1780*404b540aSrobert return cc_reg;
1781*404b540aSrobert }
1782*404b540aSrobert
1783*404b540aSrobert char *
c4x_output_cbranch(const char * form,rtx seq)1784*404b540aSrobert c4x_output_cbranch (const char *form, rtx seq)
1785*404b540aSrobert {
1786*404b540aSrobert int delayed = 0;
1787*404b540aSrobert int annultrue = 0;
1788*404b540aSrobert int annulfalse = 0;
1789*404b540aSrobert rtx delay;
1790*404b540aSrobert char *cp;
1791*404b540aSrobert static char str[100];
1792*404b540aSrobert
1793*404b540aSrobert if (final_sequence)
1794*404b540aSrobert {
1795*404b540aSrobert delay = XVECEXP (final_sequence, 0, 1);
1796*404b540aSrobert delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1797*404b540aSrobert annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1798*404b540aSrobert annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1799*404b540aSrobert }
1800*404b540aSrobert strcpy (str, form);
1801*404b540aSrobert cp = &str [strlen (str)];
1802*404b540aSrobert if (delayed)
1803*404b540aSrobert {
1804*404b540aSrobert *cp++ = '%';
1805*404b540aSrobert *cp++ = '#';
1806*404b540aSrobert }
1807*404b540aSrobert if (annultrue)
1808*404b540aSrobert {
1809*404b540aSrobert *cp++ = 'a';
1810*404b540aSrobert *cp++ = 't';
1811*404b540aSrobert }
1812*404b540aSrobert if (annulfalse)
1813*404b540aSrobert {
1814*404b540aSrobert *cp++ = 'a';
1815*404b540aSrobert *cp++ = 'f';
1816*404b540aSrobert }
1817*404b540aSrobert *cp++ = '\t';
1818*404b540aSrobert *cp++ = '%';
1819*404b540aSrobert *cp++ = 'l';
1820*404b540aSrobert *cp++ = '1';
1821*404b540aSrobert *cp = 0;
1822*404b540aSrobert return str;
1823*404b540aSrobert }
1824*404b540aSrobert
1825*404b540aSrobert void
c4x_print_operand(FILE * file,rtx op,int letter)1826*404b540aSrobert c4x_print_operand (FILE *file, rtx op, int letter)
1827*404b540aSrobert {
1828*404b540aSrobert rtx op1;
1829*404b540aSrobert enum rtx_code code;
1830*404b540aSrobert
1831*404b540aSrobert switch (letter)
1832*404b540aSrobert {
1833*404b540aSrobert case '#': /* Delayed. */
1834*404b540aSrobert if (final_sequence)
1835*404b540aSrobert fprintf (file, "d");
1836*404b540aSrobert return;
1837*404b540aSrobert }
1838*404b540aSrobert
1839*404b540aSrobert code = GET_CODE (op);
1840*404b540aSrobert switch (letter)
1841*404b540aSrobert {
1842*404b540aSrobert case 'A': /* Direct address. */
1843*404b540aSrobert if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
1844*404b540aSrobert fprintf (file, "@");
1845*404b540aSrobert break;
1846*404b540aSrobert
1847*404b540aSrobert case 'H': /* Sethi. */
1848*404b540aSrobert output_addr_const (file, op);
1849*404b540aSrobert return;
1850*404b540aSrobert
1851*404b540aSrobert case 'I': /* Reversed condition. */
1852*404b540aSrobert code = reverse_condition (code);
1853*404b540aSrobert break;
1854*404b540aSrobert
1855*404b540aSrobert case 'L': /* Log 2 of constant. */
1856*404b540aSrobert if (code != CONST_INT)
1857*404b540aSrobert fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1858*404b540aSrobert fprintf (file, "%d", exact_log2 (INTVAL (op)));
1859*404b540aSrobert return;
1860*404b540aSrobert
1861*404b540aSrobert case 'N': /* Ones complement of small constant. */
1862*404b540aSrobert if (code != CONST_INT)
1863*404b540aSrobert fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1864*404b540aSrobert fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
1865*404b540aSrobert return;
1866*404b540aSrobert
1867*404b540aSrobert case 'K': /* Generate ldp(k) if direct address. */
1868*404b540aSrobert if (! TARGET_SMALL
1869*404b540aSrobert && code == MEM
1870*404b540aSrobert && GET_CODE (XEXP (op, 0)) == LO_SUM
1871*404b540aSrobert && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1872*404b540aSrobert && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
1873*404b540aSrobert {
1874*404b540aSrobert op1 = XEXP (XEXP (op, 0), 1);
1875*404b540aSrobert if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1876*404b540aSrobert {
1877*404b540aSrobert fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1878*404b540aSrobert output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1879*404b540aSrobert fprintf (file, "\n");
1880*404b540aSrobert }
1881*404b540aSrobert }
1882*404b540aSrobert return;
1883*404b540aSrobert
1884*404b540aSrobert case 'M': /* Generate ldp(k) if direct address. */
1885*404b540aSrobert if (! TARGET_SMALL /* Only used in asm statements. */
1886*404b540aSrobert && code == MEM
1887*404b540aSrobert && (GET_CODE (XEXP (op, 0)) == CONST
1888*404b540aSrobert || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1889*404b540aSrobert {
1890*404b540aSrobert fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
1891*404b540aSrobert output_address (XEXP (op, 0));
1892*404b540aSrobert fprintf (file, "\n\t");
1893*404b540aSrobert }
1894*404b540aSrobert return;
1895*404b540aSrobert
1896*404b540aSrobert case 'O': /* Offset address. */
1897*404b540aSrobert if (code == MEM && c4x_autoinc_operand (op, Pmode))
1898*404b540aSrobert break;
1899*404b540aSrobert else if (code == MEM)
1900*404b540aSrobert output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
1901*404b540aSrobert else if (code == REG)
1902*404b540aSrobert fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1903*404b540aSrobert else
1904*404b540aSrobert fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1905*404b540aSrobert return;
1906*404b540aSrobert
1907*404b540aSrobert case 'C': /* Call. */
1908*404b540aSrobert break;
1909*404b540aSrobert
1910*404b540aSrobert case 'U': /* Call/callu. */
1911*404b540aSrobert if (code != SYMBOL_REF)
1912*404b540aSrobert fprintf (file, "u");
1913*404b540aSrobert return;
1914*404b540aSrobert
1915*404b540aSrobert default:
1916*404b540aSrobert break;
1917*404b540aSrobert }
1918*404b540aSrobert
1919*404b540aSrobert switch (code)
1920*404b540aSrobert {
1921*404b540aSrobert case REG:
1922*404b540aSrobert if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1923*404b540aSrobert && ! TARGET_TI)
1924*404b540aSrobert fprintf (file, "%s", float_reg_names[REGNO (op)]);
1925*404b540aSrobert else
1926*404b540aSrobert fprintf (file, "%s", reg_names[REGNO (op)]);
1927*404b540aSrobert break;
1928*404b540aSrobert
1929*404b540aSrobert case MEM:
1930*404b540aSrobert output_address (XEXP (op, 0));
1931*404b540aSrobert break;
1932*404b540aSrobert
1933*404b540aSrobert case CONST_DOUBLE:
1934*404b540aSrobert {
1935*404b540aSrobert char str[64];
1936*404b540aSrobert
1937*404b540aSrobert real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
1938*404b540aSrobert sizeof (str), 0, 1);
1939*404b540aSrobert fprintf (file, "%s", str);
1940*404b540aSrobert }
1941*404b540aSrobert break;
1942*404b540aSrobert
1943*404b540aSrobert case CONST_INT:
1944*404b540aSrobert fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
1945*404b540aSrobert break;
1946*404b540aSrobert
1947*404b540aSrobert case NE:
1948*404b540aSrobert fprintf (file, "ne");
1949*404b540aSrobert break;
1950*404b540aSrobert
1951*404b540aSrobert case EQ:
1952*404b540aSrobert fprintf (file, "eq");
1953*404b540aSrobert break;
1954*404b540aSrobert
1955*404b540aSrobert case GE:
1956*404b540aSrobert fprintf (file, "ge");
1957*404b540aSrobert break;
1958*404b540aSrobert
1959*404b540aSrobert case GT:
1960*404b540aSrobert fprintf (file, "gt");
1961*404b540aSrobert break;
1962*404b540aSrobert
1963*404b540aSrobert case LE:
1964*404b540aSrobert fprintf (file, "le");
1965*404b540aSrobert break;
1966*404b540aSrobert
1967*404b540aSrobert case LT:
1968*404b540aSrobert fprintf (file, "lt");
1969*404b540aSrobert break;
1970*404b540aSrobert
1971*404b540aSrobert case GEU:
1972*404b540aSrobert fprintf (file, "hs");
1973*404b540aSrobert break;
1974*404b540aSrobert
1975*404b540aSrobert case GTU:
1976*404b540aSrobert fprintf (file, "hi");
1977*404b540aSrobert break;
1978*404b540aSrobert
1979*404b540aSrobert case LEU:
1980*404b540aSrobert fprintf (file, "ls");
1981*404b540aSrobert break;
1982*404b540aSrobert
1983*404b540aSrobert case LTU:
1984*404b540aSrobert fprintf (file, "lo");
1985*404b540aSrobert break;
1986*404b540aSrobert
1987*404b540aSrobert case SYMBOL_REF:
1988*404b540aSrobert output_addr_const (file, op);
1989*404b540aSrobert break;
1990*404b540aSrobert
1991*404b540aSrobert case CONST:
1992*404b540aSrobert output_addr_const (file, XEXP (op, 0));
1993*404b540aSrobert break;
1994*404b540aSrobert
1995*404b540aSrobert case CODE_LABEL:
1996*404b540aSrobert break;
1997*404b540aSrobert
1998*404b540aSrobert default:
1999*404b540aSrobert fatal_insn ("c4x_print_operand: Bad operand case", op);
2000*404b540aSrobert break;
2001*404b540aSrobert }
2002*404b540aSrobert }
2003*404b540aSrobert
2004*404b540aSrobert
2005*404b540aSrobert void
c4x_print_operand_address(FILE * file,rtx addr)2006*404b540aSrobert c4x_print_operand_address (FILE *file, rtx addr)
2007*404b540aSrobert {
2008*404b540aSrobert switch (GET_CODE (addr))
2009*404b540aSrobert {
2010*404b540aSrobert case REG:
2011*404b540aSrobert fprintf (file, "*%s", reg_names[REGNO (addr)]);
2012*404b540aSrobert break;
2013*404b540aSrobert
2014*404b540aSrobert case PRE_DEC:
2015*404b540aSrobert fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2016*404b540aSrobert break;
2017*404b540aSrobert
2018*404b540aSrobert case POST_INC:
2019*404b540aSrobert fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
2020*404b540aSrobert break;
2021*404b540aSrobert
2022*404b540aSrobert case POST_MODIFY:
2023*404b540aSrobert {
2024*404b540aSrobert rtx op0 = XEXP (XEXP (addr, 1), 0);
2025*404b540aSrobert rtx op1 = XEXP (XEXP (addr, 1), 1);
2026*404b540aSrobert
2027*404b540aSrobert if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2028*404b540aSrobert fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2029*404b540aSrobert reg_names[REGNO (op1)]);
2030*404b540aSrobert else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2031*404b540aSrobert fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
2032*404b540aSrobert reg_names[REGNO (op0)], INTVAL (op1));
2033*404b540aSrobert else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2034*404b540aSrobert fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
2035*404b540aSrobert reg_names[REGNO (op0)], -INTVAL (op1));
2036*404b540aSrobert else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2037*404b540aSrobert fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2038*404b540aSrobert reg_names[REGNO (op1)]);
2039*404b540aSrobert else
2040*404b540aSrobert fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
2041*404b540aSrobert }
2042*404b540aSrobert break;
2043*404b540aSrobert
2044*404b540aSrobert case PRE_MODIFY:
2045*404b540aSrobert {
2046*404b540aSrobert rtx op0 = XEXP (XEXP (addr, 1), 0);
2047*404b540aSrobert rtx op1 = XEXP (XEXP (addr, 1), 1);
2048*404b540aSrobert
2049*404b540aSrobert if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2050*404b540aSrobert fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2051*404b540aSrobert reg_names[REGNO (op1)]);
2052*404b540aSrobert else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2053*404b540aSrobert fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
2054*404b540aSrobert reg_names[REGNO (op0)], INTVAL (op1));
2055*404b540aSrobert else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2056*404b540aSrobert fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
2057*404b540aSrobert reg_names[REGNO (op0)], -INTVAL (op1));
2058*404b540aSrobert else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2059*404b540aSrobert fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2060*404b540aSrobert reg_names[REGNO (op1)]);
2061*404b540aSrobert else
2062*404b540aSrobert fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2063*404b540aSrobert }
2064*404b540aSrobert break;
2065*404b540aSrobert
2066*404b540aSrobert case PRE_INC:
2067*404b540aSrobert fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2068*404b540aSrobert break;
2069*404b540aSrobert
2070*404b540aSrobert case POST_DEC:
2071*404b540aSrobert fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2072*404b540aSrobert break;
2073*404b540aSrobert
2074*404b540aSrobert case PLUS: /* Indirect with displacement. */
2075*404b540aSrobert {
2076*404b540aSrobert rtx op0 = XEXP (addr, 0);
2077*404b540aSrobert rtx op1 = XEXP (addr, 1);
2078*404b540aSrobert
2079*404b540aSrobert if (REG_P (op0))
2080*404b540aSrobert {
2081*404b540aSrobert if (REG_P (op1))
2082*404b540aSrobert {
2083*404b540aSrobert if (IS_INDEX_REG (op0))
2084*404b540aSrobert {
2085*404b540aSrobert fprintf (file, "*+%s(%s)",
2086*404b540aSrobert reg_names[REGNO (op1)],
2087*404b540aSrobert reg_names[REGNO (op0)]); /* Index + base. */
2088*404b540aSrobert }
2089*404b540aSrobert else
2090*404b540aSrobert {
2091*404b540aSrobert fprintf (file, "*+%s(%s)",
2092*404b540aSrobert reg_names[REGNO (op0)],
2093*404b540aSrobert reg_names[REGNO (op1)]); /* Base + index. */
2094*404b540aSrobert }
2095*404b540aSrobert }
2096*404b540aSrobert else if (INTVAL (op1) < 0)
2097*404b540aSrobert {
2098*404b540aSrobert fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
2099*404b540aSrobert reg_names[REGNO (op0)],
2100*404b540aSrobert -INTVAL (op1)); /* Base - displacement. */
2101*404b540aSrobert }
2102*404b540aSrobert else
2103*404b540aSrobert {
2104*404b540aSrobert fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
2105*404b540aSrobert reg_names[REGNO (op0)],
2106*404b540aSrobert INTVAL (op1)); /* Base + displacement. */
2107*404b540aSrobert }
2108*404b540aSrobert }
2109*404b540aSrobert else
2110*404b540aSrobert fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2111*404b540aSrobert }
2112*404b540aSrobert break;
2113*404b540aSrobert
2114*404b540aSrobert case LO_SUM:
2115*404b540aSrobert {
2116*404b540aSrobert rtx op0 = XEXP (addr, 0);
2117*404b540aSrobert rtx op1 = XEXP (addr, 1);
2118*404b540aSrobert
2119*404b540aSrobert if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2120*404b540aSrobert c4x_print_operand_address (file, op1);
2121*404b540aSrobert else
2122*404b540aSrobert fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2123*404b540aSrobert }
2124*404b540aSrobert break;
2125*404b540aSrobert
2126*404b540aSrobert case CONST:
2127*404b540aSrobert case SYMBOL_REF:
2128*404b540aSrobert case LABEL_REF:
2129*404b540aSrobert fprintf (file, "@");
2130*404b540aSrobert output_addr_const (file, addr);
2131*404b540aSrobert break;
2132*404b540aSrobert
2133*404b540aSrobert /* We shouldn't access CONST_INT addresses. */
2134*404b540aSrobert case CONST_INT:
2135*404b540aSrobert
2136*404b540aSrobert default:
2137*404b540aSrobert fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2138*404b540aSrobert break;
2139*404b540aSrobert }
2140*404b540aSrobert }
2141*404b540aSrobert
2142*404b540aSrobert
2143*404b540aSrobert /* Return nonzero if the floating point operand will fit
2144*404b540aSrobert in the immediate field. */
2145*404b540aSrobert
2146*404b540aSrobert int
c4x_immed_float_p(rtx op)2147*404b540aSrobert c4x_immed_float_p (rtx op)
2148*404b540aSrobert {
2149*404b540aSrobert long convval[2];
2150*404b540aSrobert int exponent;
2151*404b540aSrobert REAL_VALUE_TYPE r;
2152*404b540aSrobert
2153*404b540aSrobert REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2154*404b540aSrobert if (GET_MODE (op) == HFmode)
2155*404b540aSrobert REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2156*404b540aSrobert else
2157*404b540aSrobert {
2158*404b540aSrobert REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2159*404b540aSrobert convval[1] = 0;
2160*404b540aSrobert }
2161*404b540aSrobert
2162*404b540aSrobert /* Sign extend exponent. */
2163*404b540aSrobert exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2164*404b540aSrobert if (exponent == -128)
2165*404b540aSrobert return 1; /* 0.0 */
2166*404b540aSrobert if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
2167*404b540aSrobert return 0; /* Precision doesn't fit. */
2168*404b540aSrobert return (exponent <= 7) /* Positive exp. */
2169*404b540aSrobert && (exponent >= -7); /* Negative exp. */
2170*404b540aSrobert }
2171*404b540aSrobert
2172*404b540aSrobert
/* The last instruction in a repeat block cannot be a Bcond, DBcond,
   CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.

   None of the last four instructions from the bottom of the block can
   be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
   BcondAT or RETIcondD.

   This routine scans the four previous insns for a jump insn, and if
   one is found, returns 1 so that we bung in a nop instruction.
   This simple minded strategy will add a nop, when it may not
   be required.  Say when there is a JUMP_INSN near the end of the
   block that doesn't get converted into a delayed branch.

   Note that we cannot have a call insn, since we don't generate
   repeat loops with calls in them (although I suppose we could, but
   there's no benefit.)

   !!! FIXME.  The rptb_top insn may be sucked into a SEQUENCE.  */

int
c4x_rptb_nop_p (rtx insn)
{
  rtx start_label;
  int i;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  /* If there is a label at the end of the loop we must insert
     a NOP.  Skip notes, USEs and CLOBBERs to find the last real insn.  */
  do {
    insn = previous_insn (insn);
  } while (GET_CODE (insn) == NOTE
	   || GET_CODE (insn) == USE
	   || GET_CODE (insn) == CLOBBER);
  if (GET_CODE (insn) == CODE_LABEL)
    return 1;

  /* Examine the last four real insns of the block.  */
  for (i = 0; i < 4; i++)
    {
      /* Search back for prev non-note and non-label insn.  */
      while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
	     || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
	{
	  /* Reached the top of the loop: only an empty loop (i == 0)
	     still needs a NOP.  */
	  if (insn == start_label)
	    return i == 0;

	  insn = previous_insn (insn);
	};

      /* If we have a jump instruction we should insert a NOP.  If we
	 hit repeat block top we should only insert a NOP if the loop
	 is empty.  */
      if (GET_CODE (insn) == JUMP_INSN)
	return 1;
      insn = previous_insn (insn);
    }
  return 0;
}
2232*404b540aSrobert
2233*404b540aSrobert
2234*404b540aSrobert /* The C4x looping instruction needs to be emitted at the top of the
2235*404b540aSrobert loop. Emitting the true RTL for a looping instruction at the top of
2236*404b540aSrobert the loop can cause problems with flow analysis. So instead, a dummy
2237*404b540aSrobert doloop insn is emitted at the end of the loop. This routine checks
2238*404b540aSrobert for the presence of this doloop insn and then searches back to the
2239*404b540aSrobert top of the loop, where it inserts the true looping insn (provided
2240*404b540aSrobert there are no instructions in the loop which would cause problems).
2241*404b540aSrobert Any additional labels can be emitted at this point. In addition, if
2242*404b540aSrobert the desired loop count register was not allocated, this routine does
2243*404b540aSrobert nothing.
2244*404b540aSrobert
2245*404b540aSrobert Before we can create a repeat block looping instruction we have to
2246*404b540aSrobert verify that there are no jumps outside the loop and no jumps outside
2247*404b540aSrobert the loop go into this loop. This can happen in the basic blocks reorder
2248*404b540aSrobert pass. The C4x cpu cannot handle this. */
2249*404b540aSrobert
2250*404b540aSrobert static int
c4x_label_ref_used_p(rtx x,rtx code_label)2251*404b540aSrobert c4x_label_ref_used_p (rtx x, rtx code_label)
2252*404b540aSrobert {
2253*404b540aSrobert enum rtx_code code;
2254*404b540aSrobert int i, j;
2255*404b540aSrobert const char *fmt;
2256*404b540aSrobert
2257*404b540aSrobert if (x == 0)
2258*404b540aSrobert return 0;
2259*404b540aSrobert
2260*404b540aSrobert code = GET_CODE (x);
2261*404b540aSrobert if (code == LABEL_REF)
2262*404b540aSrobert return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2263*404b540aSrobert
2264*404b540aSrobert fmt = GET_RTX_FORMAT (code);
2265*404b540aSrobert for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2266*404b540aSrobert {
2267*404b540aSrobert if (fmt[i] == 'e')
2268*404b540aSrobert {
2269*404b540aSrobert if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2270*404b540aSrobert return 1;
2271*404b540aSrobert }
2272*404b540aSrobert else if (fmt[i] == 'E')
2273*404b540aSrobert for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2274*404b540aSrobert if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2275*404b540aSrobert return 1;
2276*404b540aSrobert }
2277*404b540aSrobert return 0;
2278*404b540aSrobert }
2279*404b540aSrobert
2280*404b540aSrobert
/* Return nonzero if a repeat block may be used for the loop running
   from START_LABEL to the rptb_end jump INSN.  This requires that no
   jump transfers control into or out of the block (which the basic
   block reorder pass can create, and which the C4x hardware loop
   cannot handle).  */

static int
c4x_rptb_valid_p (rtx insn, rtx start_label)
{
  rtx end = insn;
  rtx start;
  rtx tmp;

  /* Find the start label.  */
  for (; insn; insn = PREV_INSN (insn))
    if (insn == start_label)
      break;

  /* Not found: then we cannot use a rptb or rpts.  The label was
     probably moved by the basic block reorder pass.  */
  if (! insn)
    return 0;

  start = insn;
  /* For each label located before the block, fail if any jump inside
     the block references it (control would leave the loop).  */
  for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }
  /* Likewise for each label located after the block.  */
  for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }
  /* For each label inside the block, fail if any jump before the
     start or after the end references it (control would enter the
     loop from outside).  */
  for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	  for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }

  /* All checks OK.  */
  return 1;
}
2339*404b540aSrobert
2340*404b540aSrobert
/* Replace the dummy rptb_end jump INSN with a real repeat block:
   emit an rptb_top (or rpts_top) insn at the top of the loop and an
   end label after INSN.  Falls back to a decrement-and-branch
   sequence when a repeat block cannot be used.  See the comment
   above c4x_label_ref_used_p for background.  */

void
c4x_rptb_insert (rtx insn)
{
  rtx end_label;
  rtx start_label;
  rtx new_start_label;
  rtx count_reg;

  /* If the count register has not been allocated to RC, say if
     there is a movmem pattern in the loop, then do not insert a
     RPTB instruction.  Instead we emit a decrement and branch
     at the end of the loop.  */
  count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
  if (REGNO (count_reg) != RC_REGNO)
    return;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  if (! c4x_rptb_valid_p (insn, start_label))
    {
      /* We cannot use the rptb insn.  Replace it so reorg can use
	 the delay slots of the jump insn.  */
      emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
      emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
      emit_insn_before (gen_bge (start_label), insn);
      LABEL_NUSES (start_label)++;
      delete_insn (insn);
      return;
    }

  /* Label marking the end of the repeat block.  */
  end_label = gen_label_rtx ();
  LABEL_NUSES (end_label)++;
  emit_label_after (end_label, insn);

  /* Fresh start label; loop-back jumps are redirected to it below so
     the old start label can be deleted if it becomes unused.  */
  new_start_label = gen_label_rtx ();
  LABEL_NUSES (new_start_label)++;

  /* Walk back to the start label, retargeting any jumps to it.  */
  for (; insn; insn = PREV_INSN (insn))
    {
      if (insn == start_label)
	break;
      if (GET_CODE (insn) == JUMP_INSN &&
	  JUMP_LABEL (insn) == start_label)
	redirect_jump (insn, new_start_label, 0);
    }
  if (! insn)
    fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);

  emit_label_after (new_start_label, insn);

  /* Emit rpts_top when TARGET_RPTS and c4x_rptb_rpts_p permit it,
     otherwise the general rptb_top form.  */
  if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
    emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
  else
    emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
  if (LABEL_NUSES (start_label) == 0)
    delete_insn (start_label);
}
2399*404b540aSrobert
2400*404b540aSrobert
2401*404b540aSrobert /* We need to use direct addressing for large constants and addresses
2402*404b540aSrobert that cannot fit within an instruction. We must check for these
2403*404b540aSrobert after after the final jump optimization pass, since this may
2404*404b540aSrobert introduce a local_move insn for a SYMBOL_REF. This pass
2405*404b540aSrobert must come before delayed branch slot filling since it can generate
2406*404b540aSrobert additional instructions.
2407*404b540aSrobert
2408*404b540aSrobert This function also fixes up RTPB style loops that didn't get RC
2409*404b540aSrobert allocated as the loop counter. */
2410*404b540aSrobert
static void
c4x_reorg (void)
{
  rtx insn;

  /* Scan the whole insn stream.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Look for insn.  */
      if (INSN_P (insn))
	{
	  int insn_code_number;
	  rtx old;

	  insn_code_number = recog_memoized (insn);

	  /* Skip insns that match no pattern.  */
	  if (insn_code_number < 0)
	    continue;

	  /* Insert the RTX for RPTB at the top of the loop
	     and a label at the end of the loop.  */
	  if (insn_code_number == CODE_FOR_rptb_end)
	    c4x_rptb_insert(insn);

	  /* We need to split the insn here. Otherwise the calls to
	     force_const_mem will not work for load_immed_address.  */
	  old = insn;

	  /* Don't split the insn if it has been deleted.  */
	  if (! INSN_DELETED_P (old))
	    insn = try_split (PATTERN(old), old, 1);

	  /* When not optimizing, the old insn will be still left around
	     with only the 'deleted' bit set.  Transform it into a note
	     to avoid confusion of subsequent processing.  */
	  if (INSN_DELETED_P (old))
	    {
	      PUT_CODE (old, NOTE);
	      NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (old) = 0;
	    }
	}
    }
}
2454*404b540aSrobert
2455*404b540aSrobert
2456*404b540aSrobert int
c4x_a_register(rtx op)2457*404b540aSrobert c4x_a_register (rtx op)
2458*404b540aSrobert {
2459*404b540aSrobert return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2460*404b540aSrobert }
2461*404b540aSrobert
2462*404b540aSrobert
2463*404b540aSrobert int
c4x_x_register(rtx op)2464*404b540aSrobert c4x_x_register (rtx op)
2465*404b540aSrobert {
2466*404b540aSrobert return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2467*404b540aSrobert }
2468*404b540aSrobert
2469*404b540aSrobert
2470*404b540aSrobert static int
c4x_immed_int_constant(rtx op)2471*404b540aSrobert c4x_immed_int_constant (rtx op)
2472*404b540aSrobert {
2473*404b540aSrobert if (GET_CODE (op) != CONST_INT)
2474*404b540aSrobert return 0;
2475*404b540aSrobert
2476*404b540aSrobert return GET_MODE (op) == VOIDmode
2477*404b540aSrobert || GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
2478*404b540aSrobert || GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
2479*404b540aSrobert }
2480*404b540aSrobert
2481*404b540aSrobert
2482*404b540aSrobert static int
c4x_immed_float_constant(rtx op)2483*404b540aSrobert c4x_immed_float_constant (rtx op)
2484*404b540aSrobert {
2485*404b540aSrobert if (GET_CODE (op) != CONST_DOUBLE)
2486*404b540aSrobert return 0;
2487*404b540aSrobert
2488*404b540aSrobert /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2489*404b540aSrobert present this only means that a MEM rtx has been generated. It does
2490*404b540aSrobert not mean the rtx is really in memory. */
2491*404b540aSrobert
2492*404b540aSrobert return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2493*404b540aSrobert }
2494*404b540aSrobert
2495*404b540aSrobert
2496*404b540aSrobert int
c4x_shiftable_constant(rtx op)2497*404b540aSrobert c4x_shiftable_constant (rtx op)
2498*404b540aSrobert {
2499*404b540aSrobert int i;
2500*404b540aSrobert int mask;
2501*404b540aSrobert int val = INTVAL (op);
2502*404b540aSrobert
2503*404b540aSrobert for (i = 0; i < 16; i++)
2504*404b540aSrobert {
2505*404b540aSrobert if (val & (1 << i))
2506*404b540aSrobert break;
2507*404b540aSrobert }
2508*404b540aSrobert mask = ((0xffff >> i) << 16) | 0xffff;
2509*404b540aSrobert if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2510*404b540aSrobert : (val >> i) & mask))
2511*404b540aSrobert return i;
2512*404b540aSrobert return -1;
2513*404b540aSrobert }
2514*404b540aSrobert
2515*404b540aSrobert
2516*404b540aSrobert int
c4x_H_constant(rtx op)2517*404b540aSrobert c4x_H_constant (rtx op)
2518*404b540aSrobert {
2519*404b540aSrobert return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2520*404b540aSrobert }
2521*404b540aSrobert
2522*404b540aSrobert
2523*404b540aSrobert int
c4x_I_constant(rtx op)2524*404b540aSrobert c4x_I_constant (rtx op)
2525*404b540aSrobert {
2526*404b540aSrobert return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2527*404b540aSrobert }
2528*404b540aSrobert
2529*404b540aSrobert
2530*404b540aSrobert int
c4x_J_constant(rtx op)2531*404b540aSrobert c4x_J_constant (rtx op)
2532*404b540aSrobert {
2533*404b540aSrobert if (TARGET_C3X)
2534*404b540aSrobert return 0;
2535*404b540aSrobert return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2536*404b540aSrobert }
2537*404b540aSrobert
2538*404b540aSrobert
2539*404b540aSrobert int
c4x_K_constant(rtx op)2540*404b540aSrobert c4x_K_constant (rtx op)
2541*404b540aSrobert {
2542*404b540aSrobert if (TARGET_C3X || ! c4x_immed_int_constant (op))
2543*404b540aSrobert return 0;
2544*404b540aSrobert return IS_INT5_CONST (INTVAL (op));
2545*404b540aSrobert }
2546*404b540aSrobert
2547*404b540aSrobert
2548*404b540aSrobert int
c4x_L_constant(rtx op)2549*404b540aSrobert c4x_L_constant (rtx op)
2550*404b540aSrobert {
2551*404b540aSrobert return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2552*404b540aSrobert }
2553*404b540aSrobert
2554*404b540aSrobert
2555*404b540aSrobert int
c4x_N_constant(rtx op)2556*404b540aSrobert c4x_N_constant (rtx op)
2557*404b540aSrobert {
2558*404b540aSrobert return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2559*404b540aSrobert }
2560*404b540aSrobert
2561*404b540aSrobert
2562*404b540aSrobert int
c4x_O_constant(rtx op)2563*404b540aSrobert c4x_O_constant (rtx op)
2564*404b540aSrobert {
2565*404b540aSrobert return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2566*404b540aSrobert }
2567*404b540aSrobert
2568*404b540aSrobert
2569*404b540aSrobert /* The constraints do not have to check the register class,
2570*404b540aSrobert except when needed to discriminate between the constraints.
2571*404b540aSrobert The operand has been checked by the predicates to be valid. */
2572*404b540aSrobert
2573*404b540aSrobert /* ARx + 9-bit signed const or IRn
2574*404b540aSrobert *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2575*404b540aSrobert We don't include the pre/post inc/dec forms here since
2576*404b540aSrobert they are handled by the <> constraints. */
2577*404b540aSrobert
2578*404b540aSrobert int
c4x_Q_constraint(rtx op)2579*404b540aSrobert c4x_Q_constraint (rtx op)
2580*404b540aSrobert {
2581*404b540aSrobert enum machine_mode mode = GET_MODE (op);
2582*404b540aSrobert
2583*404b540aSrobert if (GET_CODE (op) != MEM)
2584*404b540aSrobert return 0;
2585*404b540aSrobert op = XEXP (op, 0);
2586*404b540aSrobert switch (GET_CODE (op))
2587*404b540aSrobert {
2588*404b540aSrobert case REG:
2589*404b540aSrobert return 1;
2590*404b540aSrobert
2591*404b540aSrobert case PLUS:
2592*404b540aSrobert {
2593*404b540aSrobert rtx op0 = XEXP (op, 0);
2594*404b540aSrobert rtx op1 = XEXP (op, 1);
2595*404b540aSrobert
2596*404b540aSrobert if (! REG_P (op0))
2597*404b540aSrobert return 0;
2598*404b540aSrobert
2599*404b540aSrobert if (REG_P (op1))
2600*404b540aSrobert return 1;
2601*404b540aSrobert
2602*404b540aSrobert if (GET_CODE (op1) != CONST_INT)
2603*404b540aSrobert return 0;
2604*404b540aSrobert
2605*404b540aSrobert /* HImode and HFmode must be offsettable. */
2606*404b540aSrobert if (mode == HImode || mode == HFmode)
2607*404b540aSrobert return IS_DISP8_OFF_CONST (INTVAL (op1));
2608*404b540aSrobert
2609*404b540aSrobert return IS_DISP8_CONST (INTVAL (op1));
2610*404b540aSrobert }
2611*404b540aSrobert break;
2612*404b540aSrobert
2613*404b540aSrobert default:
2614*404b540aSrobert break;
2615*404b540aSrobert }
2616*404b540aSrobert return 0;
2617*404b540aSrobert }
2618*404b540aSrobert
2619*404b540aSrobert
2620*404b540aSrobert /* ARx + 5-bit unsigned const
2621*404b540aSrobert *ARx, *+ARx(n) for n < 32. */
2622*404b540aSrobert
2623*404b540aSrobert int
c4x_R_constraint(rtx op)2624*404b540aSrobert c4x_R_constraint (rtx op)
2625*404b540aSrobert {
2626*404b540aSrobert enum machine_mode mode = GET_MODE (op);
2627*404b540aSrobert
2628*404b540aSrobert if (TARGET_C3X)
2629*404b540aSrobert return 0;
2630*404b540aSrobert if (GET_CODE (op) != MEM)
2631*404b540aSrobert return 0;
2632*404b540aSrobert op = XEXP (op, 0);
2633*404b540aSrobert switch (GET_CODE (op))
2634*404b540aSrobert {
2635*404b540aSrobert case REG:
2636*404b540aSrobert return 1;
2637*404b540aSrobert
2638*404b540aSrobert case PLUS:
2639*404b540aSrobert {
2640*404b540aSrobert rtx op0 = XEXP (op, 0);
2641*404b540aSrobert rtx op1 = XEXP (op, 1);
2642*404b540aSrobert
2643*404b540aSrobert if (! REG_P (op0))
2644*404b540aSrobert return 0;
2645*404b540aSrobert
2646*404b540aSrobert if (GET_CODE (op1) != CONST_INT)
2647*404b540aSrobert return 0;
2648*404b540aSrobert
2649*404b540aSrobert /* HImode and HFmode must be offsettable. */
2650*404b540aSrobert if (mode == HImode || mode == HFmode)
2651*404b540aSrobert return IS_UINT5_CONST (INTVAL (op1) + 1);
2652*404b540aSrobert
2653*404b540aSrobert return IS_UINT5_CONST (INTVAL (op1));
2654*404b540aSrobert }
2655*404b540aSrobert break;
2656*404b540aSrobert
2657*404b540aSrobert default:
2658*404b540aSrobert break;
2659*404b540aSrobert }
2660*404b540aSrobert return 0;
2661*404b540aSrobert }
2662*404b540aSrobert
2663*404b540aSrobert
/* Like c4x_R_constraint, but additionally require the base to be an
   address register or pseudo: *ARx or *+ARx(n) with unsigned 5-bit n.
   Not available on the C3x.  */

static int
c4x_R_indirect (rtx op)
{
  enum machine_mode mode = GET_MODE (op);

  if (TARGET_C3X || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return IS_ADDR_OR_PSEUDO_REG (op);

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	/* HImode and HFmode must be offsettable, so the displacement
	   plus one must also fit.  */
	if (mode == HImode || mode == HFmode)
	  return IS_ADDR_OR_PSEUDO_REG (op0)
	    && GET_CODE (op1) == CONST_INT
	    && IS_UINT5_CONST (INTVAL (op1) + 1);

	return REG_P (op0)
	  && IS_ADDR_OR_PSEUDO_REG (op0)
	  && GET_CODE (op1) == CONST_INT
	  && IS_UINT5_CONST (INTVAL (op1));
      }
      break;

    default:
      break;
    }
  return 0;
}
2701*404b540aSrobert
2702*404b540aSrobert
2703*404b540aSrobert /* ARx + 1-bit unsigned const or IRn
2704*404b540aSrobert *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2705*404b540aSrobert We don't include the pre/post inc/dec forms here since
2706*404b540aSrobert they are handled by the <> constraints. */
2707*404b540aSrobert
int
c4x_S_constraint (rtx op)
{
  enum machine_mode mode = GET_MODE (op);
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return 1;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	/* The modification must be a PLUS or MINUS whose first
	   operand is the register being modified.  */
	if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
	    || (op0 != XEXP (op1, 0)))
	  return 0;

	/* Only the register-index form is accepted here.  */
	op0 = XEXP (op1, 0);
	op1 = XEXP (op1, 1);
	return REG_P (op0) && REG_P (op1);
	/* Pre or post_modify with a displacement of 0 or 1
	   should not be generated.  */
      }
      break;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (!REG_P (op0))
	  return 0;

	if (REG_P (op1))
	  return 1;

	if (GET_CODE (op1) != CONST_INT)
	  return 0;

	/* HImode and HFmode must be offsettable.  */
	if (mode == HImode || mode == HFmode)
	  return IS_DISP1_OFF_CONST (INTVAL (op1));

	return IS_DISP1_CONST (INTVAL (op1));
      }
      break;

    default:
      break;
    }
  return 0;
}
2765*404b540aSrobert
2766*404b540aSrobert
/* Like c4x_S_constraint, but additionally check the register classes
   of the address components (address vs. index registers) and accept
   the auto-increment/decrement forms.  */

int
c4x_S_indirect (rtx op)
{
  enum machine_mode mode = GET_MODE (op);
  if (GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
	return 0;
      /* Fall through.  NOTE(review): the mode restriction above is
	 applied only to the decrement forms, not the increments --
	 confirm whether that asymmetry is intended.  */
    case PRE_INC:
    case POST_INC:
      op = XEXP (op, 0);
      /* Fall through to validate the base register.  */

    case REG:
      return IS_ADDR_OR_PSEUDO_REG (op);

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (mode != QImode && mode != QFmode)
	  return 0;

	/* The modification must be a PLUS or MINUS of the register
	   being modified.  */
	if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
	    || (op0 != XEXP (op1, 0)))
	  return 0;

	op0 = XEXP (op1, 0);
	op1 = XEXP (op1, 1);
	return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
	  && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
	/* Pre or post_modify with a displacement of 0 or 1
	   should not be generated.  */
      }

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (REG_P (op0))
	  {
	    /* HImode and HFmode must be offsettable.  */
	    if (mode == HImode || mode == HFmode)
	      return IS_ADDR_OR_PSEUDO_REG (op0)
		&& GET_CODE (op1) == CONST_INT
		&& IS_DISP1_OFF_CONST (INTVAL (op1));

	    /* Register + register: one operand must be an address
	       register, the other an index register.  */
	    if (REG_P (op1))
	      return (IS_INDEX_OR_PSEUDO_REG (op1)
		      && IS_ADDR_OR_PSEUDO_REG (op0))
		|| (IS_ADDR_OR_PSEUDO_REG (op1)
		    && IS_INDEX_OR_PSEUDO_REG (op0));

	    return IS_ADDR_OR_PSEUDO_REG (op0)
	      && GET_CODE (op1) == CONST_INT
	      && IS_DISP1_CONST (INTVAL (op1));
	  }
      }
      break;

    default:
      break;
    }
  return 0;
}
2840*404b540aSrobert
2841*404b540aSrobert
2842*404b540aSrobert /* Direct memory operand. */
2843*404b540aSrobert
2844*404b540aSrobert int
c4x_T_constraint(rtx op)2845*404b540aSrobert c4x_T_constraint (rtx op)
2846*404b540aSrobert {
2847*404b540aSrobert if (GET_CODE (op) != MEM)
2848*404b540aSrobert return 0;
2849*404b540aSrobert op = XEXP (op, 0);
2850*404b540aSrobert
2851*404b540aSrobert if (GET_CODE (op) != LO_SUM)
2852*404b540aSrobert {
2853*404b540aSrobert /* Allow call operands. */
2854*404b540aSrobert return GET_CODE (op) == SYMBOL_REF
2855*404b540aSrobert && GET_MODE (op) == Pmode
2856*404b540aSrobert && SYMBOL_REF_FUNCTION_P (op);
2857*404b540aSrobert }
2858*404b540aSrobert
2859*404b540aSrobert /* HImode and HFmode are not offsettable. */
2860*404b540aSrobert if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2861*404b540aSrobert return 0;
2862*404b540aSrobert
2863*404b540aSrobert if ((GET_CODE (XEXP (op, 0)) == REG)
2864*404b540aSrobert && (REGNO (XEXP (op, 0)) == DP_REGNO))
2865*404b540aSrobert return c4x_U_constraint (XEXP (op, 1));
2866*404b540aSrobert
2867*404b540aSrobert return 0;
2868*404b540aSrobert }
2869*404b540aSrobert
2870*404b540aSrobert
2871*404b540aSrobert /* Symbolic operand. */
2872*404b540aSrobert
2873*404b540aSrobert int
c4x_U_constraint(rtx op)2874*404b540aSrobert c4x_U_constraint (rtx op)
2875*404b540aSrobert {
2876*404b540aSrobert /* Don't allow direct addressing to an arbitrary constant. */
2877*404b540aSrobert return GET_CODE (op) == CONST
2878*404b540aSrobert || GET_CODE (op) == SYMBOL_REF
2879*404b540aSrobert || GET_CODE (op) == LABEL_REF;
2880*404b540aSrobert }
2881*404b540aSrobert
2882*404b540aSrobert
2883*404b540aSrobert int
c4x_autoinc_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)2884*404b540aSrobert c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2885*404b540aSrobert {
2886*404b540aSrobert if (GET_CODE (op) == MEM)
2887*404b540aSrobert {
2888*404b540aSrobert enum rtx_code code = GET_CODE (XEXP (op, 0));
2889*404b540aSrobert
2890*404b540aSrobert if (code == PRE_INC
2891*404b540aSrobert || code == PRE_DEC
2892*404b540aSrobert || code == POST_INC
2893*404b540aSrobert || code == POST_DEC
2894*404b540aSrobert || code == PRE_MODIFY
2895*404b540aSrobert || code == POST_MODIFY
2896*404b540aSrobert )
2897*404b540aSrobert return 1;
2898*404b540aSrobert }
2899*404b540aSrobert return 0;
2900*404b540aSrobert }
2901*404b540aSrobert
2902*404b540aSrobert
/* Return nonzero for a QFmode SUBREG of a QImode or HImode register.
   NOTE(review): the original comment described (subreg:HF (reg:HI)),
   but the code actually tests for QFmode -- confirm which is the
   intended mode before relying on the comment.  */

int
mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Allow a QFmode subreg of an integer register, which can be
     generated for a union of an int and a long double.  */
  if (GET_CODE (op) == SUBREG
      && (GET_MODE (op) == QFmode)
      && (GET_MODE (SUBREG_REG (op)) == QImode
	  || GET_MODE (SUBREG_REG (op)) == HImode))
    return 1;
  return 0;
}
2915*404b540aSrobert
2916*404b540aSrobert
2917*404b540aSrobert int
reg_imm_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)2918*404b540aSrobert reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2919*404b540aSrobert {
2920*404b540aSrobert if (REG_P (op) || CONSTANT_P (op))
2921*404b540aSrobert return 1;
2922*404b540aSrobert return 0;
2923*404b540aSrobert }
2924*404b540aSrobert
2925*404b540aSrobert
/* Predicate: return nonzero if OP does not modify a register as a
   side effect: a register, any constant, or a MEM whose address has
   no auto increment/decrement/modify.  MODE is ignored.  */

int
not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (REG_P (op) || CONSTANT_P (op))
    return 1;
  if (GET_CODE (op) != MEM)
    return 0;
  /* Examine the address inside the MEM.  */
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return 1;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (! REG_P (op0))
	  return 0;

	if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
	  return 1;
      }
      /* NOTE(review): there is no break here, so a PLUS with a
	 register base and an offset that is neither REG nor CONST_INT
	 falls through to the LO_SUM test below.  Confirm this is an
	 intentional fall through (accepting a DP-based address)
	 rather than a missing break.  */

    case LO_SUM:
      {
	rtx op0 = XEXP (op, 0);

	/* A LO_SUM based on the data-page pointer is a direct
	   address with no side effect.  */
	if (REG_P (op0) && REGNO (op0) == DP_REGNO)
	  return 1;
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    default:
      break;
    }
  return 0;
}
2970*404b540aSrobert
2971*404b540aSrobert
2972*404b540aSrobert int
not_rc_reg(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)2973*404b540aSrobert not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2974*404b540aSrobert {
2975*404b540aSrobert if (REG_P (op) && REGNO (op) == RC_REGNO)
2976*404b540aSrobert return 0;
2977*404b540aSrobert return 1;
2978*404b540aSrobert }
2979*404b540aSrobert
2980*404b540aSrobert
/* Decompose the S-mode indirect memory reference OP into its
   components: *BASE and *INDEX receive the register numbers of the
   base and index registers (0 if absent), *INCDEC is set nonzero when
   the address has a side effect (auto increment/decrement/modify),
   and *DISP receives the constant displacement.  Calls fatal_insn on
   anything that is not a valid S-mode address.  */

static void
c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
{
  /* Start from a neutral decomposition.  */
  *base = 0;
  *incdec = 0;
  *index = 0;
  *disp = 0;

  if (GET_CODE (op) != MEM)
    fatal_insn ("invalid indirect memory address", op);

  /* Work on the address expression itself.  */
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
      /* *--reg: side effect, displacement applied before use.  */
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = -1;
      return;

    case POST_DEC:
      /* *reg--: side effect after use, so effective displacement 0.  */
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case PRE_INC:
      /* *++reg.  */
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 1;
      return;

    case POST_INC:
      /* *reg++.  */
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case POST_MODIFY:
      /* *reg, reg updated by a register or constant afterwards.  */
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 0;		/* ??? */
	}
      else
	  *disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;
      return;

    case PRE_MODIFY:
      /* Register updated by a register or constant before use.  */
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 1;		/* ??? */
	}
      else
	  *disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;

      return;

    case REG:
      /* Plain register indirect.  */
      *base = REGNO (op);
      return;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (c4x_a_register (op0))
	  {
	    if (c4x_x_register (op1))
	      {
		/* *+reg(index-reg).  */
		*base = REGNO (op0);
		*index = REGNO (op1);
		return;
	      }
	    else if ((GET_CODE (op1) == CONST_INT
		      && IS_DISP1_CONST (INTVAL (op1))))
	      {
		/* *+reg(small constant displacement).  */
		*base = REGNO (op0);
		*disp = INTVAL (op1);
		return;
	      }
	  }
	else if (c4x_x_register (op0) && c4x_a_register (op1))
	  {
	    /* Commuted form: index register first.  */
	    *base = REGNO (op1);
	    *index = REGNO (op0);
	    return;
	  }
      }
      /* Fall through.  */

    default:
      fatal_insn ("invalid indirect (S) memory address", op);
    }
}
3082*404b540aSrobert
3083*404b540aSrobert
3084*404b540aSrobert int
c4x_address_conflict(rtx op0,rtx op1,int store0,int store1)3085*404b540aSrobert c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
3086*404b540aSrobert {
3087*404b540aSrobert int base0;
3088*404b540aSrobert int base1;
3089*404b540aSrobert int incdec0;
3090*404b540aSrobert int incdec1;
3091*404b540aSrobert int index0;
3092*404b540aSrobert int index1;
3093*404b540aSrobert int disp0;
3094*404b540aSrobert int disp1;
3095*404b540aSrobert
3096*404b540aSrobert if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3097*404b540aSrobert return 1;
3098*404b540aSrobert
3099*404b540aSrobert c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3100*404b540aSrobert c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3101*404b540aSrobert
3102*404b540aSrobert if (store0 && store1)
3103*404b540aSrobert {
3104*404b540aSrobert /* If we have two stores in parallel to the same address, then
3105*404b540aSrobert the C4x only executes one of the stores. This is unlikely to
3106*404b540aSrobert cause problems except when writing to a hardware device such
3107*404b540aSrobert as a FIFO since the second write will be lost. The user
3108*404b540aSrobert should flag the hardware location as being volatile so that
3109*404b540aSrobert we don't do this optimization. While it is unlikely that we
3110*404b540aSrobert have an aliased address if both locations are not marked
3111*404b540aSrobert volatile, it is probably safer to flag a potential conflict
3112*404b540aSrobert if either location is volatile. */
3113*404b540aSrobert if (! flag_argument_noalias)
3114*404b540aSrobert {
3115*404b540aSrobert if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3116*404b540aSrobert return 1;
3117*404b540aSrobert }
3118*404b540aSrobert }
3119*404b540aSrobert
3120*404b540aSrobert /* If have a parallel load and a store to the same address, the load
3121*404b540aSrobert is performed first, so there is no conflict. Similarly, there is
3122*404b540aSrobert no conflict if have parallel loads from the same address. */
3123*404b540aSrobert
3124*404b540aSrobert /* Cannot use auto increment or auto decrement twice for same
3125*404b540aSrobert base register. */
3126*404b540aSrobert if (base0 == base1 && incdec0 && incdec0)
3127*404b540aSrobert return 1;
3128*404b540aSrobert
3129*404b540aSrobert /* It might be too confusing for GCC if we have use a base register
3130*404b540aSrobert with a side effect and a memory reference using the same register
3131*404b540aSrobert in parallel. */
3132*404b540aSrobert if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3133*404b540aSrobert return 1;
3134*404b540aSrobert
3135*404b540aSrobert /* We cannot optimize the case where op1 and op2 refer to the same
3136*404b540aSrobert address. */
3137*404b540aSrobert if (base0 == base1 && disp0 == disp1 && index0 == index1)
3138*404b540aSrobert return 1;
3139*404b540aSrobert
3140*404b540aSrobert /* No conflict. */
3141*404b540aSrobert return 0;
3142*404b540aSrobert }
3143*404b540aSrobert
3144*404b540aSrobert
3145*404b540aSrobert /* Check for while loop inside a decrement and branch loop. */
3146*404b540aSrobert
3147*404b540aSrobert int
c4x_label_conflict(rtx insn,rtx jump,rtx db)3148*404b540aSrobert c4x_label_conflict (rtx insn, rtx jump, rtx db)
3149*404b540aSrobert {
3150*404b540aSrobert while (insn)
3151*404b540aSrobert {
3152*404b540aSrobert if (GET_CODE (insn) == CODE_LABEL)
3153*404b540aSrobert {
3154*404b540aSrobert if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3155*404b540aSrobert return 1;
3156*404b540aSrobert if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3157*404b540aSrobert return 0;
3158*404b540aSrobert }
3159*404b540aSrobert insn = PREV_INSN (insn);
3160*404b540aSrobert }
3161*404b540aSrobert return 1;
3162*404b540aSrobert }
3163*404b540aSrobert
3164*404b540aSrobert
3165*404b540aSrobert /* Validate combination of operands for parallel load/store instructions. */
3166*404b540aSrobert
3167*404b540aSrobert int
valid_parallel_load_store(rtx * operands,enum machine_mode mode ATTRIBUTE_UNUSED)3168*404b540aSrobert valid_parallel_load_store (rtx *operands,
3169*404b540aSrobert enum machine_mode mode ATTRIBUTE_UNUSED)
3170*404b540aSrobert {
3171*404b540aSrobert rtx op0 = operands[0];
3172*404b540aSrobert rtx op1 = operands[1];
3173*404b540aSrobert rtx op2 = operands[2];
3174*404b540aSrobert rtx op3 = operands[3];
3175*404b540aSrobert
3176*404b540aSrobert if (GET_CODE (op0) == SUBREG)
3177*404b540aSrobert op0 = SUBREG_REG (op0);
3178*404b540aSrobert if (GET_CODE (op1) == SUBREG)
3179*404b540aSrobert op1 = SUBREG_REG (op1);
3180*404b540aSrobert if (GET_CODE (op2) == SUBREG)
3181*404b540aSrobert op2 = SUBREG_REG (op2);
3182*404b540aSrobert if (GET_CODE (op3) == SUBREG)
3183*404b540aSrobert op3 = SUBREG_REG (op3);
3184*404b540aSrobert
3185*404b540aSrobert /* The patterns should only allow ext_low_reg_operand() or
3186*404b540aSrobert par_ind_operand() operands. Thus of the 4 operands, only 2
3187*404b540aSrobert should be REGs and the other 2 should be MEMs. */
3188*404b540aSrobert
3189*404b540aSrobert /* This test prevents the multipack pass from using this pattern if
3190*404b540aSrobert op0 is used as an index or base register in op2 or op3, since
3191*404b540aSrobert this combination will require reloading. */
3192*404b540aSrobert if (GET_CODE (op0) == REG
3193*404b540aSrobert && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3194*404b540aSrobert || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3195*404b540aSrobert return 0;
3196*404b540aSrobert
3197*404b540aSrobert /* LDI||LDI. */
3198*404b540aSrobert if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3199*404b540aSrobert return (REGNO (op0) != REGNO (op2))
3200*404b540aSrobert && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3201*404b540aSrobert && ! c4x_address_conflict (op1, op3, 0, 0);
3202*404b540aSrobert
3203*404b540aSrobert /* STI||STI. */
3204*404b540aSrobert if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3205*404b540aSrobert return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3206*404b540aSrobert && ! c4x_address_conflict (op0, op2, 1, 1);
3207*404b540aSrobert
3208*404b540aSrobert /* LDI||STI. */
3209*404b540aSrobert if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3210*404b540aSrobert return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3211*404b540aSrobert && ! c4x_address_conflict (op1, op2, 0, 1);
3212*404b540aSrobert
3213*404b540aSrobert /* STI||LDI. */
3214*404b540aSrobert if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3215*404b540aSrobert return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3216*404b540aSrobert && ! c4x_address_conflict (op0, op3, 1, 0);
3217*404b540aSrobert
3218*404b540aSrobert return 0;
3219*404b540aSrobert }
3220*404b540aSrobert
3221*404b540aSrobert
3222*404b540aSrobert int
valid_parallel_operands_4(rtx * operands,enum machine_mode mode ATTRIBUTE_UNUSED)3223*404b540aSrobert valid_parallel_operands_4 (rtx *operands,
3224*404b540aSrobert enum machine_mode mode ATTRIBUTE_UNUSED)
3225*404b540aSrobert {
3226*404b540aSrobert rtx op0 = operands[0];
3227*404b540aSrobert rtx op2 = operands[2];
3228*404b540aSrobert
3229*404b540aSrobert if (GET_CODE (op0) == SUBREG)
3230*404b540aSrobert op0 = SUBREG_REG (op0);
3231*404b540aSrobert if (GET_CODE (op2) == SUBREG)
3232*404b540aSrobert op2 = SUBREG_REG (op2);
3233*404b540aSrobert
3234*404b540aSrobert /* This test prevents the multipack pass from using this pattern if
3235*404b540aSrobert op0 is used as an index or base register in op2, since this combination
3236*404b540aSrobert will require reloading. */
3237*404b540aSrobert if (GET_CODE (op0) == REG
3238*404b540aSrobert && GET_CODE (op2) == MEM
3239*404b540aSrobert && reg_mentioned_p (op0, XEXP (op2, 0)))
3240*404b540aSrobert return 0;
3241*404b540aSrobert
3242*404b540aSrobert return 1;
3243*404b540aSrobert }
3244*404b540aSrobert
3245*404b540aSrobert
/* Validate the operand combination for a 5-operand parallel insn.
   Exactly one of the commutative sources (operands 1 and 2) may be a
   register, and the destination must not appear in the address of the
   memory operand 3.  Return nonzero if the combination is valid.  */

int
valid_parallel_operands_5 (rtx *operands,
			   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int regs = 0;
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx op2 = operands[2];
  rtx op3 = operands[3];

  /* Look through SUBREGs.  NOTE(review): op3 is not stripped here,
     unlike op0-op2 -- confirm whether operand 3 can ever be a SUBREG
     at this point.  */
  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  /* The patterns should only allow ext_low_reg_operand() or
     par_ind_operand() operands.  Operands 1 and 2 may be commutative
     but only one of them can be a register.  */
  if (GET_CODE (op1) == REG)
    regs++;
  if (GET_CODE (op2) == REG)
    regs++;

  if (regs != 1)
    return 0;

  /* This test prevents the multipack pass from using this pattern if
     op0 is used as an index or base register in op3, since this combination
     will require reloading.  */
  if (GET_CODE (op0) == REG
      && GET_CODE (op3) == MEM
      && reg_mentioned_p (op0, XEXP (op3, 0)))
    return 0;

  return 1;
}
3284*404b540aSrobert
3285*404b540aSrobert
3286*404b540aSrobert int
valid_parallel_operands_6(rtx * operands,enum machine_mode mode ATTRIBUTE_UNUSED)3287*404b540aSrobert valid_parallel_operands_6 (rtx *operands,
3288*404b540aSrobert enum machine_mode mode ATTRIBUTE_UNUSED)
3289*404b540aSrobert {
3290*404b540aSrobert int regs = 0;
3291*404b540aSrobert rtx op0 = operands[0];
3292*404b540aSrobert rtx op1 = operands[1];
3293*404b540aSrobert rtx op2 = operands[2];
3294*404b540aSrobert rtx op4 = operands[4];
3295*404b540aSrobert rtx op5 = operands[5];
3296*404b540aSrobert
3297*404b540aSrobert if (GET_CODE (op1) == SUBREG)
3298*404b540aSrobert op1 = SUBREG_REG (op1);
3299*404b540aSrobert if (GET_CODE (op2) == SUBREG)
3300*404b540aSrobert op2 = SUBREG_REG (op2);
3301*404b540aSrobert if (GET_CODE (op4) == SUBREG)
3302*404b540aSrobert op4 = SUBREG_REG (op4);
3303*404b540aSrobert if (GET_CODE (op5) == SUBREG)
3304*404b540aSrobert op5 = SUBREG_REG (op5);
3305*404b540aSrobert
3306*404b540aSrobert /* The patterns should only allow ext_low_reg_operand() or
3307*404b540aSrobert par_ind_operand() operands. Thus of the 4 input operands, only 2
3308*404b540aSrobert should be REGs and the other 2 should be MEMs. */
3309*404b540aSrobert
3310*404b540aSrobert if (GET_CODE (op1) == REG)
3311*404b540aSrobert regs++;
3312*404b540aSrobert if (GET_CODE (op2) == REG)
3313*404b540aSrobert regs++;
3314*404b540aSrobert if (GET_CODE (op4) == REG)
3315*404b540aSrobert regs++;
3316*404b540aSrobert if (GET_CODE (op5) == REG)
3317*404b540aSrobert regs++;
3318*404b540aSrobert
3319*404b540aSrobert /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3320*404b540aSrobert Perhaps we should count the MEMs as well? */
3321*404b540aSrobert if (regs != 2)
3322*404b540aSrobert return 0;
3323*404b540aSrobert
3324*404b540aSrobert /* This test prevents the multipack pass from using this pattern if
3325*404b540aSrobert op0 is used as an index or base register in op4 or op5, since
3326*404b540aSrobert this combination will require reloading. */
3327*404b540aSrobert if (GET_CODE (op0) == REG
3328*404b540aSrobert && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3329*404b540aSrobert || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3330*404b540aSrobert return 0;
3331*404b540aSrobert
3332*404b540aSrobert return 1;
3333*404b540aSrobert }
3334*404b540aSrobert
3335*404b540aSrobert
3336*404b540aSrobert /* Validate combination of src operands. Note that the operands have
3337*404b540aSrobert been screened by the src_operand predicate. We just have to check
3338*404b540aSrobert that the combination of operands is valid. If FORCE is set, ensure
3339*404b540aSrobert that the destination regno is valid if we have a 2 operand insn. */
3340*404b540aSrobert
/* Validate combination of src operands.  Note that the operands have
   been screened by the src_operand predicate.  We just have to check
   that the combination of operands is valid.  If FORCE is set, ensure
   that the destination regno is valid if we have a 2 operand insn.  */

static int
c4x_valid_operands (enum rtx_code code, rtx *operands,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    int force)
{
  rtx op0;
  rtx op1;
  rtx op2;
  enum rtx_code code1;
  enum rtx_code code2;


  /* FIXME, why can't we tighten the operands for IF_THEN_ELSE?  */
  /* NOTE(review): the "1 ||" makes this return unconditionally true,
     so the operand-equality test is dead code.  */
  if (code == IF_THEN_ELSE)
    return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);

  /* COMPARE has only two operands; all other codes have the sources
     in operands 1 and 2.  */
  if (code == COMPARE)
    {
      op1 = operands[0];
      op2 = operands[1];
    }
  else
    {
      op1 = operands[1];
      op2 = operands[2];
    }

  op0 = operands[0];

  /* Look through SUBREGs.  */
  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  code1 = GET_CODE (op1);
  code2 = GET_CODE (op2);


  /* Two register sources are always valid.  */
  if (code1 == REG && code2 == REG)
    return 1;

  /* Two memory sources must both use restricted indirect modes.  */
  if (code1 == MEM && code2 == MEM)
    {
      if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
	return 1;
      return c4x_R_indirect (op1) && c4x_R_indirect (op2);
    }

  /* We cannot handle two MEMs or two CONSTS, etc.  */
  if (code1 == code2)
    return 0;

  if (code1 == REG)
    {
      switch (code2)
	{
	case CONST_INT:
	  /* NOTE(review): op1 is a REG on this path, yet it is passed
	     to c4x_R_indirect -- confirm this test is as intended.  */
	  if (c4x_J_constant (op2) && c4x_R_indirect (op1))
	    return 1;
	  break;

	case CONST_DOUBLE:
	  if (! c4x_H_constant (op2))
	    return 0;
	  break;

	  /* Any valid memory operand screened by src_operand is OK.  */
	case MEM:
	  break;

	default:
	  fatal_insn ("c4x_valid_operands: Internal error", op2);
	  break;
	}

      if (GET_CODE (op0) == SCRATCH)
	  return 1;

      if (!REG_P (op0))
	  return 0;

      /* Check that we have a valid destination register for a two operand
	 instruction.  */
      return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
    }


  /* Check non-commutative operators.  */
  if (code == ASHIFTRT || code == LSHIFTRT
      || code == ASHIFT || code == COMPARE)
    return code2 == REG
      && (c4x_S_indirect (op1) || c4x_R_indirect (op1));


  /* Assume MINUS is commutative since the subtract patterns
     also support the reverse subtract instructions.  Since op1
     is not a register, and op2 is a register, op1 can only
     be a restricted memory operand for a shift instruction.  */
  if (code2 == REG)
    {
      switch (code1)
	{
	case CONST_INT:
	  break;

	case CONST_DOUBLE:
	  if (! c4x_H_constant (op1))
	    return 0;
	  break;

	  /* Any valid memory operand screened by src_operand is OK.  */
	case MEM:
	  break;

	default:
	  abort ();
	  break;
	}

      if (GET_CODE (op0) == SCRATCH)
	return 1;

      if (!REG_P (op0))
	return 0;

      /* Check that we have a valid destination register for a two operand
	 instruction.  */
      return ! force || REGNO (op1) == REGNO (op0);
    }

  /* Neither source is a register: constant + restricted indirect.  */
  if (c4x_J_constant (op1) && c4x_R_indirect (op2))
    return 1;

  return 0;
}
3478*404b540aSrobert
3479*404b540aSrobert
valid_operands(enum rtx_code code,rtx * operands,enum machine_mode mode)3480*404b540aSrobert int valid_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3481*404b540aSrobert {
3482*404b540aSrobert
3483*404b540aSrobert /* If we are not optimizing then we have to let anything go and let
3484*404b540aSrobert reload fix things up. instantiate_decl in function.c can produce
3485*404b540aSrobert invalid insns by changing the offset of a memory operand from a
3486*404b540aSrobert valid one into an invalid one, when the second operand is also a
3487*404b540aSrobert memory operand. The alternative is not to allow two memory
3488*404b540aSrobert operands for an insn when not optimizing. The problem only rarely
3489*404b540aSrobert occurs, for example with the C-torture program DFcmp.c. */
3490*404b540aSrobert
3491*404b540aSrobert return ! optimize || c4x_valid_operands (code, operands, mode, 0);
3492*404b540aSrobert }
3493*404b540aSrobert
3494*404b540aSrobert
/* Massage OPERANDS for the rtx code CODE in MODE so the resulting
   insn satisfies c4x_valid_operands, forcing constants or operands
   into registers where required.  Always returns 1.  */

int
legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
{
  /* Compare only has 2 operands.  */
  if (code == COMPARE)
    {
      /* During RTL generation, force constants into pseudos so that
	 they can get hoisted out of loops.  This will tie up an extra
	 register but can save an extra cycle.  Only do this if loop
	 optimization enabled.  (We cannot pull this trick for add and
	 sub instructions since the flow pass won't find
	 autoincrements etc.)  This allows us to generate compare
	 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
	 of LDI *AR0++, R0; CMPI 42, R0.

	 Note that expand_binops will try to load an expensive constant
	 into a register if it is used within a loop.  Unfortunately,
	 the cost mechanism doesn't allow us to look at the other
	 operand to decide whether the constant is expensive.  */

      if (! reload_in_progress
	  && TARGET_HOIST
	  && optimize > 0
	  && GET_CODE (operands[1]) == CONST_INT
	  && rtx_cost (operands[1], code) > 1)
	operands[1] = force_reg (mode, operands[1]);

      /* If the combination is still invalid, force the first operand
	 into a register as well.  */
      if (! reload_in_progress
	  && ! c4x_valid_operands (code, operands, mode, 0))
	operands[0] = force_reg (mode, operands[0]);
      return 1;
    }

  /* We cannot do this for ADDI/SUBI insns since we will
     defeat the flow pass from finding autoincrement addressing
     opportunities.  */
  if (! reload_in_progress
      && ! ((code == PLUS || code == MINUS) && mode == Pmode)
      && TARGET_HOIST
      && optimize > 1
      && GET_CODE (operands[2]) == CONST_INT
      && rtx_cost (operands[2], code) > 1)
    operands[2] = force_reg (mode, operands[2]);

  /* We can get better code on a C30 if we force constant shift counts
     into a register.  This way they can get hoisted out of loops,
     tying up a register but saving an instruction.  The downside is
     that they may get allocated to an address or index register, and
     thus we will get a pipeline conflict if there is a nearby
     indirect address using an address register.

     Note that expand_binops will not try to load an expensive constant
     into a register if it is used within a loop for a shift insn.  */

  if (! reload_in_progress
      && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
    {
      /* If the operand combination is invalid, we force operand1 into a
	 register, preventing reload from having to do this at a
	 later stage.  */
      operands[1] = force_reg (mode, operands[1]);
      if (TARGET_FORCE)
	{
	  /* Move through the destination so operand1 and operand0
	     share the same register (two-operand form).  */
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = copy_rtx (operands[0]);
	}
      else
	{
	  /* Just in case...  */
	  if (! c4x_valid_operands (code, operands, mode, 0))
	    operands[2] = force_reg (mode, operands[2]);
	}
    }

  /* Right shifts require a negative shift count, but GCC expects
     a positive count, so we emit a NEG.  */
  if ((code == ASHIFTRT || code == LSHIFTRT)
      && (GET_CODE (operands[2]) != CONST_INT))
    operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));


  /* When the shift count is greater than 32 then the result
     can be implementation dependent.  We truncate the result to
     fit in 5 bits so that we do not emit invalid code when
     optimizing---such as trying to generate lhu2 with 20021124-1.c.  */
  if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
      && (GET_CODE (operands[2]) == CONST_INT))
      && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
    operands[2]
      = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));

  return 1;
}
3588*404b540aSrobert
3589*404b540aSrobert
3590*404b540aSrobert /* The following predicates are used for instruction scheduling. */
3591*404b540aSrobert
3592*404b540aSrobert int
group1_reg_operand(rtx op,enum machine_mode mode)3593*404b540aSrobert group1_reg_operand (rtx op, enum machine_mode mode)
3594*404b540aSrobert {
3595*404b540aSrobert if (mode != VOIDmode && mode != GET_MODE (op))
3596*404b540aSrobert return 0;
3597*404b540aSrobert if (GET_CODE (op) == SUBREG)
3598*404b540aSrobert op = SUBREG_REG (op);
3599*404b540aSrobert return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
3600*404b540aSrobert }
3601*404b540aSrobert
3602*404b540aSrobert
3603*404b540aSrobert int
group1_mem_operand(rtx op,enum machine_mode mode)3604*404b540aSrobert group1_mem_operand (rtx op, enum machine_mode mode)
3605*404b540aSrobert {
3606*404b540aSrobert if (mode != VOIDmode && mode != GET_MODE (op))
3607*404b540aSrobert return 0;
3608*404b540aSrobert
3609*404b540aSrobert if (GET_CODE (op) == MEM)
3610*404b540aSrobert {
3611*404b540aSrobert op = XEXP (op, 0);
3612*404b540aSrobert if (GET_CODE (op) == PLUS)
3613*404b540aSrobert {
3614*404b540aSrobert rtx op0 = XEXP (op, 0);
3615*404b540aSrobert rtx op1 = XEXP (op, 1);
3616*404b540aSrobert
3617*404b540aSrobert if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
3618*404b540aSrobert || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
3619*404b540aSrobert return 1;
3620*404b540aSrobert }
3621*404b540aSrobert else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
3622*404b540aSrobert return 1;
3623*404b540aSrobert }
3624*404b540aSrobert
3625*404b540aSrobert return 0;
3626*404b540aSrobert }
3627*404b540aSrobert
3628*404b540aSrobert
3629*404b540aSrobert /* Return true if any one of the address registers. */
3630*404b540aSrobert
3631*404b540aSrobert int
arx_reg_operand(rtx op,enum machine_mode mode)3632*404b540aSrobert arx_reg_operand (rtx op, enum machine_mode mode)
3633*404b540aSrobert {
3634*404b540aSrobert if (mode != VOIDmode && mode != GET_MODE (op))
3635*404b540aSrobert return 0;
3636*404b540aSrobert if (GET_CODE (op) == SUBREG)
3637*404b540aSrobert op = SUBREG_REG (op);
3638*404b540aSrobert return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
3639*404b540aSrobert }
3640*404b540aSrobert
3641*404b540aSrobert
3642*404b540aSrobert static int
c4x_arn_reg_operand(rtx op,enum machine_mode mode,unsigned int regno)3643*404b540aSrobert c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
3644*404b540aSrobert {
3645*404b540aSrobert if (mode != VOIDmode && mode != GET_MODE (op))
3646*404b540aSrobert return 0;
3647*404b540aSrobert if (GET_CODE (op) == SUBREG)
3648*404b540aSrobert op = SUBREG_REG (op);
3649*404b540aSrobert return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
3650*404b540aSrobert }
3651*404b540aSrobert
3652*404b540aSrobert
/* Helper: nonzero if OP is a MEM whose address uses (after reload)
   hard register REGNO.  The address may be a plain register, an
   autoincremented register, a PRE/POST_MODIFY, or a PLUS with a
   register operand.  */

static int
c4x_arn_mem_operand (rtx op, enum machine_mode mode, unsigned int regno)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      /* Examine the address expression.  */
      op = XEXP (op, 0);
      switch (GET_CODE (op))
	{
	case PRE_DEC:
	case POST_DEC:
	case PRE_INC:
	case POST_INC:
	  /* Strip the autoincrement; OP is now the register itself.  */
	  op = XEXP (op, 0);

	  /* Fall through.  */
	case REG:
	  return REG_P (op) && (! reload_completed || (REGNO (op) == regno));

	case PRE_MODIFY:
	case POST_MODIFY:
	  /* Check the register being modified ...  */
	  if (REG_P (XEXP (op, 0)) && (! reload_completed
				       || (REGNO (XEXP (op, 0)) == regno)))
	    return 1;
	  /* ... and the second operand of the modification value
	     (XEXP (op, 1) is e.g. a PLUS).  */
	  if (REG_P (XEXP (XEXP (op, 1), 1))
	      && (! reload_completed
		  || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
	    return 1;
	  break;

	case PLUS:
	  {
	    rtx op0 = XEXP (op, 0);
	    rtx op1 = XEXP (op, 1);

	    /* Either operand of the PLUS may be the register.  */
	    if ((REG_P (op0) && (! reload_completed
				 || (REGNO (op0) == regno)))
		|| (REG_P (op1) && (! reload_completed
				    || (REGNO (op1) == regno))))
	      return 1;
	  }
	  break;

	default:
	  break;
	}
    }
  return 0;
}
3703*404b540aSrobert
3704*404b540aSrobert
/* Nonzero if OP is a register; after reload it must be AR0.  */

int
ar0_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR0_REGNO);
}
3710*404b540aSrobert
3711*404b540aSrobert
/* Nonzero if OP is a MEM whose address uses AR0 (after reload).  */

int
ar0_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR0_REGNO);
}
3717*404b540aSrobert
3718*404b540aSrobert
/* Nonzero if OP is a register; after reload it must be AR1.  */

int
ar1_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR1_REGNO);
}
3724*404b540aSrobert
3725*404b540aSrobert
/* Nonzero if OP is a MEM whose address uses AR1 (after reload).  */

int
ar1_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR1_REGNO);
}
3731*404b540aSrobert
3732*404b540aSrobert
/* Nonzero if OP is a register; after reload it must be AR2.  */

int
ar2_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR2_REGNO);
}
3738*404b540aSrobert
3739*404b540aSrobert
/* Nonzero if OP is a MEM whose address uses AR2 (after reload).  */

int
ar2_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR2_REGNO);
}
3745*404b540aSrobert
3746*404b540aSrobert
/* Nonzero if OP is a register; after reload it must be AR3.  */

int
ar3_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR3_REGNO);
}
3752*404b540aSrobert
3753*404b540aSrobert
/* Nonzero if OP is a MEM whose address uses AR3 (after reload).  */

int
ar3_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR3_REGNO);
}
3759*404b540aSrobert
3760*404b540aSrobert
/* Nonzero if OP is a register; after reload it must be AR4.  */

int
ar4_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR4_REGNO);
}
3766*404b540aSrobert
3767*404b540aSrobert
/* Nonzero if OP is a MEM whose address uses AR4 (after reload).  */

int
ar4_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR4_REGNO);
}
3773*404b540aSrobert
3774*404b540aSrobert
/* Nonzero if OP is a register; after reload it must be AR5.  */

int
ar5_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR5_REGNO);
}
3780*404b540aSrobert
3781*404b540aSrobert
/* Nonzero if OP is a MEM whose address uses AR5 (after reload).  */

int
ar5_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR5_REGNO);
}
3787*404b540aSrobert
3788*404b540aSrobert
/* Nonzero if OP is a register; after reload it must be AR6.  */

int
ar6_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR6_REGNO);
}
3794*404b540aSrobert
3795*404b540aSrobert
/* Nonzero if OP is a MEM whose address uses AR6 (after reload).  */

int
ar6_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR6_REGNO);
}
3801*404b540aSrobert
3802*404b540aSrobert
/* Nonzero if OP is a register; after reload it must be AR7.  */

int
ar7_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR7_REGNO);
}
3808*404b540aSrobert
3809*404b540aSrobert
/* Nonzero if OP is a MEM whose address uses AR7 (after reload).  */

int
ar7_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR7_REGNO);
}
3815*404b540aSrobert
3816*404b540aSrobert
/* Nonzero if OP is a register; after reload it must be IR0.  */

int
ir0_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, IR0_REGNO);
}
3822*404b540aSrobert
3823*404b540aSrobert
/* Nonzero if OP is a MEM whose address uses IR0 (after reload).  */

int
ir0_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, IR0_REGNO);
}
3829*404b540aSrobert
3830*404b540aSrobert
/* Nonzero if OP is a register; after reload it must be IR1.  */

int
ir1_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, IR1_REGNO);
}
3836*404b540aSrobert
3837*404b540aSrobert
/* Nonzero if OP is a MEM whose address uses IR1 (after reload).  */

int
ir1_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, IR1_REGNO);
}
3843*404b540aSrobert
3844*404b540aSrobert
3845*404b540aSrobert /* This is similar to operand_subword but allows autoincrement
3846*404b540aSrobert addressing. */
3847*404b540aSrobert
3848*404b540aSrobert rtx
c4x_operand_subword(rtx op,int i,int validate_address,enum machine_mode mode)3849*404b540aSrobert c4x_operand_subword (rtx op, int i, int validate_address,
3850*404b540aSrobert enum machine_mode mode)
3851*404b540aSrobert {
3852*404b540aSrobert if (mode != HImode && mode != HFmode)
3853*404b540aSrobert fatal_insn ("c4x_operand_subword: invalid mode", op);
3854*404b540aSrobert
3855*404b540aSrobert if (mode == HFmode && REG_P (op))
3856*404b540aSrobert fatal_insn ("c4x_operand_subword: invalid operand", op);
3857*404b540aSrobert
3858*404b540aSrobert if (GET_CODE (op) == MEM)
3859*404b540aSrobert {
3860*404b540aSrobert enum rtx_code code = GET_CODE (XEXP (op, 0));
3861*404b540aSrobert enum machine_mode mode = GET_MODE (XEXP (op, 0));
3862*404b540aSrobert enum machine_mode submode;
3863*404b540aSrobert
3864*404b540aSrobert submode = mode;
3865*404b540aSrobert if (mode == HImode)
3866*404b540aSrobert submode = QImode;
3867*404b540aSrobert else if (mode == HFmode)
3868*404b540aSrobert submode = QFmode;
3869*404b540aSrobert
3870*404b540aSrobert switch (code)
3871*404b540aSrobert {
3872*404b540aSrobert case POST_INC:
3873*404b540aSrobert case PRE_INC:
3874*404b540aSrobert return gen_rtx_MEM (submode, XEXP (op, 0));
3875*404b540aSrobert
3876*404b540aSrobert case POST_DEC:
3877*404b540aSrobert case PRE_DEC:
3878*404b540aSrobert case PRE_MODIFY:
3879*404b540aSrobert case POST_MODIFY:
3880*404b540aSrobert /* We could handle these with some difficulty.
3881*404b540aSrobert e.g., *p-- => *(p-=2); *(p+1). */
3882*404b540aSrobert fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
3883*404b540aSrobert
3884*404b540aSrobert case SYMBOL_REF:
3885*404b540aSrobert case LABEL_REF:
3886*404b540aSrobert case CONST:
3887*404b540aSrobert case CONST_INT:
3888*404b540aSrobert fatal_insn ("c4x_operand_subword: invalid address", op);
3889*404b540aSrobert
3890*404b540aSrobert /* Even though offsettable_address_p considers (MEM
3891*404b540aSrobert (LO_SUM)) to be offsettable, it is not safe if the
3892*404b540aSrobert address is at the end of the data page since we also have
3893*404b540aSrobert to fix up the associated high PART. In this case where
3894*404b540aSrobert we are trying to split a HImode or HFmode memory
3895*404b540aSrobert reference, we would have to emit another insn to reload a
3896*404b540aSrobert new HIGH value. It's easier to disable LO_SUM memory references
3897*404b540aSrobert in HImode or HFmode and we probably get better code. */
3898*404b540aSrobert case LO_SUM:
3899*404b540aSrobert fatal_insn ("c4x_operand_subword: address not offsettable", op);
3900*404b540aSrobert
3901*404b540aSrobert default:
3902*404b540aSrobert break;
3903*404b540aSrobert }
3904*404b540aSrobert }
3905*404b540aSrobert
3906*404b540aSrobert return operand_subword (op, i, validate_address, mode);
3907*404b540aSrobert }
3908*404b540aSrobert
/* Singly-linked list node used to record symbol names.  NAME is
   stored without copying, so the pointer must outlive the list.  */
struct name_list
{
  struct name_list *next;
  const char *name;
};

/* Names that have been declared global (see c4x_global_label).  */
static struct name_list *global_head;
/* Names referenced externally but not (yet) global; these get .ref
   directives at the end of the file (see c4x_file_end).  */
static struct name_list *extern_head;
3917*404b540aSrobert
3918*404b540aSrobert
3919*404b540aSrobert /* Add NAME to list of global symbols and remove from external list if
3920*404b540aSrobert present on external list. */
3921*404b540aSrobert
3922*404b540aSrobert void
c4x_global_label(const char * name)3923*404b540aSrobert c4x_global_label (const char *name)
3924*404b540aSrobert {
3925*404b540aSrobert struct name_list *p, *last;
3926*404b540aSrobert
3927*404b540aSrobert /* Do not insert duplicate names, so linearly search through list of
3928*404b540aSrobert existing names. */
3929*404b540aSrobert p = global_head;
3930*404b540aSrobert while (p)
3931*404b540aSrobert {
3932*404b540aSrobert if (strcmp (p->name, name) == 0)
3933*404b540aSrobert return;
3934*404b540aSrobert p = p->next;
3935*404b540aSrobert }
3936*404b540aSrobert p = (struct name_list *) xmalloc (sizeof *p);
3937*404b540aSrobert p->next = global_head;
3938*404b540aSrobert p->name = name;
3939*404b540aSrobert global_head = p;
3940*404b540aSrobert
3941*404b540aSrobert /* Remove this name from ref list if present. */
3942*404b540aSrobert last = NULL;
3943*404b540aSrobert p = extern_head;
3944*404b540aSrobert while (p)
3945*404b540aSrobert {
3946*404b540aSrobert if (strcmp (p->name, name) == 0)
3947*404b540aSrobert {
3948*404b540aSrobert if (last)
3949*404b540aSrobert last->next = p->next;
3950*404b540aSrobert else
3951*404b540aSrobert extern_head = p->next;
3952*404b540aSrobert break;
3953*404b540aSrobert }
3954*404b540aSrobert last = p;
3955*404b540aSrobert p = p->next;
3956*404b540aSrobert }
3957*404b540aSrobert }
3958*404b540aSrobert
3959*404b540aSrobert
3960*404b540aSrobert /* Add NAME to list of external symbols. */
3961*404b540aSrobert
3962*404b540aSrobert void
c4x_external_ref(const char * name)3963*404b540aSrobert c4x_external_ref (const char *name)
3964*404b540aSrobert {
3965*404b540aSrobert struct name_list *p;
3966*404b540aSrobert
3967*404b540aSrobert /* Do not insert duplicate names. */
3968*404b540aSrobert p = extern_head;
3969*404b540aSrobert while (p)
3970*404b540aSrobert {
3971*404b540aSrobert if (strcmp (p->name, name) == 0)
3972*404b540aSrobert return;
3973*404b540aSrobert p = p->next;
3974*404b540aSrobert }
3975*404b540aSrobert
3976*404b540aSrobert /* Do not insert ref if global found. */
3977*404b540aSrobert p = global_head;
3978*404b540aSrobert while (p)
3979*404b540aSrobert {
3980*404b540aSrobert if (strcmp (p->name, name) == 0)
3981*404b540aSrobert return;
3982*404b540aSrobert p = p->next;
3983*404b540aSrobert }
3984*404b540aSrobert p = (struct name_list *) xmalloc (sizeof *p);
3985*404b540aSrobert p->next = extern_head;
3986*404b540aSrobert p->name = name;
3987*404b540aSrobert extern_head = p;
3988*404b540aSrobert }
3989*404b540aSrobert
3990*404b540aSrobert /* We need to have a data section we can identify so that we can set
3991*404b540aSrobert the DP register back to a data pointer in the small memory model.
3992*404b540aSrobert This is only required for ISRs if we are paranoid that someone
3993*404b540aSrobert may have quietly changed this register on the sly. */
3994*404b540aSrobert static void
c4x_file_start(void)3995*404b540aSrobert c4x_file_start (void)
3996*404b540aSrobert {
3997*404b540aSrobert default_file_start ();
3998*404b540aSrobert fprintf (asm_out_file, "\t.version\t%d\n", c4x_cpu_version);
3999*404b540aSrobert fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
4000*404b540aSrobert }
4001*404b540aSrobert
4002*404b540aSrobert
4003*404b540aSrobert static void
c4x_file_end(void)4004*404b540aSrobert c4x_file_end (void)
4005*404b540aSrobert {
4006*404b540aSrobert struct name_list *p;
4007*404b540aSrobert
4008*404b540aSrobert /* Output all external names that are not global. */
4009*404b540aSrobert p = extern_head;
4010*404b540aSrobert while (p)
4011*404b540aSrobert {
4012*404b540aSrobert fprintf (asm_out_file, "\t.ref\t");
4013*404b540aSrobert assemble_name (asm_out_file, p->name);
4014*404b540aSrobert fprintf (asm_out_file, "\n");
4015*404b540aSrobert p = p->next;
4016*404b540aSrobert }
4017*404b540aSrobert fprintf (asm_out_file, "\t.end\n");
4018*404b540aSrobert }
4019*404b540aSrobert
4020*404b540aSrobert
/* Search LIST (a TREE_LIST keyed by identifier) for an entry whose
   purpose matches DECL's name; if found, prepend an ATTRIB attribute
   carrying that entry's value to *ATTRIBUTES.
   NOTE(review): the comparison is on the IDENTIFIER_POINTER values
   themselves, not strcmp — this presumably relies on identifier
   strings being interned/shared; confirm against how the lists
   (code_tree etc.) are built.  */
static void
c4x_check_attribute (const char *attrib, tree list, tree decl, tree *attributes)
{
  while (list != NULL_TREE
	 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
	 != IDENTIFIER_POINTER (DECL_NAME (decl)))
    list = TREE_CHAIN (list);
  if (list)
    *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
			     *attributes);
}
4032*404b540aSrobert
4033*404b540aSrobert
4034*404b540aSrobert static void
c4x_insert_attributes(tree decl,tree * attributes)4035*404b540aSrobert c4x_insert_attributes (tree decl, tree *attributes)
4036*404b540aSrobert {
4037*404b540aSrobert switch (TREE_CODE (decl))
4038*404b540aSrobert {
4039*404b540aSrobert case FUNCTION_DECL:
4040*404b540aSrobert c4x_check_attribute ("section", code_tree, decl, attributes);
4041*404b540aSrobert c4x_check_attribute ("const", pure_tree, decl, attributes);
4042*404b540aSrobert c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4043*404b540aSrobert c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4044*404b540aSrobert c4x_check_attribute ("naked", naked_tree, decl, attributes);
4045*404b540aSrobert break;
4046*404b540aSrobert
4047*404b540aSrobert case VAR_DECL:
4048*404b540aSrobert c4x_check_attribute ("section", data_tree, decl, attributes);
4049*404b540aSrobert break;
4050*404b540aSrobert
4051*404b540aSrobert default:
4052*404b540aSrobert break;
4053*404b540aSrobert }
4054*404b540aSrobert }
4055*404b540aSrobert
/* Table of valid machine attributes.  All three attributes require a
   FUNCTION_TYPE and share the same handler.  */
const struct attribute_spec c4x_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
  { "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute },
  { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
  /* Sentinel marking the end of the table.  */
  { NULL, 0, 0, false, false, false, NULL }
};
4065*404b540aSrobert
4066*404b540aSrobert /* Handle an attribute requiring a FUNCTION_TYPE;
4067*404b540aSrobert arguments as in struct attribute_spec.handler. */
4068*404b540aSrobert static tree
c4x_handle_fntype_attribute(tree * node,tree name,tree args ATTRIBUTE_UNUSED,int flags ATTRIBUTE_UNUSED,bool * no_add_attrs)4069*404b540aSrobert c4x_handle_fntype_attribute (tree *node, tree name,
4070*404b540aSrobert tree args ATTRIBUTE_UNUSED,
4071*404b540aSrobert int flags ATTRIBUTE_UNUSED,
4072*404b540aSrobert bool *no_add_attrs)
4073*404b540aSrobert {
4074*404b540aSrobert if (TREE_CODE (*node) != FUNCTION_TYPE)
4075*404b540aSrobert {
4076*404b540aSrobert warning (OPT_Wattributes, "%qs attribute only applies to functions",
4077*404b540aSrobert IDENTIFIER_POINTER (name));
4078*404b540aSrobert *no_add_attrs = true;
4079*404b540aSrobert }
4080*404b540aSrobert
4081*404b540aSrobert return NULL_TREE;
4082*404b540aSrobert }
4083*404b540aSrobert
4084*404b540aSrobert
/* !!! FIXME to emit RPTS correctly.  */

/* Return nonzero if the repeat block starting after INSN may be
   emitted as an RPTS: the block must be a label followed by a single
   insn and then the rptb_end insn.  OP is the repeat count.  */

int
c4x_rptb_rpts_p (rtx insn, rtx op)
{
  /* The next insn should be our label marking where the
     repeat block starts.  */
  insn = NEXT_INSN (insn);
  if (GET_CODE (insn) != CODE_LABEL)
    {
      /* Some insns may have been shifted between the RPTB insn
	 and the top label... They were probably destined to
	 be moved out of the loop.  For now, let's leave them
	 where they are and print a warning.  We should
	 probably move these insns before the repeat block insn.  */
      if (TARGET_DEBUG)
	fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved",
		    insn);
      return 0;
    }

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* This should be our first insn in the loop.  */
  if (! INSN_P (insn))
    return 0;

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* This should now be the rptb_end insn; a longer body disqualifies
     the block from RPTS.  */
  if (! INSN_P (insn))
    return 0;

  if (recog_memoized (insn) != CODE_FOR_rptb_end)
    return 0;

  /* RPTS is unconditionally allowed when enabled; otherwise only for
     a constant count accepted by TARGET_RPTS_CYCLES.  */
  if (TARGET_RPTS)
    return 1;

  return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
}
4127*404b540aSrobert
4128*404b540aSrobert
/* Check if register r11 is used as the destination of an insn.  */

/* Recursively walk X and return 1 if register R11 appears as a SET
   destination anywhere within it, else 0.  For an insn whose pattern
   is a SEQUENCE only the last element is examined; for a single-set
   insn the search is narrowed to the SET destination.  */

static int
c4x_r11_set_p(rtx x)
{
  rtx set;
  int i, j;
  const char *fmt;

  if (x == 0)
    return 0;

  /* A SEQUENCE: look only at its final insn.  */
  if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
    x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);

  /* A simple insn: only its destination can set r11.  */
  if (INSN_P (x) && (set = single_set (x)))
    x = SET_DEST (set);

  if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
    return 1;

  /* Otherwise recurse into every sub-expression of X, using the rtx
     format string to find the 'e' (rtx) and 'E' (rtx vector) slots.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (c4x_r11_set_p (XEXP (x, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (c4x_r11_set_p (XVECEXP (x, i, j)))
	    return 1;
    }
  return 0;
}
4165*404b540aSrobert
4166*404b540aSrobert
4167*404b540aSrobert /* The c4x sometimes has a problem when the insn before the laj insn
4168*404b540aSrobert sets the r11 register. Check for this situation. */
4169*404b540aSrobert
4170*404b540aSrobert int
c4x_check_laj_p(rtx insn)4171*404b540aSrobert c4x_check_laj_p (rtx insn)
4172*404b540aSrobert {
4173*404b540aSrobert insn = prev_nonnote_insn (insn);
4174*404b540aSrobert
4175*404b540aSrobert /* If this is the start of the function no nop is needed. */
4176*404b540aSrobert if (insn == 0)
4177*404b540aSrobert return 0;
4178*404b540aSrobert
4179*404b540aSrobert /* If the previous insn is a code label we have to insert a nop. This
4180*404b540aSrobert could be a jump or table jump. We can find the normal jumps by
4181*404b540aSrobert scanning the function but this will not find table jumps. */
4182*404b540aSrobert if (GET_CODE (insn) == CODE_LABEL)
4183*404b540aSrobert return 1;
4184*404b540aSrobert
4185*404b540aSrobert /* If the previous insn sets register r11 we have to insert a nop. */
4186*404b540aSrobert if (c4x_r11_set_p (insn))
4187*404b540aSrobert return 1;
4188*404b540aSrobert
4189*404b540aSrobert /* No nop needed. */
4190*404b540aSrobert return 0;
4191*404b540aSrobert }
4192*404b540aSrobert
4193*404b540aSrobert
4194*404b540aSrobert /* Adjust the cost of a scheduling dependency. Return the new cost of
4195*404b540aSrobert a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4196*404b540aSrobert A set of an address register followed by a use occurs a 2 cycle
4197*404b540aSrobert stall (reduced to a single cycle on the c40 using LDA), while
4198*404b540aSrobert a read of an address register followed by a use occurs a single cycle. */
4199*404b540aSrobert
/* Stall cycles charged by c4x_adjust_cost, keyed by how the address
   register involved in the dependency was produced.  */
#define SET_USE_COST 3		/* Set by an ordinary insn, then used.  */
#define SETLDA_USE_COST 2	/* Set via LDA, then used.  */
#define READ_USE_COST 2		/* Read, then used.  */

/* Return the adjusted cost of dependency LINK between INSN and
   DEP_INSN; COST is the default cost from the md file.  */
static int
c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  /* Don't worry about this until we know what registers have been
     assigned.  */
  if (flag_schedule_insns == 0 && ! reload_completed)
    return 0;

  /* How do we handle dependencies where a read followed by another
     read causes a pipeline stall?  For example, a read of ar0 followed
     by the use of ar0 for a memory reference.  It looks like we
     need to extend the scheduler to handle this case.  */

  /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
     (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
     so only deal with insns we know about.  */
  if (recog_memoized (dep_insn) < 0)
    return 0;

  /* A zero note kind marks a true (read-after-write) dependency.  */
  if (REG_NOTE_KIND (link) == 0)
    {
      /* Largest stall found by the attribute checks below.  */
      int max = 0;

      /* Data dependency; DEP_INSN writes a register that INSN reads some
	 cycles later.  */
      if (TARGET_C3X)
	{
	  if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;
	}
      else
	{
	  /* This could be significantly optimized. We should look
	     to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
	     insn uses ar0-ar7.  We then test if the same register
	     is used.  The tricky bit is that some operands will
	     use several registers...  */
	  /* The same set/setlda/read check is repeated for each of
	     AR0-AR7, and a set/setlda check for IR0-IR1.  */
	  if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;

	  if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	}

      if (max)
	cost = max;

      /* For other data dependencies, the default cost specified in the
	 md is correct.  */
      return cost;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
    {
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
	 cycles later.  */

      /* For c4x anti dependencies, the cost is 0.  */
      return 0;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    {
      /* Output dependency; DEP_INSN writes a register that INSN writes some
	 cycles later.  */

      /* For c4x output dependencies, the cost is 0.  */
      return 0;
    }
  else
    abort ();
}
4336*404b540aSrobert
4337*404b540aSrobert void
c4x_init_builtins(void)4338*404b540aSrobert c4x_init_builtins (void)
4339*404b540aSrobert {
4340*404b540aSrobert tree endlink = void_list_node;
4341*404b540aSrobert
4342*404b540aSrobert lang_hooks.builtin_function ("fast_ftoi",
4343*404b540aSrobert build_function_type
4344*404b540aSrobert (integer_type_node,
4345*404b540aSrobert tree_cons (NULL_TREE, double_type_node,
4346*404b540aSrobert endlink)),
4347*404b540aSrobert C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
4348*404b540aSrobert lang_hooks.builtin_function ("ansi_ftoi",
4349*404b540aSrobert build_function_type
4350*404b540aSrobert (integer_type_node,
4351*404b540aSrobert tree_cons (NULL_TREE, double_type_node,
4352*404b540aSrobert endlink)),
4353*404b540aSrobert C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL,
4354*404b540aSrobert NULL_TREE);
4355*404b540aSrobert if (TARGET_C3X)
4356*404b540aSrobert lang_hooks.builtin_function ("fast_imult",
4357*404b540aSrobert build_function_type
4358*404b540aSrobert (integer_type_node,
4359*404b540aSrobert tree_cons (NULL_TREE, integer_type_node,
4360*404b540aSrobert tree_cons (NULL_TREE,
4361*404b540aSrobert integer_type_node,
4362*404b540aSrobert endlink))),
4363*404b540aSrobert C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL,
4364*404b540aSrobert NULL_TREE);
4365*404b540aSrobert else
4366*404b540aSrobert {
4367*404b540aSrobert lang_hooks.builtin_function ("toieee",
4368*404b540aSrobert build_function_type
4369*404b540aSrobert (double_type_node,
4370*404b540aSrobert tree_cons (NULL_TREE, double_type_node,
4371*404b540aSrobert endlink)),
4372*404b540aSrobert C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL,
4373*404b540aSrobert NULL_TREE);
4374*404b540aSrobert lang_hooks.builtin_function ("frieee",
4375*404b540aSrobert build_function_type
4376*404b540aSrobert (double_type_node,
4377*404b540aSrobert tree_cons (NULL_TREE, double_type_node,
4378*404b540aSrobert endlink)),
4379*404b540aSrobert C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL,
4380*404b540aSrobert NULL_TREE);
4381*404b540aSrobert lang_hooks.builtin_function ("fast_invf",
4382*404b540aSrobert build_function_type
4383*404b540aSrobert (double_type_node,
4384*404b540aSrobert tree_cons (NULL_TREE, double_type_node,
4385*404b540aSrobert endlink)),
4386*404b540aSrobert C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL,
4387*404b540aSrobert NULL_TREE);
4388*404b540aSrobert }
4389*404b540aSrobert }
4390*404b540aSrobert
4391*404b540aSrobert
4392*404b540aSrobert rtx
c4x_expand_builtin(tree exp,rtx target,rtx subtarget ATTRIBUTE_UNUSED,enum machine_mode mode ATTRIBUTE_UNUSED,int ignore ATTRIBUTE_UNUSED)4393*404b540aSrobert c4x_expand_builtin (tree exp, rtx target,
4394*404b540aSrobert rtx subtarget ATTRIBUTE_UNUSED,
4395*404b540aSrobert enum machine_mode mode ATTRIBUTE_UNUSED,
4396*404b540aSrobert int ignore ATTRIBUTE_UNUSED)
4397*404b540aSrobert {
4398*404b540aSrobert tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4399*404b540aSrobert unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4400*404b540aSrobert tree arglist = TREE_OPERAND (exp, 1);
4401*404b540aSrobert tree arg0, arg1;
4402*404b540aSrobert rtx r0, r1;
4403*404b540aSrobert
4404*404b540aSrobert switch (fcode)
4405*404b540aSrobert {
4406*404b540aSrobert case C4X_BUILTIN_FIX:
4407*404b540aSrobert arg0 = TREE_VALUE (arglist);
4408*404b540aSrobert r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4409*404b540aSrobert if (! target || ! register_operand (target, QImode))
4410*404b540aSrobert target = gen_reg_rtx (QImode);
4411*404b540aSrobert emit_insn (gen_fixqfqi_clobber (target, r0));
4412*404b540aSrobert return target;
4413*404b540aSrobert
4414*404b540aSrobert case C4X_BUILTIN_FIX_ANSI:
4415*404b540aSrobert arg0 = TREE_VALUE (arglist);
4416*404b540aSrobert r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4417*404b540aSrobert if (! target || ! register_operand (target, QImode))
4418*404b540aSrobert target = gen_reg_rtx (QImode);
4419*404b540aSrobert emit_insn (gen_fix_truncqfqi2 (target, r0));
4420*404b540aSrobert return target;
4421*404b540aSrobert
4422*404b540aSrobert case C4X_BUILTIN_MPYI:
4423*404b540aSrobert if (! TARGET_C3X)
4424*404b540aSrobert break;
4425*404b540aSrobert arg0 = TREE_VALUE (arglist);
4426*404b540aSrobert arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4427*404b540aSrobert r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
4428*404b540aSrobert r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
4429*404b540aSrobert if (! target || ! register_operand (target, QImode))
4430*404b540aSrobert target = gen_reg_rtx (QImode);
4431*404b540aSrobert emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
4432*404b540aSrobert return target;
4433*404b540aSrobert
4434*404b540aSrobert case C4X_BUILTIN_TOIEEE:
4435*404b540aSrobert if (TARGET_C3X)
4436*404b540aSrobert break;
4437*404b540aSrobert arg0 = TREE_VALUE (arglist);
4438*404b540aSrobert r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4439*404b540aSrobert if (! target || ! register_operand (target, QFmode))
4440*404b540aSrobert target = gen_reg_rtx (QFmode);
4441*404b540aSrobert emit_insn (gen_toieee (target, r0));
4442*404b540aSrobert return target;
4443*404b540aSrobert
4444*404b540aSrobert case C4X_BUILTIN_FRIEEE:
4445*404b540aSrobert if (TARGET_C3X)
4446*404b540aSrobert break;
4447*404b540aSrobert arg0 = TREE_VALUE (arglist);
4448*404b540aSrobert r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4449*404b540aSrobert if (register_operand (r0, QFmode))
4450*404b540aSrobert {
4451*404b540aSrobert r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
4452*404b540aSrobert emit_move_insn (r1, r0);
4453*404b540aSrobert r0 = r1;
4454*404b540aSrobert }
4455*404b540aSrobert if (! target || ! register_operand (target, QFmode))
4456*404b540aSrobert target = gen_reg_rtx (QFmode);
4457*404b540aSrobert emit_insn (gen_frieee (target, r0));
4458*404b540aSrobert return target;
4459*404b540aSrobert
4460*404b540aSrobert case C4X_BUILTIN_RCPF:
4461*404b540aSrobert if (TARGET_C3X)
4462*404b540aSrobert break;
4463*404b540aSrobert arg0 = TREE_VALUE (arglist);
4464*404b540aSrobert r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
4465*404b540aSrobert if (! target || ! register_operand (target, QFmode))
4466*404b540aSrobert target = gen_reg_rtx (QFmode);
4467*404b540aSrobert emit_insn (gen_rcpfqf_clobber (target, r0));
4468*404b540aSrobert return target;
4469*404b540aSrobert }
4470*404b540aSrobert return NULL_RTX;
4471*404b540aSrobert }
4472*404b540aSrobert
4473*404b540aSrobert static void
c4x_init_libfuncs(void)4474*404b540aSrobert c4x_init_libfuncs (void)
4475*404b540aSrobert {
4476*404b540aSrobert set_optab_libfunc (smul_optab, QImode, "__mulqi3");
4477*404b540aSrobert set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
4478*404b540aSrobert set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
4479*404b540aSrobert set_optab_libfunc (smod_optab, QImode, "__modqi3");
4480*404b540aSrobert set_optab_libfunc (umod_optab, QImode, "__umodqi3");
4481*404b540aSrobert set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
4482*404b540aSrobert set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
4483*404b540aSrobert set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
4484*404b540aSrobert set_optab_libfunc (smul_optab, HImode, "__mulhi3");
4485*404b540aSrobert set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
4486*404b540aSrobert set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
4487*404b540aSrobert set_optab_libfunc (smod_optab, HImode, "__modhi3");
4488*404b540aSrobert set_optab_libfunc (umod_optab, HImode, "__umodhi3");
4489*404b540aSrobert set_optab_libfunc (ffs_optab, QImode, "__ffs");
4490*404b540aSrobert smulhi3_libfunc = init_one_libfunc ("__smulhi3_high");
4491*404b540aSrobert umulhi3_libfunc = init_one_libfunc ("__umulhi3_high");
4492*404b540aSrobert fix_truncqfhi2_libfunc = init_one_libfunc ("__fix_truncqfhi2");
4493*404b540aSrobert fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
4494*404b540aSrobert fix_trunchfhi2_libfunc = init_one_libfunc ("__fix_trunchfhi2");
4495*404b540aSrobert fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
4496*404b540aSrobert floathiqf2_libfunc = init_one_libfunc ("__floathiqf2");
4497*404b540aSrobert floatunshiqf2_libfunc = init_one_libfunc ("__ufloathiqf2");
4498*404b540aSrobert floathihf2_libfunc = init_one_libfunc ("__floathihf2");
4499*404b540aSrobert floatunshihf2_libfunc = init_one_libfunc ("__ufloathihf2");
4500*404b540aSrobert }
4501*404b540aSrobert
4502*404b540aSrobert static void
c4x_asm_named_section(const char * name,unsigned int flags ATTRIBUTE_UNUSED,tree decl ATTRIBUTE_UNUSED)4503*404b540aSrobert c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED,
4504*404b540aSrobert tree decl ATTRIBUTE_UNUSED)
4505*404b540aSrobert {
4506*404b540aSrobert fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
4507*404b540aSrobert }
4508*404b540aSrobert
4509*404b540aSrobert static void
c4x_globalize_label(FILE * stream,const char * name)4510*404b540aSrobert c4x_globalize_label (FILE *stream, const char *name)
4511*404b540aSrobert {
4512*404b540aSrobert default_globalize_label (stream, name);
4513*404b540aSrobert c4x_global_label (name);
4514*404b540aSrobert }
4515*404b540aSrobert
4516*404b540aSrobert #define SHIFT_CODE_P(C) \
4517*404b540aSrobert ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
4518*404b540aSrobert #define LOGICAL_CODE_P(C) \
4519*404b540aSrobert ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
4520*404b540aSrobert
4521*404b540aSrobert /* Compute a (partial) cost for rtx X. Return true if the complete
4522*404b540aSrobert cost has been computed, and false if subexpressions should be
4523*404b540aSrobert scanned. In either case, *TOTAL contains the cost result. */
4524*404b540aSrobert
4525*404b540aSrobert static bool
c4x_rtx_costs(rtx x,int code,int outer_code,int * total)4526*404b540aSrobert c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
4527*404b540aSrobert {
4528*404b540aSrobert HOST_WIDE_INT val;
4529*404b540aSrobert
4530*404b540aSrobert switch (code)
4531*404b540aSrobert {
4532*404b540aSrobert /* Some small integers are effectively free for the C40. We should
4533*404b540aSrobert also consider if we are using the small memory model. With
4534*404b540aSrobert the big memory model we require an extra insn for a constant
4535*404b540aSrobert loaded from memory. */
4536*404b540aSrobert
4537*404b540aSrobert case CONST_INT:
4538*404b540aSrobert val = INTVAL (x);
4539*404b540aSrobert if (c4x_J_constant (x))
4540*404b540aSrobert *total = 0;
4541*404b540aSrobert else if (! TARGET_C3X
4542*404b540aSrobert && outer_code == AND
4543*404b540aSrobert && (val == 255 || val == 65535))
4544*404b540aSrobert *total = 0;
4545*404b540aSrobert else if (! TARGET_C3X
4546*404b540aSrobert && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
4547*404b540aSrobert && (val == 16 || val == 24))
4548*404b540aSrobert *total = 0;
4549*404b540aSrobert else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
4550*404b540aSrobert *total = 3;
4551*404b540aSrobert else if (LOGICAL_CODE_P (outer_code)
4552*404b540aSrobert ? c4x_L_constant (x) : c4x_I_constant (x))
4553*404b540aSrobert *total = 2;
4554*404b540aSrobert else
4555*404b540aSrobert *total = 4;
4556*404b540aSrobert return true;
4557*404b540aSrobert
4558*404b540aSrobert case CONST:
4559*404b540aSrobert case LABEL_REF:
4560*404b540aSrobert case SYMBOL_REF:
4561*404b540aSrobert *total = 4;
4562*404b540aSrobert return true;
4563*404b540aSrobert
4564*404b540aSrobert case CONST_DOUBLE:
4565*404b540aSrobert if (c4x_H_constant (x))
4566*404b540aSrobert *total = 2;
4567*404b540aSrobert else if (GET_MODE (x) == QFmode)
4568*404b540aSrobert *total = 4;
4569*404b540aSrobert else
4570*404b540aSrobert *total = 8;
4571*404b540aSrobert return true;
4572*404b540aSrobert
4573*404b540aSrobert /* ??? Note that we return true, rather than false so that rtx_cost
4574*404b540aSrobert doesn't include the constant costs. Otherwise expand_mult will
4575*404b540aSrobert think that it is cheaper to synthesize a multiply rather than to
4576*404b540aSrobert use a multiply instruction. I think this is because the algorithm
4577*404b540aSrobert synth_mult doesn't take into account the loading of the operands,
4578*404b540aSrobert whereas the calculation of mult_cost does. */
4579*404b540aSrobert case PLUS:
4580*404b540aSrobert case MINUS:
4581*404b540aSrobert case AND:
4582*404b540aSrobert case IOR:
4583*404b540aSrobert case XOR:
4584*404b540aSrobert case ASHIFT:
4585*404b540aSrobert case ASHIFTRT:
4586*404b540aSrobert case LSHIFTRT:
4587*404b540aSrobert *total = COSTS_N_INSNS (1);
4588*404b540aSrobert return true;
4589*404b540aSrobert
4590*404b540aSrobert case MULT:
4591*404b540aSrobert *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
4592*404b540aSrobert || TARGET_MPYI ? 1 : 14);
4593*404b540aSrobert return true;
4594*404b540aSrobert
4595*404b540aSrobert case DIV:
4596*404b540aSrobert case UDIV:
4597*404b540aSrobert case MOD:
4598*404b540aSrobert case UMOD:
4599*404b540aSrobert *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
4600*404b540aSrobert ? 15 : 50);
4601*404b540aSrobert return true;
4602*404b540aSrobert
4603*404b540aSrobert default:
4604*404b540aSrobert return false;
4605*404b540aSrobert }
4606*404b540aSrobert }
4607*404b540aSrobert
4608*404b540aSrobert /* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
4609*404b540aSrobert
4610*404b540aSrobert static void
c4x_external_libcall(rtx fun)4611*404b540aSrobert c4x_external_libcall (rtx fun)
4612*404b540aSrobert {
4613*404b540aSrobert /* This is only needed to keep asm30 happy for ___divqf3 etc. */
4614*404b540aSrobert c4x_external_ref (XSTR (fun, 0));
4615*404b540aSrobert }
4616*404b540aSrobert
4617*404b540aSrobert /* Worker function for TARGET_STRUCT_VALUE_RTX. */
4618*404b540aSrobert
4619*404b540aSrobert static rtx
c4x_struct_value_rtx(tree fntype ATTRIBUTE_UNUSED,int incoming ATTRIBUTE_UNUSED)4620*404b540aSrobert c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
4621*404b540aSrobert int incoming ATTRIBUTE_UNUSED)
4622*404b540aSrobert {
4623*404b540aSrobert return gen_rtx_REG (Pmode, AR0_REGNO);
4624*404b540aSrobert }
4625