1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #define IN_TARGET_CODE 1
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "stringpool.h"
33 #include "attribs.h"
34 #include "insn-config.h"
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "diagnostic-core.h"
38 #include "alias.h"
39 #include "stor-layout.h"
40 #include "varasm.h"
41 #include "calls.h"
42 #include "output.h"
43 #include "insn-attr.h"
44 #include "explow.h"
45 #include "expr.h"
46 #include "tm-constrs.h"
47 #include "builtins.h"
48
49 /* This file should be included last. */
50 #include "target-def.h"
51
/* Array of valid operand punctuation characters.  Indexed by character
   code; nonzero means the character is accepted by
   TARGET_PRINT_OPERAND_PUNCT_VALID_P.  Filled in by m32r_init.  */
static char m32r_punct_chars[256];

/* Machine-specific symbol_ref flags.  The code model selected for a
   symbol (enum m32r_model) is stored in two of the target-dependent
   SYMBOL_REF_FLAGS bits; see m32r_encode_section_info.  */
#define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
#define SYMBOL_REF_MODEL(X) \
  ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))

/* For string literals, etc.  Matches assembler names that begin with
   "*." (compiler-generated literal labels).  */
#define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')

/* Forward declarations of the static functions defined later in this
   file (they are referenced by the target hook table below).  */
static void m32r_option_override (void);
static void init_reg_tables (void);
static void block_move_call (rtx, rtx, rtx);
static int m32r_is_insn (rtx);
static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
static void m32r_print_operand (FILE *, rtx, int);
static void m32r_print_operand_address (FILE *, machine_mode, rtx);
static bool m32r_print_operand_punct_valid_p (unsigned char code);
static void m32r_output_function_prologue (FILE *);
static void m32r_output_function_epilogue (FILE *);

static void m32r_file_start (void);

static int m32r_adjust_priority (rtx_insn *, int);
static int m32r_issue_rate (void);

static void m32r_encode_section_info (tree, rtx, int);
static bool m32r_in_small_data_p (const_tree);
static bool m32r_return_in_memory (const_tree, const_tree);
static rtx m32r_function_value (const_tree, const_tree, bool);
static rtx m32r_libcall_value (machine_mode, const_rtx);
static bool m32r_function_value_regno_p (const unsigned int);
static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode,
					 tree, int *, int);
static void init_idents (void);
static bool m32r_rtx_costs (rtx, machine_mode, int, int, int *, bool speed);
static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
static bool m32r_pass_by_reference (cumulative_args_t, machine_mode,
				    const_tree, bool);
static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode,
				   tree, bool);
static rtx m32r_function_arg (cumulative_args_t, machine_mode,
			      const_tree, bool);
static void m32r_function_arg_advance (cumulative_args_t, machine_mode,
				       const_tree, bool);
static bool m32r_can_eliminate (const int, const int);
static void m32r_conditional_register_usage (void);
static void m32r_trampoline_init (rtx, tree, rtx);
static bool m32r_legitimate_constant_p (machine_mode, rtx);
static bool m32r_attribute_identifier (const_tree);
static bool m32r_hard_regno_mode_ok (unsigned int, machine_mode);
static bool m32r_modes_tieable_p (machine_mode, machine_mode);
static HOST_WIDE_INT m32r_starting_frame_offset (void);

/* M32R specific attributes.  "interrupt" takes no arguments;
   "model" takes exactly one (small/medium/large), validated by
   m32r_handle_model_attribute.  */

static const struct attribute_spec m32r_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  { "interrupt", 0, 0, true, false, false, false, NULL, NULL },
  { "model", 1, 1, true, false, false, false, m32r_handle_model_attribute,
    NULL },
  { NULL, 0, 0, false, false, false, false, NULL, NULL }
};
122
/* Initialize the GCC target structure: override the default target
   hooks with the M32R-specific implementations declared above.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
#undef TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P
#define TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P m32r_attribute_identifier

/* This port still uses old-style reload rather than LRA.  */
#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p

/* Assembler directives for aligned 16- and 32-bit data.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND m32r_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START m32r_file_start

#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE m32r_issue_rate

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE m32r_option_override

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p


#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS m32r_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY m32r_return_in_memory

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE m32r_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE m32r_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG m32r_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE m32r_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT m32r_trampoline_init

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p

#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK m32r_hard_regno_mode_ok

#undef TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P m32r_modes_tieable_p

#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings

#undef TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET m32r_starting_frame_offset

/* Instantiate the target hook vector with the definitions above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
230
231 /* Called by m32r_option_override to initialize various things. */
232
233 void
m32r_init(void)234 m32r_init (void)
235 {
236 init_reg_tables ();
237
238 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
239 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
240 m32r_punct_chars['#'] = 1;
241 m32r_punct_chars['@'] = 1; /* ??? no longer used */
242
243 /* Provide default value if not specified. */
244 if (!global_options_set.x_g_switch_value)
245 g_switch_value = SDATA_DEFAULT_SIZE;
246 }
247
/* Implement TARGET_OPTION_OVERRIDE.  */

static void
m32r_option_override (void)
{
  /* These need to be done at start up.
     It's convenient to do them here.  */
  m32r_init ();
  /* Let the subtarget (e.g. Linux vs ELF configuration) adjust any
     option settings after the generic initialization.  */
  SUBTARGET_OVERRIDE_OPTIONS;
}
256
/* Vectors to keep interesting information about registers where it can easily
   be got.  We used to use the actual mode value as the bit number, but there
   is (or may be) more than 32 modes now.  Instead we use two tables: one
   indexed by hard register number, and one indexed by mode.  */

/* The purpose of m32r_mode_class is to shrink the range of modes so that
   they all fit (as bit numbers) in a 32-bit word (again).  Each real mode is
   mapped into one m32r_mode_class mode.  */

enum m32r_mode_class
{
  C_MODE,
  S_MODE, D_MODE, T_MODE, O_MODE,
  SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
};

/* Modes for condition codes.  */
#define C_MODES (1 << (int) C_MODE)

/* Modes for single-word and smaller quantities.  */
#define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))

/* Modes for double-word and smaller quantities.  */
#define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))

/* Modes for quad-word and smaller quantities.  */
#define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))

/* Modes for accumulators.  */
#define A_MODES (1 << (int) A_MODE)

/* Value is 1 if register/mode pair is acceptable on the M32R.
   (Comment said "arc" — this table layout was inherited from the ARC
   port.)  Indexed by hard register number; each entry is a mask of
   m32r_mode_class bits.  */

static const unsigned int m32r_hard_regno_modes[FIRST_PSEUDO_REGISTER] =
{
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, C_MODES, A_MODES, A_MODES
};

/* Mask of m32r_mode_class bits for each machine mode; filled in by
   init_reg_tables.  */
static unsigned int m32r_mode_class [NUM_MACHINE_MODES];

/* Register class of each hard register; filled in by init_reg_tables.  */
enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
300
301 static void
init_reg_tables(void)302 init_reg_tables (void)
303 {
304 int i;
305
306 for (i = 0; i < NUM_MACHINE_MODES; i++)
307 {
308 machine_mode m = (machine_mode) i;
309
310 switch (GET_MODE_CLASS (m))
311 {
312 case MODE_INT:
313 case MODE_PARTIAL_INT:
314 case MODE_COMPLEX_INT:
315 if (GET_MODE_SIZE (m) <= 4)
316 m32r_mode_class[i] = 1 << (int) S_MODE;
317 else if (GET_MODE_SIZE (m) == 8)
318 m32r_mode_class[i] = 1 << (int) D_MODE;
319 else if (GET_MODE_SIZE (m) == 16)
320 m32r_mode_class[i] = 1 << (int) T_MODE;
321 else if (GET_MODE_SIZE (m) == 32)
322 m32r_mode_class[i] = 1 << (int) O_MODE;
323 else
324 m32r_mode_class[i] = 0;
325 break;
326 case MODE_FLOAT:
327 case MODE_COMPLEX_FLOAT:
328 if (GET_MODE_SIZE (m) <= 4)
329 m32r_mode_class[i] = 1 << (int) SF_MODE;
330 else if (GET_MODE_SIZE (m) == 8)
331 m32r_mode_class[i] = 1 << (int) DF_MODE;
332 else if (GET_MODE_SIZE (m) == 16)
333 m32r_mode_class[i] = 1 << (int) TF_MODE;
334 else if (GET_MODE_SIZE (m) == 32)
335 m32r_mode_class[i] = 1 << (int) OF_MODE;
336 else
337 m32r_mode_class[i] = 0;
338 break;
339 case MODE_CC:
340 m32r_mode_class[i] = 1 << (int) C_MODE;
341 break;
342 default:
343 m32r_mode_class[i] = 0;
344 break;
345 }
346 }
347
348 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
349 {
350 if (GPR_P (i))
351 m32r_regno_reg_class[i] = GENERAL_REGS;
352 else if (i == ARG_POINTER_REGNUM)
353 m32r_regno_reg_class[i] = GENERAL_REGS;
354 else
355 m32r_regno_reg_class[i] = NO_REGS;
356 }
357 }
358
359 /* M32R specific attribute support.
360
361 interrupt - for interrupt functions
362
363 model - select code model used to access object
364
365 small: addresses use 24 bits, use bl to make calls
366 medium: addresses use 32 bits, use bl to make calls
367 large: addresses use 32 bits, use seth/add3/jl to make calls
368
369 Grep for MODEL in m32r.h for more info. */
370
371 static tree small_ident1;
372 static tree small_ident2;
373 static tree medium_ident1;
374 static tree medium_ident2;
375 static tree large_ident1;
376 static tree large_ident2;
377
378 static void
init_idents(void)379 init_idents (void)
380 {
381 if (small_ident1 == 0)
382 {
383 small_ident1 = get_identifier ("small");
384 small_ident2 = get_identifier ("__small__");
385 medium_ident1 = get_identifier ("medium");
386 medium_ident2 = get_identifier ("__medium__");
387 large_ident1 = get_identifier ("large");
388 large_ident2 = get_identifier ("__large__");
389 }
390 }
391
392 /* Handle an "model" attribute; arguments as in
393 struct attribute_spec.handler. */
394 static tree
m32r_handle_model_attribute(tree * node ATTRIBUTE_UNUSED,tree name,tree args,int flags ATTRIBUTE_UNUSED,bool * no_add_attrs)395 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
396 tree args, int flags ATTRIBUTE_UNUSED,
397 bool *no_add_attrs)
398 {
399 tree arg;
400
401 init_idents ();
402 arg = TREE_VALUE (args);
403
404 if (arg != small_ident1
405 && arg != small_ident2
406 && arg != medium_ident1
407 && arg != medium_ident2
408 && arg != large_ident1
409 && arg != large_ident2)
410 {
411 warning (OPT_Wattributes, "invalid argument of %qs attribute",
412 IDENTIFIER_POINTER (name));
413 *no_add_attrs = true;
414 }
415
416 return NULL_TREE;
417 }
418
419 static bool
m32r_attribute_identifier(const_tree name)420 m32r_attribute_identifier (const_tree name)
421 {
422 return strcmp (IDENTIFIER_POINTER (name), "model") == 0
423 || strcmp (IDENTIFIER_POINTER (name), "__model__") == 0;
424 }
425
426 /* Encode section information of DECL, which is either a VAR_DECL,
427 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
428
429 For the M32R we want to record:
430
431 - whether the object lives in .sdata/.sbss.
432 - what code model should be used to access the object
433 */
434
435 static void
m32r_encode_section_info(tree decl,rtx rtl,int first)436 m32r_encode_section_info (tree decl, rtx rtl, int first)
437 {
438 int extra_flags = 0;
439 tree model_attr;
440 enum m32r_model model;
441
442 default_encode_section_info (decl, rtl, first);
443
444 if (!DECL_P (decl))
445 return;
446
447 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
448 if (model_attr)
449 {
450 tree id;
451
452 init_idents ();
453
454 id = TREE_VALUE (TREE_VALUE (model_attr));
455
456 if (id == small_ident1 || id == small_ident2)
457 model = M32R_MODEL_SMALL;
458 else if (id == medium_ident1 || id == medium_ident2)
459 model = M32R_MODEL_MEDIUM;
460 else if (id == large_ident1 || id == large_ident2)
461 model = M32R_MODEL_LARGE;
462 else
463 gcc_unreachable (); /* shouldn't happen */
464 }
465 else
466 {
467 if (TARGET_MODEL_SMALL)
468 model = M32R_MODEL_SMALL;
469 else if (TARGET_MODEL_MEDIUM)
470 model = M32R_MODEL_MEDIUM;
471 else if (TARGET_MODEL_LARGE)
472 model = M32R_MODEL_LARGE;
473 else
474 gcc_unreachable (); /* shouldn't happen */
475 }
476 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
477
478 if (extra_flags)
479 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
480 }
481
482 /* Only mark the object as being small data area addressable if
483 it hasn't been explicitly marked with a code model.
484
485 The user can explicitly put an object in the small data area with the
486 section attribute. If the object is in sdata/sbss and marked with a
487 code model do both [put the object in .sdata and mark it as being
488 addressed with a specific code model - don't mark it as being addressed
489 with an SDA reloc though]. This is ok and might be useful at times. If
490 the object doesn't fit the linker will give an error. */
491
492 static bool
m32r_in_small_data_p(const_tree decl)493 m32r_in_small_data_p (const_tree decl)
494 {
495 const char *section;
496
497 if (TREE_CODE (decl) != VAR_DECL)
498 return false;
499
500 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
501 return false;
502
503 section = DECL_SECTION_NAME (decl);
504 if (section)
505 {
506 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
507 return true;
508 }
509 else
510 {
511 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
512 {
513 int size = int_size_in_bytes (TREE_TYPE (decl));
514
515 if (size > 0 && size <= g_switch_value)
516 return true;
517 }
518 }
519
520 return false;
521 }
522
/* Do anything needed before RTL is emitted for each function.
   Currently a no-op; kept as an extension point.  */

void
m32r_init_expanders (void)
{
  /* ??? At one point there was code here.  The function is left in
     to make it easy to experiment.  */
}
531
532 int
call_operand(rtx op,machine_mode mode)533 call_operand (rtx op, machine_mode mode)
534 {
535 if (!MEM_P (op))
536 return 0;
537 op = XEXP (op, 0);
538 return call_address_operand (op, mode);
539 }
540
541 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
542
543 int
small_data_operand(rtx op,machine_mode mode ATTRIBUTE_UNUSED)544 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
545 {
546 if (! TARGET_SDATA_USE)
547 return 0;
548
549 if (GET_CODE (op) == SYMBOL_REF)
550 return SYMBOL_REF_SMALL_P (op);
551
552 if (GET_CODE (op) == CONST
553 && GET_CODE (XEXP (op, 0)) == PLUS
554 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
555 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
556 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
557
558 return 0;
559 }
560
561 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
562
563 int
addr24_operand(rtx op,machine_mode mode ATTRIBUTE_UNUSED)564 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
565 {
566 rtx sym;
567
568 if (flag_pic)
569 return 0;
570
571 if (GET_CODE (op) == LABEL_REF)
572 return TARGET_ADDR24;
573
574 if (GET_CODE (op) == SYMBOL_REF)
575 sym = op;
576 else if (GET_CODE (op) == CONST
577 && GET_CODE (XEXP (op, 0)) == PLUS
578 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
579 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
580 sym = XEXP (XEXP (op, 0), 0);
581 else
582 return 0;
583
584 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
585 return 1;
586
587 if (TARGET_ADDR24
588 && (CONSTANT_POOL_ADDRESS_P (sym)
589 || LIT_NAME_P (XSTR (sym, 0))))
590 return 1;
591
592 return 0;
593 }
594
595 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
596
597 int
addr32_operand(rtx op,machine_mode mode)598 addr32_operand (rtx op, machine_mode mode)
599 {
600 rtx sym;
601
602 if (GET_CODE (op) == LABEL_REF)
603 return TARGET_ADDR32;
604
605 if (GET_CODE (op) == SYMBOL_REF)
606 sym = op;
607 else if (GET_CODE (op) == CONST
608 && GET_CODE (XEXP (op, 0)) == PLUS
609 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
610 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
611 && ! flag_pic)
612 sym = XEXP (XEXP (op, 0), 0);
613 else
614 return 0;
615
616 return (! addr24_operand (sym, mode)
617 && ! small_data_operand (sym, mode));
618 }
619
620 /* Return 1 if OP is a function that can be called with the `bl' insn. */
621
622 int
call26_operand(rtx op,machine_mode mode ATTRIBUTE_UNUSED)623 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
624 {
625 if (flag_pic)
626 return 1;
627
628 if (GET_CODE (op) == SYMBOL_REF)
629 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
630
631 return TARGET_CALL26;
632 }
633
634 /* Return 1 if OP is a DImode const we want to handle inline.
635 This must match the code in the movdi pattern.
636 It is used by the 'G' constraint. */
637
638 int
easy_di_const(rtx op)639 easy_di_const (rtx op)
640 {
641 rtx high_rtx, low_rtx;
642 HOST_WIDE_INT high, low;
643
644 split_double (op, &high_rtx, &low_rtx);
645 high = INTVAL (high_rtx);
646 low = INTVAL (low_rtx);
647 /* Pick constants loadable with 2 16-bit `ldi' insns. */
648 if (high >= -128 && high <= 127
649 && low >= -128 && low <= 127)
650 return 1;
651 return 0;
652 }
653
654 /* Return 1 if OP is a DFmode const we want to handle inline.
655 This must match the code in the movdf pattern.
656 It is used by the 'H' constraint. */
657
658 int
easy_df_const(rtx op)659 easy_df_const (rtx op)
660 {
661 long l[2];
662
663 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (op), l);
664 if (l[0] == 0 && l[1] == 0)
665 return 1;
666 if ((l[0] & 0xffff) == 0 && l[1] == 0)
667 return 1;
668 return 0;
669 }
670
671 /* Return 1 if OP is (mem (reg ...)).
672 This is used in insn length calcs. */
673
674 int
memreg_operand(rtx op,machine_mode mode ATTRIBUTE_UNUSED)675 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
676 {
677 return MEM_P (op) && REG_P (XEXP (op, 0));
678 }
679
680 /* Return nonzero if TYPE must be passed by indirect reference. */
681
682 static bool
m32r_pass_by_reference(cumulative_args_t ca ATTRIBUTE_UNUSED,machine_mode mode,const_tree type,bool named ATTRIBUTE_UNUSED)683 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
684 machine_mode mode, const_tree type,
685 bool named ATTRIBUTE_UNUSED)
686 {
687 int size;
688
689 if (type)
690 size = int_size_in_bytes (type);
691 else
692 size = GET_MODE_SIZE (mode);
693
694 return (size < 0 || size > 8);
695 }
696
/* Comparisons.  */

/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for compare [arg0 of the if_then_else].
   If need_compare is true then the comparison insn must be generated, rather
   than being subsumed into the following branch instruction.  */

rtx
gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
{
  enum rtx_code compare_code;
  enum rtx_code branch_code;
  rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
  int must_swap = 0;

  /* Map each comparison onto one of the three compares the hardware
     supports (EQ, LT, LTU), possibly swapping the operands and/or
     inverting the branch sense.  */
  switch (code)
    {
    case EQ:  compare_code = EQ;  branch_code = NE; break;
    case NE:  compare_code = EQ;  branch_code = EQ; break;
    case LT:  compare_code = LT;  branch_code = NE; break;
    case LE:  compare_code = LT;  branch_code = EQ; must_swap = 1; break;
    case GT:  compare_code = LT;  branch_code = NE; must_swap = 1; break;
    case GE:  compare_code = LT;  branch_code = EQ; break;
    case LTU: compare_code = LTU; branch_code = NE; break;
    case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
    case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
    case GEU: compare_code = LTU; branch_code = EQ; break;

    default:
      gcc_unreachable ();
    }

  if (need_compare)
    {
      switch (compare_code)
	{
	case EQ:
	  if (satisfies_constraint_P (y)		/* Reg equal to small const.  */
	      && y != const0_rtx)
	    {
	      /* Subtract the constant so the test becomes a compare
		 against zero.  */
	      rtx tmp = gen_reg_rtx (SImode);

	      emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
	      x = tmp;
	      y = const0_rtx;
	    }
	  else if (CONSTANT_P (y))			/* Reg equal to const.  */
	    {
	      rtx tmp = force_reg (GET_MODE (x), y);
	      y = tmp;
	    }

	  if (register_operand (y, SImode)		/* Reg equal to reg.  */
	      || y == const0_rtx)			/* Reg equal to zero.  */
	    {
	      emit_insn (gen_cmp_eqsi_insn (x, y));

	      return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
	    }
	  break;

	case LT:
	  if (register_operand (y, SImode)
	      || satisfies_constraint_P (y))
	    {
	      rtx tmp = gen_reg_rtx (SImode);		/* Reg compared to reg.  */

	      switch (code)
		{
		case LT:
		  emit_insn (gen_cmp_ltsi_insn (x, y));
		  code = EQ;
		  break;
		case LE:
		  /* x <= y  ==>  x < y + 1 ... except y == 0, where
		     x <= 0  ==>  x < 1.  */
		  if (y == const0_rtx)
		    tmp = const1_rtx;
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltsi_insn (x, tmp));
		  code = EQ;
		  break;
		case GT:
		  /* x > y  ==>  !(x < y + 1).  */
		  if (CONST_INT_P (y))
		    /* NOTE(review): this builds an unsimplified
		       (plus (const_int) (const_int)) rather than
		       GEN_INT (INTVAL (y) + 1); confirm the insn
		       predicates accept it before changing.  */
		    tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltsi_insn (x, tmp));
		  code = NE;
		  break;
		case GE:
		  emit_insn (gen_cmp_ltsi_insn (x, y));
		  code = NE;
		  break;
		default:
		  gcc_unreachable ();
		}

	      return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
	    }
	  break;

	case LTU:
	  if (register_operand (y, SImode)
	      || satisfies_constraint_P (y))
	    {
	      rtx tmp = gen_reg_rtx (SImode);		/* Reg (unsigned) compared to reg.  */

	      switch (code)
		{
		case LTU:
		  emit_insn (gen_cmp_ltusi_insn (x, y));
		  code = EQ;
		  break;
		case LEU:
		  if (y == const0_rtx)
		    tmp = const1_rtx;
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltusi_insn (x, tmp));
		  code = EQ;
		  break;
		case GTU:
		  if (CONST_INT_P (y))
		    /* NOTE(review): same unsimplified PLUS as the GT
		       case above.  */
		    tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltusi_insn (x, tmp));
		  code = NE;
		  break;
		case GEU:
		  emit_insn (gen_cmp_ltusi_insn (x, y));
		  code = NE;
		  break;
		default:
		  gcc_unreachable ();
		}

	      return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* The comparison may be folded into the branch, so just return a
	 compare rtx without emitting anything where possible.  */

      /* Reg/reg equal comparison.  */
      if (compare_code == EQ
	  && register_operand (y, SImode))
	return gen_rtx_fmt_ee (code, CCmode, x, y);

      /* Reg/zero signed comparison.  */
      if ((compare_code == EQ || compare_code == LT)
	  && y == const0_rtx)
	return gen_rtx_fmt_ee (code, CCmode, x, y);

      /* Reg/smallconst equal comparison.  */
      if (compare_code == EQ
	  && satisfies_constraint_P (y))
	{
	  rtx tmp = gen_reg_rtx (SImode);

	  emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
	  return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
	}

      /* Reg/const equal comparison.  */
      if (compare_code == EQ
	  && CONSTANT_P (y))
	{
	  rtx tmp = force_reg (GET_MODE (x), y);

	  return gen_rtx_fmt_ee (code, CCmode, x, tmp);
	}
    }

  /* Fall back to emitting an explicit compare insn and branching on
     the condition bit.  Constants that can't serve as the immediate
     operand (or that end up as the first operand after swapping) must
     be loaded into a register first.  */
  if (CONSTANT_P (y))
    {
      if (must_swap)
	y = force_reg (GET_MODE (x), y);
      else
	{
	  int ok_const = reg_or_int16_operand (y, GET_MODE (y));

	  if (! ok_const)
	    y = force_reg (GET_MODE (x), y);
	}
    }

  switch (compare_code)
    {
    case EQ :
      emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
      break;
    case LT :
      emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
      break;
    case LTU :
      emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
}
905
906 bool
gen_cond_store(enum rtx_code code,rtx op0,rtx op1,rtx op2)907 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
908 {
909 machine_mode mode = GET_MODE (op0);
910
911 gcc_assert (mode == SImode);
912 switch (code)
913 {
914 case EQ:
915 if (!register_operand (op1, mode))
916 op1 = force_reg (mode, op1);
917
918 if (TARGET_M32RX || TARGET_M32R2)
919 {
920 if (!reg_or_zero_operand (op2, mode))
921 op2 = force_reg (mode, op2);
922
923 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
924 return true;
925 }
926 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
927 {
928 emit_insn (gen_seq_zero_insn (op0, op1));
929 return true;
930 }
931
932 if (!reg_or_eq_int16_operand (op2, mode))
933 op2 = force_reg (mode, op2);
934
935 emit_insn (gen_seq_insn (op0, op1, op2));
936 return true;
937
938 case NE:
939 if (!CONST_INT_P (op2)
940 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
941 {
942 rtx reg;
943
944 if (reload_completed || reload_in_progress)
945 return false;
946
947 reg = gen_reg_rtx (SImode);
948 emit_insn (gen_xorsi3 (reg, op1, op2));
949 op1 = reg;
950
951 if (!register_operand (op1, mode))
952 op1 = force_reg (mode, op1);
953
954 emit_insn (gen_sne_zero_insn (op0, op1));
955 return true;
956 }
957 return false;
958
959 case LT:
960 case GT:
961 if (code == GT)
962 {
963 rtx tmp = op2;
964 op2 = op1;
965 op1 = tmp;
966 code = LT;
967 }
968
969 if (!register_operand (op1, mode))
970 op1 = force_reg (mode, op1);
971
972 if (!reg_or_int16_operand (op2, mode))
973 op2 = force_reg (mode, op2);
974
975 emit_insn (gen_slt_insn (op0, op1, op2));
976 return true;
977
978 case LTU:
979 case GTU:
980 if (code == GTU)
981 {
982 rtx tmp = op2;
983 op2 = op1;
984 op1 = tmp;
985 code = LTU;
986 }
987
988 if (!register_operand (op1, mode))
989 op1 = force_reg (mode, op1);
990
991 if (!reg_or_int16_operand (op2, mode))
992 op2 = force_reg (mode, op2);
993
994 emit_insn (gen_sltu_insn (op0, op1, op2));
995 return true;
996
997 case GE:
998 case GEU:
999 if (!register_operand (op1, mode))
1000 op1 = force_reg (mode, op1);
1001
1002 if (!reg_or_int16_operand (op2, mode))
1003 op2 = force_reg (mode, op2);
1004
1005 if (code == GE)
1006 emit_insn (gen_sge_insn (op0, op1, op2));
1007 else
1008 emit_insn (gen_sgeu_insn (op0, op1, op2));
1009 return true;
1010
1011 case LE:
1012 case LEU:
1013 if (!register_operand (op1, mode))
1014 op1 = force_reg (mode, op1);
1015
1016 if (CONST_INT_P (op2))
1017 {
1018 HOST_WIDE_INT value = INTVAL (op2);
1019 if (value >= 2147483647)
1020 {
1021 emit_move_insn (op0, const1_rtx);
1022 return true;
1023 }
1024
1025 op2 = GEN_INT (value + 1);
1026 if (value < -32768 || value >= 32767)
1027 op2 = force_reg (mode, op2);
1028
1029 if (code == LEU)
1030 emit_insn (gen_sltu_insn (op0, op1, op2));
1031 else
1032 emit_insn (gen_slt_insn (op0, op1, op2));
1033 return true;
1034 }
1035
1036 if (!register_operand (op2, mode))
1037 op2 = force_reg (mode, op2);
1038
1039 if (code == LEU)
1040 emit_insn (gen_sleu_insn (op0, op1, op2));
1041 else
1042 emit_insn (gen_sle_insn (op0, op1, op2));
1043 return true;
1044
1045 default:
1046 gcc_unreachable ();
1047 }
1048 }
1049
1050
/* Split a 2 word move (DI or DF) into component parts.

   OPERANDS[0] is the destination and OPERANDS[1] the source.  The
   supported shapes are reg = reg, reg = constant, reg = mem and
   mem = reg.  Returns the emitted insn sequence.  The two word moves
   are ordered so that the first move never clobbers a register or an
   address register that the second move still needs.  */

rtx
gen_split_move_double (rtx operands[])
{
  machine_mode mode = GET_MODE (operands[0]);
  rtx dest = operands[0];
  rtx src  = operands[1];
  rtx val;

  /* We might have (SUBREG (MEM)) here, so just get rid of the
     subregs to make this code simpler.  It is safe to call
     alter_subreg any time after reload.  */
  if (GET_CODE (dest) == SUBREG)
    alter_subreg (&dest, true);
  if (GET_CODE (src) == SUBREG)
    alter_subreg (&src, true);

  start_sequence ();
  if (REG_P (dest))
    {
      int dregno = REGNO (dest);

      /* Reg = reg.  */
      if (REG_P (src))
	{
	  int sregno = REGNO (src);

	  int reverse = (dregno == sregno + 1);

	  /* We normally copy the low-numbered register first.  However, if
	     the first register operand 0 is the same as the second register of
	     operand 1, we must copy in the opposite order.  */
	  emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
				  operand_subword (src, reverse, TRUE, mode)));

	  emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
				  operand_subword (src, !reverse, TRUE, mode)));
	}

      /* Reg = constant.  */
      else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
	{
	  rtx words[2];
	  split_double (src, &words[0], &words[1]);
	  emit_insn (gen_rtx_SET (operand_subword (dest, 0, TRUE, mode),
				  words[0]));

	  emit_insn (gen_rtx_SET (operand_subword (dest, 1, TRUE, mode),
				  words[1]));
	}

      /* Reg = mem.  */
      else if (MEM_P (src))
	{
	  /* If the high-address word is used in the address, we must load it
	     last.  Otherwise, load it first.  */
	  int reverse = refers_to_regno_p (dregno, XEXP (src, 0));

	  /* We used to optimize loads from single registers as

		ld r1,r3+; ld r2,r3

	     if r3 were not used subsequently.  However, the REG_NOTES aren't
	     propagated correctly by the reload phase, and it can cause bad
	     code to be generated.  We could still try:

		ld r1,r3+; ld r2,r3; addi r3,-4

	     which saves 2 bytes and doesn't force longword alignment.  */
	  emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
				  adjust_address (src, SImode,
						  reverse * UNITS_PER_WORD)));

	  emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
				  adjust_address (src, SImode,
						  !reverse * UNITS_PER_WORD)));
	}
      else
	gcc_unreachable ();
    }

  /* Mem = reg.  */
  /* We used to optimize stores to single registers as

	st r1,r3; st r2,+r3

     if r3 were not used subsequently.  However, the REG_NOTES aren't
     propagated correctly by the reload phase, and it can cause bad
     code to be generated.  We could still try:

	st r1,r3; st r2,+r3; addi r3,-4

     which saves 2 bytes and doesn't force longword alignment.  */
  else if (MEM_P (dest) && REG_P (src))
    {
      emit_insn (gen_rtx_SET (adjust_address (dest, SImode, 0),
			      operand_subword (src, 0, TRUE, mode)));

      emit_insn (gen_rtx_SET (adjust_address (dest, SImode, UNITS_PER_WORD),
			      operand_subword (src, 1, TRUE, mode)));
    }

  else
    gcc_unreachable ();

  /* Hand the collected insns back to the caller as one sequence.  */
  val = get_insns ();
  end_sequence ();
  return val;
}
1161
1162
1163 static int
m32r_arg_partial_bytes(cumulative_args_t cum_v,machine_mode mode,tree type,bool named ATTRIBUTE_UNUSED)1164 m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
1165 tree type, bool named ATTRIBUTE_UNUSED)
1166 {
1167 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1168
1169 int words;
1170 unsigned int size =
1171 (((mode == BLKmode && type)
1172 ? (unsigned int) int_size_in_bytes (type)
1173 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1174 / UNITS_PER_WORD;
1175
1176 if (*cum >= M32R_MAX_PARM_REGS)
1177 words = 0;
1178 else if (*cum + size > M32R_MAX_PARM_REGS)
1179 words = (*cum + size) - M32R_MAX_PARM_REGS;
1180 else
1181 words = 0;
1182
1183 return words * UNITS_PER_WORD;
1184 }
1185
/* The ROUND_ADVANCE* macros are local to this file.  */
/* Round SIZE (in bytes) up to a whole number of words.  */
#define ROUND_ADVANCE(SIZE) \
  (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)

/* Round arg MODE/TYPE up to the next word boundary.
   BLKmode arguments are measured via their type, others via the mode.  */
#define ROUND_ADVANCE_ARG(MODE, TYPE) \
  ((MODE) == BLKmode \
   ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
   : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))

/* Round CUM up to the necessary point for argument MODE/TYPE.
   The M32R needs no extra alignment of the argument-register cursor,
   so this is the identity.  */
#define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)

/* Return boolean indicating arg of type TYPE and mode MODE will be passed in
   a reg.  This includes arguments that have to be passed by reference as the
   pointer to them is passed in a reg if one is available (and that is what
   we're given).
   This macro is only used in this file.  */
#define PASS_IN_REG_P(CUM, MODE, TYPE) \
  (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1207
1208 /* Determine where to put an argument to a function.
1209 Value is zero to push the argument on the stack,
1210 or a hard register in which to store the argument.
1211
1212 MODE is the argument's machine mode.
1213 TYPE is the data type of the argument (as a tree).
1214 This is null for libcalls where that information may
1215 not be available.
1216 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1217 the preceding args and about the function being called.
1218 NAMED is nonzero if this argument is a named parameter
1219 (otherwise it is an extra parameter matching an ellipsis). */
1220 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1221 and the rest are pushed. */
1222
1223 static rtx
m32r_function_arg(cumulative_args_t cum_v,machine_mode mode,const_tree type ATTRIBUTE_UNUSED,bool named ATTRIBUTE_UNUSED)1224 m32r_function_arg (cumulative_args_t cum_v, machine_mode mode,
1225 const_tree type ATTRIBUTE_UNUSED,
1226 bool named ATTRIBUTE_UNUSED)
1227 {
1228 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1229
1230 return (PASS_IN_REG_P (*cum, mode, type)
1231 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1232 : NULL_RTX);
1233 }
1234
1235 /* Update the data in CUM to advance over an argument
1236 of mode MODE and data type TYPE.
1237 (TYPE is null for libcalls where that information may not be available.) */
1238
1239 static void
m32r_function_arg_advance(cumulative_args_t cum_v,machine_mode mode,const_tree type,bool named ATTRIBUTE_UNUSED)1240 m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1241 const_tree type, bool named ATTRIBUTE_UNUSED)
1242 {
1243 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1244
1245 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1246 + ROUND_ADVANCE_ARG (mode, type));
1247 }
1248
/* Worker function for TARGET_RETURN_IN_MEMORY.
   A value is returned in memory exactly when an argument of the same
   type would be passed by reference; the cumulative-args cursor is
   irrelevant to that decision, hence the NULL dummy.  */

static bool
m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  cumulative_args_t dummy = pack_cumulative_args (NULL);

  return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
}
1258
1259 /* Worker function for TARGET_FUNCTION_VALUE. */
1260
1261 static rtx
m32r_function_value(const_tree valtype,const_tree fn_decl_or_type ATTRIBUTE_UNUSED,bool outgoing ATTRIBUTE_UNUSED)1262 m32r_function_value (const_tree valtype,
1263 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1264 bool outgoing ATTRIBUTE_UNUSED)
1265 {
1266 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1267 }
1268
1269 /* Worker function for TARGET_LIBCALL_VALUE. */
1270
1271 static rtx
m32r_libcall_value(machine_mode mode,const_rtx fun ATTRIBUTE_UNUSED)1272 m32r_libcall_value (machine_mode mode,
1273 const_rtx fun ATTRIBUTE_UNUSED)
1274 {
1275 return gen_rtx_REG (mode, 0);
1276 }
1277
1278 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1279
1280 ??? What about r1 in DI/DF values. */
1281
1282 static bool
m32r_function_value_regno_p(const unsigned int regno)1283 m32r_function_value_regno_p (const unsigned int regno)
1284 {
1285 return (regno == 0);
1286 }
1287
/* Do any needed setup for a variadic function.  For the M32R, we must
   create a register parameter block, and then copy any anonymous arguments
   in registers to memory.

   CUM has not been updated for the last named argument which has type TYPE
   and mode MODE, and we rely on this fact.  */

static void
m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
			     tree type, int *pretend_size, int no_rtl)
{
  int first_anon_arg;

  if (no_rtl)
    return;

  /* All BLKmode values are passed by reference.  */
  gcc_assert (mode != BLKmode);

  /* First argument-register index past the last named argument;
     everything from there up is anonymous.  */
  first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
		    + ROUND_ADVANCE_ARG (mode, type));

  if (first_anon_arg < M32R_MAX_PARM_REGS)
    {
      /* Note that first_reg_offset < M32R_MAX_PARM_REGS.  */
      int first_reg_offset = first_anon_arg;
      /* Size in words to "pretend" allocate.  */
      int size = M32R_MAX_PARM_REGS - first_reg_offset;
      rtx regblock;

      /* Spill the remaining parameter registers just below the incoming
	 arguments so va_arg can walk them as ordinary stack words.  */
      regblock = gen_frame_mem (BLKmode,
				plus_constant (Pmode, arg_pointer_rtx,
					       FIRST_PARM_OFFSET (0)));
      set_mem_alias_set (regblock, get_varargs_alias_set ());
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = (size * UNITS_PER_WORD);
    }
}
1327
1328
1329 /* Return true if INSN is real instruction bearing insn. */
1330
1331 static int
m32r_is_insn(rtx insn)1332 m32r_is_insn (rtx insn)
1333 {
1334 return (NONDEBUG_INSN_P (insn)
1335 && GET_CODE (PATTERN (insn)) != USE
1336 && GET_CODE (PATTERN (insn)) != CLOBBER);
1337 }
1338
1339 /* Increase the priority of long instructions so that the
1340 short instructions are scheduled ahead of the long ones. */
1341
1342 static int
m32r_adjust_priority(rtx_insn * insn,int priority)1343 m32r_adjust_priority (rtx_insn *insn, int priority)
1344 {
1345 if (m32r_is_insn (insn)
1346 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1347 priority <<= 3;
1348
1349 return priority;
1350 }
1351
1352
1353 /* Indicate how many instructions can be issued at the same time.
1354 This is sort of a lie. The m32r can issue only 1 long insn at
1355 once, but it can issue 2 short insns. The default therefore is
1356 set at 2, but this can be overridden by the command line option
1357 -missue-rate=1. */
1358
1359 static int
m32r_issue_rate(void)1360 m32r_issue_rate (void)
1361 {
1362 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1363 }
1364
1365 /* Cost functions. */
1366 /* Memory is 3 times as expensive as registers.
1367 ??? Is that the right way to look at it? */
1368
1369 static int
m32r_memory_move_cost(machine_mode mode,reg_class_t rclass ATTRIBUTE_UNUSED,bool in ATTRIBUTE_UNUSED)1370 m32r_memory_move_cost (machine_mode mode,
1371 reg_class_t rclass ATTRIBUTE_UNUSED,
1372 bool in ATTRIBUTE_UNUSED)
1373 {
1374 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1375 return 6;
1376 else
1377 return 12;
1378 }
1379
/* Implement TARGET_RTX_COSTS.  Supply costs for the constant and
   arithmetic cases the M32R cares about; returning false defers to
   the generic cost model for everything else.  */

static bool
m32r_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
		int outer_code ATTRIBUTE_UNUSED,
		int opno ATTRIBUTE_UNUSED, int *total,
		bool speed ATTRIBUTE_UNUSED)
{
  int code = GET_CODE (x);

  switch (code)
    {
      /* Small integers are as cheap as registers.  4 byte values can be
         fetched as immediate constants - let's give that the cost of an
         extra insn.  */
    case CONST_INT:
      if (INT16_P (INTVAL (x)))
	{
	  *total = 0;
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (1);
      return true;

    case CONST_DOUBLE:
      {
	rtx high, low;

	/* split_double yields the two CONST_INT word halves; each half
	   that does not fit in 16 bits costs one extra insn.  */
	split_double (x, &high, &low);
	*total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
				+ !INT16_P (INTVAL (low)));
	return true;
      }

    case MULT:
      *total = COSTS_N_INSNS (3);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (10);
      return true;

    default:
      return false;
    }
}
1432
1433 /* Type of function DECL.
1434
1435 The result is cached. To reset the cache at the end of a function,
1436 call with DECL = NULL_TREE. */
1437
1438 enum m32r_function_type
m32r_compute_function_type(tree decl)1439 m32r_compute_function_type (tree decl)
1440 {
1441 /* Cached value. */
1442 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1443 /* Last function we were called for. */
1444 static tree last_fn = NULL_TREE;
1445
1446 /* Resetting the cached value? */
1447 if (decl == NULL_TREE)
1448 {
1449 fn_type = M32R_FUNCTION_UNKNOWN;
1450 last_fn = NULL_TREE;
1451 return fn_type;
1452 }
1453
1454 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1455 return fn_type;
1456
1457 /* Compute function type. */
1458 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1459 ? M32R_FUNCTION_INTERRUPT
1460 : M32R_FUNCTION_NORMAL);
1461
1462 last_fn = decl;
1463 return fn_type;
1464 }
1465 /* Function prologue/epilogue handlers. */
1466
1467 /* M32R stack frames look like:
1468
1469 Before call After call
1470 +-----------------------+ +-----------------------+
1471 | | | |
1472 high | local variables, | | local variables, |
1473 mem | reg save area, etc. | | reg save area, etc. |
1474 | | | |
1475 +-----------------------+ +-----------------------+
1476 | | | |
1477 | arguments on stack. | | arguments on stack. |
1478 | | | |
1479 SP+0->+-----------------------+ +-----------------------+
1480 | reg parm save area, |
1481 | only created for |
1482 | variable argument |
1483 | functions |
1484 +-----------------------+
1485 | previous frame ptr |
1486 +-----------------------+
1487 | |
1488 | register save area |
1489 | |
1490 +-----------------------+
1491 | return address |
1492 +-----------------------+
1493 | |
1494 | local variables |
1495 | |
1496 +-----------------------+
1497 | |
1498 | alloca allocations |
1499 | |
1500 +-----------------------+
1501 | |
1502 low | arguments on stack |
1503 memory | |
1504 SP+0->+-----------------------+
1505
1506 Notes:
1507 1) The "reg parm save area" does not exist for non variable argument fns.
1508 2) The "reg parm save area" can be eliminated completely if we saved regs
1509 containing anonymous args separately but that complicates things too
1510 much (so it's not done).
1511 3) The return address is saved after the register save area so as to have as
1512 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1513
/* Structure to be filled in by m32r_compute_frame_size with register
   save masks, and offsets for the current function.
   All *_size fields are byte counts.  */
struct m32r_frame_info
{
  unsigned int total_size;	/* # bytes that the entire frame takes up.  */
  unsigned int extra_size;	/* # bytes of extra stuff.  */
  unsigned int pretend_size;	/* # bytes we push and pretend caller did.  */
  unsigned int args_size;	/* # bytes that outgoing arguments take up.  */
  unsigned int reg_size;	/* # bytes needed to store regs.  */
  unsigned int var_size;	/* # bytes that variables take up.  */
  unsigned int gmask;		/* Mask of saved gp registers; bit N = reg N.  */
  unsigned int save_fp;		/* Nonzero if fp must be saved.  */
  unsigned int save_lr;		/* Nonzero if lr (return addr) must be saved.  */
  int initialized;		/* Nonzero if frame size already calculated.  */
};

/* Current frame information calculated by m32r_compute_frame_size.  */
static struct m32r_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
static struct m32r_frame_info zero_frame_info;

/* gmask bits for the frame pointer and the return-address register.  */
#define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
#define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))

/* Tell prologue and epilogue if register REGNO should be saved / restored.
   The return address and frame pointer are treated separately.
   Don't consider them here.
   In an interrupt handler even call-clobbered registers must be saved.  */
#define MUST_SAVE_REGISTER(regno, interrupt_p) \
  ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
   && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))

#define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
#define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)

#define SHORT_INSN_SIZE 2	/* Size of small instructions.  */
#define LONG_INSN_SIZE 4	/* Size of long instructions.  */
1551
1552 /* Return the bytes needed to compute the frame pointer from the current
1553 stack pointer.
1554
1555 SIZE is the size needed for local variables. */
1556
1557 unsigned int
m32r_compute_frame_size(poly_int64 size)1558 m32r_compute_frame_size (poly_int64 size) /* # of var. bytes allocated. */
1559 {
1560 unsigned int regno;
1561 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1562 unsigned int reg_size;
1563 unsigned int gmask;
1564 enum m32r_function_type fn_type;
1565 int interrupt_p;
1566 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1567 | crtl->profile);
1568
1569 var_size = M32R_STACK_ALIGN (size);
1570 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1571 pretend_size = crtl->args.pretend_args_size;
1572 extra_size = FIRST_PARM_OFFSET (0);
1573 total_size = extra_size + pretend_size + args_size + var_size;
1574 reg_size = 0;
1575 gmask = 0;
1576
1577 /* See if this is an interrupt handler. Call used registers must be saved
1578 for them too. */
1579 fn_type = m32r_compute_function_type (current_function_decl);
1580 interrupt_p = M32R_INTERRUPT_P (fn_type);
1581
1582 /* Calculate space needed for registers. */
1583 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1584 {
1585 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1586 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1587 {
1588 reg_size += UNITS_PER_WORD;
1589 gmask |= 1 << regno;
1590 }
1591 }
1592
1593 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1594 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1595
1596 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1597 * UNITS_PER_WORD);
1598 total_size += reg_size;
1599
1600 /* ??? Not sure this is necessary, and I don't think the epilogue
1601 handler will do the right thing if this changes total_size. */
1602 total_size = M32R_STACK_ALIGN (total_size);
1603
1604 /* frame_size = total_size - (pretend_size + reg_size); */
1605
1606 /* Save computed information. */
1607 current_frame_info.total_size = total_size;
1608 current_frame_info.extra_size = extra_size;
1609 current_frame_info.pretend_size = pretend_size;
1610 current_frame_info.var_size = var_size;
1611 current_frame_info.args_size = args_size;
1612 current_frame_info.reg_size = reg_size;
1613 current_frame_info.gmask = gmask;
1614 current_frame_info.initialized = reload_completed;
1615
1616 /* Ok, we're done. */
1617 return total_size;
1618 }
1619
1620 /* Worker function for TARGET_CAN_ELIMINATE. */
1621
1622 bool
m32r_can_eliminate(const int from,const int to)1623 m32r_can_eliminate (const int from, const int to)
1624 {
1625 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1626 ? ! frame_pointer_needed
1627 : true);
1628 }
1629
1630
/* The table we use to reference PIC data: the SYMBOL_REF for
   _GLOBAL_OFFSET_TABLE_, created by m32r_load_pic_register.  */
static rtx global_offset_table;
1633
1634 static void
m32r_reload_lr(rtx sp,int size)1635 m32r_reload_lr (rtx sp, int size)
1636 {
1637 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1638
1639 if (size == 0)
1640 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1641 else if (size < 32768)
1642 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1643 gen_rtx_PLUS (Pmode, sp,
1644 GEN_INT (size)))));
1645 else
1646 {
1647 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1648
1649 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1650 emit_insn (gen_addsi3 (tmp, tmp, sp));
1651 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1652 }
1653
1654 emit_use (lr);
1655 }
1656
/* Load the address of _GLOBAL_OFFSET_TABLE_ into the PIC register
   using the get_pc pattern.  */

void
m32r_load_pic_register (void)
{
  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
			 GEN_INT (TARGET_MODEL_SMALL)));

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_use (pic_offset_table_rtx);
}
1668
1669 /* Expand the m32r prologue as a series of insns. */
1670
1671 void
m32r_expand_prologue(void)1672 m32r_expand_prologue (void)
1673 {
1674 int regno;
1675 int frame_size;
1676 unsigned int gmask;
1677 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1678 | crtl->profile);
1679
1680 if (! current_frame_info.initialized)
1681 m32r_compute_frame_size (get_frame_size ());
1682
1683 if (flag_stack_usage_info)
1684 current_function_static_stack_size = current_frame_info.total_size;
1685
1686 gmask = current_frame_info.gmask;
1687
1688 /* These cases shouldn't happen. Catch them now. */
1689 gcc_assert (current_frame_info.total_size || !gmask);
1690
1691 /* Allocate space for register arguments if this is a variadic function. */
1692 if (current_frame_info.pretend_size != 0)
1693 {
1694 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1695 the wrong result on a 64-bit host. */
1696 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1697 emit_insn (gen_addsi3 (stack_pointer_rtx,
1698 stack_pointer_rtx,
1699 GEN_INT (-pretend_size)));
1700 }
1701
1702 /* Save any registers we need to and set up fp. */
1703 if (current_frame_info.save_fp)
1704 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1705
1706 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1707
1708 /* Save any needed call-saved regs (and call-used if this is an
1709 interrupt handler). */
1710 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1711 {
1712 if ((gmask & (1 << regno)) != 0)
1713 emit_insn (gen_movsi_push (stack_pointer_rtx,
1714 gen_rtx_REG (Pmode, regno)));
1715 }
1716
1717 if (current_frame_info.save_lr)
1718 emit_insn (gen_movsi_push (stack_pointer_rtx,
1719 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1720
1721 /* Allocate the stack frame. */
1722 frame_size = (current_frame_info.total_size
1723 - (current_frame_info.pretend_size
1724 + current_frame_info.reg_size));
1725
1726 if (frame_size == 0)
1727 ; /* Nothing to do. */
1728 else if (frame_size <= 32768)
1729 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1730 GEN_INT (-frame_size)));
1731 else
1732 {
1733 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1734
1735 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1736 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1737 }
1738
1739 if (frame_pointer_needed)
1740 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1741
1742 if (crtl->profile)
1743 /* Push lr for mcount (form_pc, x). */
1744 emit_insn (gen_movsi_push (stack_pointer_rtx,
1745 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1746
1747 if (pic_reg_used)
1748 {
1749 m32r_load_pic_register ();
1750 m32r_reload_lr (stack_pointer_rtx,
1751 (crtl->profile ? 0 : frame_size));
1752 }
1753
1754 if (crtl->profile && !pic_reg_used)
1755 emit_insn (gen_blockage ());
1756 }
1757
1758
1759 /* Set up the stack and frame pointer (if desired) for the function.
1760 Note, if this is changed, you need to mirror the changes in
1761 m32r_compute_frame_size which calculates the prolog size. */
1762
1763 static void
m32r_output_function_prologue(FILE * file)1764 m32r_output_function_prologue (FILE * file)
1765 {
1766 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1767
1768 /* If this is an interrupt handler, mark it as such. */
1769 if (M32R_INTERRUPT_P (fn_type))
1770 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1771
1772 if (! current_frame_info.initialized)
1773 m32r_compute_frame_size (get_frame_size ());
1774
1775 /* This is only for the human reader. */
1776 fprintf (file,
1777 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1778 ASM_COMMENT_START,
1779 current_frame_info.var_size,
1780 current_frame_info.reg_size / 4,
1781 current_frame_info.args_size,
1782 current_frame_info.extra_size);
1783 }
1784
1785 /* Output RTL to pop register REGNO from the stack. */
1786
1787 static void
pop(int regno)1788 pop (int regno)
1789 {
1790 rtx x;
1791
1792 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1793 stack_pointer_rtx));
1794 add_reg_note (x, REG_INC, stack_pointer_rtx);
1795 }
1796
/* Expand the m32r epilogue as a series of insns: point SP at the
   bottom of the register save area, pop lr / saved regs / fp in the
   reverse of the prologue's order, then remove any varargs pretend
   area.  */

void
m32r_expand_epilogue (void)
{
  int regno;
  int noepilogue = FALSE;
  int total_size;

  gcc_assert (current_frame_info.initialized);
  total_size = current_frame_info.total_size;

  if (total_size == 0)
    {
      rtx_insn *insn = get_last_insn ();

      /* If the last insn was a BARRIER, we don't have to write any code
	 because a jump (aka return) was put there.  */
      if (insn && NOTE_P (insn))
	insn = prev_nonnote_insn (insn);
      if (insn && BARRIER_P (insn))
	noepilogue = TRUE;
    }

  if (!noepilogue)
    {
      unsigned int var_size = current_frame_info.var_size;
      unsigned int args_size = current_frame_info.args_size;
      unsigned int gmask = current_frame_info.gmask;
      /* After an alloca the SP no longer reflects the static layout.  */
      int can_trust_sp_p = !cfun->calls_alloca;

      if (flag_exceptions)
	emit_insn (gen_blockage ());

      /* The first thing to do is point the sp at the bottom of the register
	 save area.  */
      if (can_trust_sp_p)
	{
	  unsigned int reg_offset = var_size + args_size;

	  if (reg_offset == 0)
	    ; /* Nothing to do.  */
	  else if (reg_offset < 32768)
	    emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				   GEN_INT (reg_offset)));
	  else
	    {
	      /* Offset too large for one add; build it in a temporary.  */
	      rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);

	      emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     tmp));
	    }
	}
      else if (frame_pointer_needed)
	{
	  /* SP is untrustworthy: recompute it from the frame pointer.  */
	  unsigned int reg_offset = var_size + args_size;

	  if (reg_offset == 0)
	    emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
	  else if (reg_offset < 32768)
	    emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
				   GEN_INT (reg_offset)));
	  else
	    {
	      rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);

	      emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
	      emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     tmp));
	    }
	}
      else
	gcc_unreachable ();

      if (current_frame_info.save_lr)
	pop (RETURN_ADDR_REGNUM);

      /* Restore any saved registers, in reverse order of course.  */
      gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
      for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
	{
	  if ((gmask & (1L << regno)) != 0)
	    pop (regno);
	}

      if (current_frame_info.save_fp)
	pop (FRAME_POINTER_REGNUM);

      /* Remove varargs area if present.  */
      if (current_frame_info.pretend_size != 0)
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (current_frame_info.pretend_size)));

      emit_insn (gen_blockage ());
    }
}
1895
1896 /* Do any necessary cleanup after a function to restore stack, frame,
1897 and regs. */
1898
1899 static void
m32r_output_function_epilogue(FILE *)1900 m32r_output_function_epilogue (FILE *)
1901 {
1902 /* Reset state info for each function. */
1903 current_frame_info = zero_frame_info;
1904 m32r_compute_function_type (NULL_TREE);
1905 }
1906
1907 /* Return nonzero if this function is known to have a null or 1 instruction
1908 epilogue. */
1909
1910 int
direct_return(void)1911 direct_return (void)
1912 {
1913 if (!reload_completed)
1914 return FALSE;
1915
1916 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1917 return FALSE;
1918
1919 if (! current_frame_info.initialized)
1920 m32r_compute_frame_size (get_frame_size ());
1921
1922 return current_frame_info.total_size == 0;
1923 }
1924
1925
1926 /* PIC. */
1927
1928 int
m32r_legitimate_pic_operand_p(rtx x)1929 m32r_legitimate_pic_operand_p (rtx x)
1930 {
1931 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1932 return 0;
1933
1934 if (GET_CODE (x) == CONST
1935 && GET_CODE (XEXP (x, 0)) == PLUS
1936 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1937 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1938 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1939 return 0;
1940
1941 return 1;
1942 }
1943
/* Legitimize ORIG for PIC.  A SYMBOL_REF or LABEL_REF is loaded into
   REG (a fresh pseudo is allocated if REG is NULL): local symbols via
   a GOTOFF add, others via a load through the GOT.  CONST expressions
   are decomposed recursively.  Anything else is returned unchanged.  */

rtx
m32r_legitimize_pic_address (rtx orig, rtx reg)
{
#ifdef DEBUG_PIC
  printf("m32r_legitimize_pic_address()\n");
#endif

  if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
    {
      rtx pic_ref, address;
      int subregs = 0;

      if (reg == 0)
	{
	  /* Fresh pseudos are only available before reload.  */
	  gcc_assert (!reload_in_progress && !reload_completed);
	  reg = gen_reg_rtx (Pmode);

	  subregs = 1;
	}

      if (subregs)
	address = gen_reg_rtx (Pmode);
      else
	address = reg;

      crtl->uses_pic_offset_table = 1;

      /* Local symbols and labels: GOT-relative offset plus the PIC
	 register gives the address directly; no GOT load needed.  */
      if (GET_CODE (orig) == LABEL_REF
	  || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
	{
	  emit_insn (gen_gotoff_load_addr (reg, orig));
	  emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx))
;
	  return reg;
	}

      /* Global symbol: load its address out of the GOT entry.  */
      emit_insn (gen_pic_load_addr (address, orig));

      emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
      pic_ref = gen_const_mem (Pmode, address);
      emit_move_insn (reg, pic_ref);
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (!reload_in_progress && !reload_completed);
	  reg = gen_reg_rtx (Pmode);
	}

      /* Legitimize both halves of (const (plus base offset)).  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
	  if (base == reg)
	    offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
	  else
	    offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
	}
      else
	return orig;

      if (CONST_INT_P (offset))
	{
	  if (INT16_P (INTVAL (offset)))
	    return plus_constant (Pmode, base, INTVAL (offset));
	  else
	    {
	      /* Offset too big for an immediate add.  */
	      gcc_assert (! reload_in_progress && ! reload_completed);
	      offset = force_reg (Pmode, offset);
	    }
	}

      return gen_rtx_PLUS (Pmode, base, offset);
    }

  return orig;
}
2027
2028 static rtx
m32r_legitimize_address(rtx x,rtx orig_x ATTRIBUTE_UNUSED,machine_mode mode ATTRIBUTE_UNUSED)2029 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2030 machine_mode mode ATTRIBUTE_UNUSED)
2031 {
2032 if (flag_pic)
2033 return m32r_legitimize_pic_address (x, NULL_RTX);
2034 else
2035 return x;
2036 }
2037
2038 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2039
2040 static bool
m32r_mode_dependent_address_p(const_rtx addr,addr_space_t as ATTRIBUTE_UNUSED)2041 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2042 {
2043 if (GET_CODE (addr) == LO_SUM)
2044 return true;
2045
2046 return false;
2047 }
2048
2049 /* Nested function support. */
2050
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.

   Deliberately empty here.  NOTE(review): presumably trampoline setup
   is handled elsewhere or nested functions are unsupported on this
   port -- confirm before relying on them.  */

void
m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
			    rtx fnaddr ATTRIBUTE_UNUSED,
			    rtx cxt ATTRIBUTE_UNUSED)
{
}
2061
2062 static void
m32r_file_start(void)2063 m32r_file_start (void)
2064 {
2065 default_file_start ();
2066
2067 if (flag_verbose_asm)
2068 fprintf (asm_out_file,
2069 "%s M32R/D special options: -G %d\n",
2070 ASM_COMMENT_START, g_switch_value);
2071
2072 if (TARGET_LITTLE_ENDIAN)
2073 fprintf (asm_out_file, "\t.little\n");
2074 }
2075
2076 /* Print operand X (an rtx) in assembler syntax to file FILE.
2077 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2078 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2079
static void
m32r_print_operand (FILE * file, rtx x, int code)
{
  rtx addr;

  /* First dispatch on the operand-code letter; each handled case
     returns directly.  Code 0 falls through to the generic rtx
     printer at the bottom.  */
  switch (code)
    {
      /* The 's' and 'p' codes are used by output_block_move() to
	 indicate post-increment 's'tores and 'p're-increment loads.  */
    case 's':
      if (REG_P (x))
	fprintf (file, "@+%s", reg_names [REGNO (x)]);
      else
	output_operand_lossage ("invalid operand to %%s code");
      return;

    case 'p':
      if (REG_P (x))
	fprintf (file, "@%s+", reg_names [REGNO (x)]);
      else
	output_operand_lossage ("invalid operand to %%p code");
      return;

    case 'R' :
      /* Write second word of DImode or DFmode reference,
	 register or memory.  */
      if (REG_P (x))
	fputs (reg_names[REGNO (x)+1], file);
      else if (MEM_P (x))
	{
	  machine_mode mode = GET_MODE (x);

	  fprintf (file, "@(");
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of four.  */
	  /* ??? This is taken from rs6000.c I think.  I don't think it is
	     currently necessary, but keep it around.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (mode, plus_constant (Pmode,
						 XEXP (XEXP (x, 0), 0), 4));
	  else
	    output_address (mode, plus_constant (Pmode, XEXP (x, 0), 4));
	  fputc (')', file);
	}
      else
	output_operand_lossage ("invalid operand to %%R code");
      return;

    case 'H' : /* High word.  */
    case 'L' : /* Low word.  */
      if (REG_P (x))
	{
	  /* L = least significant word, H = most significant word.  */
	  if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
	    fputs (reg_names[REGNO (x)], file);
	  else
	    fputs (reg_names[REGNO (x)+1], file);
	}
      else if (CONST_INT_P (x)
	       || GET_CODE (x) == CONST_DOUBLE)
	{
	  rtx first, second;

	  /* split_double orders the halves by WORDS_BIG_ENDIAN, so
	     'L' always selects the low-order word here.  */
	  split_double (x, &first, &second);
	  fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		   code == 'L' ? INTVAL (first) : INTVAL (second));
	}
      else
	output_operand_lossage ("invalid operand to %%H/%%L code");
      return;

    case 'A' :
      {
	char str[30];

	/* 'A' prints a floating-point constant in decimal.  */
	if (GET_CODE (x) != CONST_DOUBLE
	    || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
	  fatal_insn ("bad insn for 'A'", x);

	real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
	fprintf (file, "%s", str);
	return;
      }

    case 'B' : /* Bottom half.  */
    case 'T' : /* Top half.  */
      /* Output the argument to a `seth' insn (sets the Top half-word).
	 For constants output arguments to a seth/or3 pair to set Top and
	 Bottom halves.  For symbols output arguments to a seth/add3 pair to
	 set Top and Bottom halves.  The difference exists because for
	 constants seth/or3 is more readable but for symbols we need to use
	 the same scheme as `ld' and `st' insns (16-bit addend is signed).  */
      switch (GET_CODE (x))
	{
	case CONST_INT :
	case CONST_DOUBLE :
	  {
	    rtx first, second;

	    split_double (x, &first, &second);
	    x = WORDS_BIG_ENDIAN ? second : first;
	    fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		     (code == 'B'
		      ? INTVAL (x) & 0xffff
		      : (INTVAL (x) >> 16) & 0xffff));
	  }
	  return;
	case CONST :
	case SYMBOL_REF :
	  /* Small-data symbols are addressed off the SDA base, so the
	     bottom half is an sda() relocation rather than low().  */
	  if (code == 'B'
	      && small_data_operand (x, VOIDmode))
	    {
	      fputs ("sda(", file);
	      output_addr_const (file, x);
	      fputc (')', file);
	      return;
	    }
	  /* fall through */
	case LABEL_REF :
	  fputs (code == 'T' ? "shigh(" : "low(", file);
	  output_addr_const (file, x);
	  fputc (')', file);
	  return;
	default :
	  output_operand_lossage ("invalid operand to %%T/%%B code");
	  return;
	}
      break;

    case 'U' :
      /* ??? wip */
      /* Output a load/store with update indicator if appropriate.  */
      if (MEM_P (x))
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fputs (".a", file);
	}
      else
	output_operand_lossage ("invalid operand to %%U code");
      return;

    case 'N' :
      /* Print a constant value negated.  */
      if (CONST_INT_P (x))
	output_addr_const (file, GEN_INT (- INTVAL (x)));
      else
	output_operand_lossage ("invalid operand to %%N code");
      return;

    case 'X' :
      /* Print a const_int in hex.  Used in comments.
	 Note: silently prints nothing for non-CONST_INT operands.  */
      if (CONST_INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
      return;

    case '#' :
      fputs (IMMEDIATE_PREFIX, file);
      return;

    case 0 :
      /* Do nothing special.  */
      break;

    default :
      /* Unknown flag.  */
      output_operand_lossage ("invalid operand output code");
    }

  /* No operand code (or code 0): print the operand generically.  */
  switch (GET_CODE (x))
    {
    case REG :
      fputs (reg_names[REGNO (x)], file);
      break;

    case MEM :
      addr = XEXP (x, 0);
      if (GET_CODE (addr) == PRE_INC)
	{
	  if (!REG_P (XEXP (addr, 0)))
	    fatal_insn ("pre-increment address is not a register", x);

	  fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
	}
      else if (GET_CODE (addr) == PRE_DEC)
	{
	  if (!REG_P (XEXP (addr, 0)))
	    fatal_insn ("pre-decrement address is not a register", x);

	  fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
	}
      else if (GET_CODE (addr) == POST_INC)
	{
	  if (!REG_P (XEXP (addr, 0)))
	    fatal_insn ("post-increment address is not a register", x);

	  fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
	}
      else
	{
	  fputs ("@(", file);
	  output_address (GET_MODE (x), addr);
	  fputc (')', file);
	}
      break;

    case CONST_DOUBLE :
      /* We handle SFmode constants here as output_addr_const doesn't.  */
      if (GET_MODE (x) == SFmode)
	{
	  long l;

	  REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
	  fprintf (file, "0x%08lx", l);
	  break;
	}

      /* FALLTHRU */
      /* Let output_addr_const deal with it.  */

    default :
      output_addr_const (file, x);
      break;
    }
}
2306
2307 /* Print a memory address as an operand to reference that memory location. */
2308
static void
m32r_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
{
  rtx base;
  rtx index = 0;
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case REG :
      fputs (reg_names[REGNO (addr)], file);
      break;

    case PLUS :
      /* Decompose into base + (constant offset | register/symbol index).
	 Exactly one of the three branches below fires, so BASE is
	 always set before use.  */
      if (CONST_INT_P (XEXP (addr, 0)))
	offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
      else if (CONST_INT_P (XEXP (addr, 1)))
	offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
      else
	base = XEXP (addr, 0), index = XEXP (addr, 1);
      if (REG_P (base))
	{
	  /* Print the offset first (if present) to conform to the manual.  */
	  if (index == 0)
	    {
	      if (offset != 0)
		fprintf (file, "%d,", offset);
	      fputs (reg_names[REGNO (base)], file);
	    }
	  /* The chip doesn't support this, but left in for generality.  */
	  else if (REG_P (index))
	    fprintf (file, "%s,%s",
		     reg_names[REGNO (base)], reg_names[REGNO (index)]);
	  /* Not sure this can happen, but leave in for now.  */
	  else if (GET_CODE (index) == SYMBOL_REF)
	    {
	      output_addr_const (file, index);
	      fputc (',', file);
	      fputs (reg_names[REGNO (base)], file);
	    }
	  else
	    fatal_insn ("bad address", addr);
	}
      else if (GET_CODE (base) == LO_SUM)
	{
	  /* (LO_SUM reg sym) + offset: fold the offset into the
	     relocation expression.  */
	  gcc_assert (!index && REG_P (XEXP (base, 0)));
	  if (small_data_operand (XEXP (base, 1), VOIDmode))
	    fputs ("sda(", file);
	  else
	    fputs ("low(", file);
	  output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
						  offset));
	  fputs ("),", file);
	  fputs (reg_names[REGNO (XEXP (base, 0))], file);
	}
      else
	fatal_insn ("bad address", addr);
      break;

    case LO_SUM :
      if (!REG_P (XEXP (addr, 0)))
	fatal_insn ("lo_sum not of register", addr);
      /* Small-data symbols use the sda() relocation, others low().  */
      if (small_data_operand (XEXP (addr, 1), VOIDmode))
	fputs ("sda(", file);
      else
	fputs ("low(", file);
      output_addr_const (file, XEXP (addr, 1));
      fputs ("),", file);
      fputs (reg_names[REGNO (XEXP (addr, 0))], file);
      break;

    case PRE_INC : /* Assume SImode.  */
      fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PRE_DEC : /* Assume SImode.  */
      fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC : /* Assume SImode.  */
      fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    default :
      output_addr_const (file, addr);
      break;
    }
}
2397
2398 static bool
m32r_print_operand_punct_valid_p(unsigned char code)2399 m32r_print_operand_punct_valid_p (unsigned char code)
2400 {
2401 return m32r_punct_chars[code];
2402 }
2403
2404 /* Return true if the operands are the constants 0 and 1. */
2405
2406 int
zero_and_one(rtx operand1,rtx operand2)2407 zero_and_one (rtx operand1, rtx operand2)
2408 {
2409 return
2410 CONST_INT_P (operand1)
2411 && CONST_INT_P (operand2)
2412 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2413 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2414 }
2415
2416 /* Generate the correct assembler code to handle the conditional loading of a
2417 value into a register. It is known that the operands satisfy the
2418 conditional_move_operand() function above. The destination is operand[0].
2419 The condition is operand [1]. The 'true' value is operand [2] and the
2420 'false' value is operand [3]. */
2421
2422 char *
emit_cond_move(rtx * operands,rtx insn ATTRIBUTE_UNUSED)2423 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2424 {
2425 static char buffer [100];
2426 const char * dest = reg_names [REGNO (operands [0])];
2427
2428 buffer [0] = 0;
2429
2430 /* Destination must be a register. */
2431 gcc_assert (REG_P (operands [0]));
2432 gcc_assert (conditional_move_operand (operands [2], SImode));
2433 gcc_assert (conditional_move_operand (operands [3], SImode));
2434
2435 /* Check to see if the test is reversed. */
2436 if (GET_CODE (operands [1]) == NE)
2437 {
2438 rtx tmp = operands [2];
2439 operands [2] = operands [3];
2440 operands [3] = tmp;
2441 }
2442
2443 sprintf (buffer, "mvfc %s, cbr", dest);
2444
2445 /* If the true value was '0' then we need to invert the results of the move. */
2446 if (INTVAL (operands [2]) == 0)
2447 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2448 dest, dest);
2449
2450 return buffer;
2451 }
2452
2453 /* Returns true if the registers contained in the two
2454 rtl expressions are different. */
2455
2456 int
m32r_not_same_reg(rtx a,rtx b)2457 m32r_not_same_reg (rtx a, rtx b)
2458 {
2459 int reg_a = -1;
2460 int reg_b = -2;
2461
2462 while (GET_CODE (a) == SUBREG)
2463 a = SUBREG_REG (a);
2464
2465 if (REG_P (a))
2466 reg_a = REGNO (a);
2467
2468 while (GET_CODE (b) == SUBREG)
2469 b = SUBREG_REG (b);
2470
2471 if (REG_P (b))
2472 reg_b = REGNO (b);
2473
2474 return reg_a != reg_b;
2475 }
2476
2477
2478 rtx
m32r_function_symbol(const char * name)2479 m32r_function_symbol (const char *name)
2480 {
2481 int extra_flags = 0;
2482 enum m32r_model model;
2483 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2484
2485 if (TARGET_MODEL_SMALL)
2486 model = M32R_MODEL_SMALL;
2487 else if (TARGET_MODEL_MEDIUM)
2488 model = M32R_MODEL_MEDIUM;
2489 else if (TARGET_MODEL_LARGE)
2490 model = M32R_MODEL_LARGE;
2491 else
2492 gcc_unreachable (); /* Shouldn't happen. */
2493 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2494
2495 if (extra_flags)
2496 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2497
2498 return sym;
2499 }
2500
2501 /* Use a library function to move some bytes. */
2502
2503 static void
block_move_call(rtx dest_reg,rtx src_reg,rtx bytes_rtx)2504 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2505 {
2506 /* We want to pass the size as Pmode, which will normally be SImode
2507 but will be DImode if we are using 64-bit longs and pointers. */
2508 if (GET_MODE (bytes_rtx) != VOIDmode
2509 && GET_MODE (bytes_rtx) != Pmode)
2510 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2511
2512 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2513 VOIDmode, dest_reg, Pmode, src_reg, Pmode,
2514 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2515 TYPE_UNSIGNED (sizetype)),
2516 TYPE_MODE (sizetype));
2517 }
2518
2519 /* Expand string/block move operations.
2520
2521 operands[0] is the pointer to the destination.
2522 operands[1] is the pointer to the source.
2523 operands[2] is the number of bytes to move.
2524 operands[3] is the alignment.
2525
2526 Returns 1 upon success, 0 otherwise. */
2527
int
m32r_expand_block_move (rtx operands[])
{
  rtx orig_dst = operands[0];
  rtx orig_src = operands[1];
  rtx bytes_rtx = operands[2];
  rtx align_rtx = operands[3];
  int constp = CONST_INT_P (bytes_rtx);
  HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
  int align = INTVAL (align_rtx);
  int leftover;
  rtx src_reg;
  rtx dst_reg;

  /* A known zero-length (or negative) move is trivially done.  */
  if (constp && bytes <= 0)
    return 1;

  /* Move the address into scratch registers.  */
  dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
  src_reg = copy_addr_to_reg (XEXP (orig_src, 0));

  if (align > UNITS_PER_WORD)
    align = UNITS_PER_WORD;

  /* If we prefer size over speed, always use a function call.
     If we do not know the size, use a function call.
     If the blocks are not word aligned, use a function call.  */
  if (optimize_size || ! constp || align != UNITS_PER_WORD)
    {
      block_move_call (dst_reg, src_reg, bytes_rtx);
      return 0;
    }

  /* Split the copy into MAX_MOVE_BYTES-sized chunks plus a tail.  */
  leftover = bytes % MAX_MOVE_BYTES;
  bytes -= leftover;

  /* If necessary, generate a loop to handle the bulk of the copy.  */
  if (bytes)
    {
      rtx_code_label *label = NULL;
      rtx final_src = NULL_RTX;
      rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
      rtx rounded_total = GEN_INT (bytes);
      rtx new_dst_reg = gen_reg_rtx (SImode);
      rtx new_src_reg = gen_reg_rtx (SImode);

      /* If we are going to have to perform this loop more than
	 once, then generate a label and compute the address the
	 source register will contain upon completion of the final
	 iteration.  */
      if (bytes > MAX_MOVE_BYTES)
	{
	  final_src = gen_reg_rtx (Pmode);

	  /* Small totals fit in an add3 immediate; larger ones need a
	     separate load of the constant first.  */
	  if (INT16_P(bytes))
	    emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
	  else
	    {
	      emit_insn (gen_movsi (final_src, rounded_total));
	      emit_insn (gen_addsi3 (final_src, final_src, src_reg));
	    }

	  label = gen_label_rtx ();
	  emit_label (label);
	}

      /* It is known that output_block_move() will update src_reg to point
	 to the word after the end of the source block, and dst_reg to point
	 to the last word of the destination block, provided that the block
	 is MAX_MOVE_BYTES long.  */
      emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
					new_dst_reg, new_src_reg));
      emit_move_insn (dst_reg, new_dst_reg);
      emit_move_insn (src_reg, new_src_reg);
      /* dst_reg points at the last word copied; bump past it.  */
      emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));

      /* Loop back until src_reg reaches the precomputed end address.  */
      if (bytes > MAX_MOVE_BYTES)
	{
	  rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
	  emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
	}
    }

  /* Copy the sub-MAX_MOVE_BYTES tail, if any.  */
  if (leftover)
    emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
				      gen_reg_rtx (SImode),
				      gen_reg_rtx (SImode)));
  return 1;
}
2617
2618
2619 /* Emit load/stores for a small constant word aligned block_move.
2620
2621 operands[0] is the memory address of the destination.
2622 operands[1] is the memory address of the source.
2623 operands[2] is the number of bytes to move.
2624 operands[3] is a temp register.
2625 operands[4] is a temp register. */
2626
2627 void
m32r_output_block_move(rtx insn ATTRIBUTE_UNUSED,rtx operands[])2628 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2629 {
2630 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2631 int first_time;
2632 int got_extra = 0;
2633
2634 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2635
2636 /* We do not have a post-increment store available, so the first set of
2637 stores are done without any increment, then the remaining ones can use
2638 the pre-increment addressing mode.
2639
2640 Note: expand_block_move() also relies upon this behavior when building
2641 loops to copy large blocks. */
2642 first_time = 1;
2643
2644 while (bytes > 0)
2645 {
2646 if (bytes >= 8)
2647 {
2648 if (first_time)
2649 {
2650 output_asm_insn ("ld\t%5, %p1", operands);
2651 output_asm_insn ("ld\t%6, %p1", operands);
2652 output_asm_insn ("st\t%5, @%0", operands);
2653 output_asm_insn ("st\t%6, %s0", operands);
2654 }
2655 else
2656 {
2657 output_asm_insn ("ld\t%5, %p1", operands);
2658 output_asm_insn ("ld\t%6, %p1", operands);
2659 output_asm_insn ("st\t%5, %s0", operands);
2660 output_asm_insn ("st\t%6, %s0", operands);
2661 }
2662
2663 bytes -= 8;
2664 }
2665 else if (bytes >= 4)
2666 {
2667 if (bytes > 4)
2668 got_extra = 1;
2669
2670 output_asm_insn ("ld\t%5, %p1", operands);
2671
2672 if (got_extra)
2673 output_asm_insn ("ld\t%6, %p1", operands);
2674
2675 if (first_time)
2676 output_asm_insn ("st\t%5, @%0", operands);
2677 else
2678 output_asm_insn ("st\t%5, %s0", operands);
2679
2680 bytes -= 4;
2681 }
2682 else
2683 {
2684 /* Get the entire next word, even though we do not want all of it.
2685 The saves us from doing several smaller loads, and we assume that
2686 we cannot cause a page fault when at least part of the word is in
2687 valid memory [since we don't get called if things aren't properly
2688 aligned]. */
2689 int dst_offset = first_time ? 0 : 4;
2690 /* The amount of increment we have to make to the
2691 destination pointer. */
2692 int dst_inc_amount = dst_offset + bytes - 4;
2693 /* The same for the source pointer. */
2694 int src_inc_amount = bytes;
2695 int last_shift;
2696 rtx my_operands[3];
2697
2698 /* If got_extra is true then we have already loaded
2699 the next word as part of loading and storing the previous word. */
2700 if (! got_extra)
2701 output_asm_insn ("ld\t%6, @%1", operands);
2702
2703 if (bytes >= 2)
2704 {
2705 bytes -= 2;
2706
2707 output_asm_insn ("sra3\t%5, %6, #16", operands);
2708 my_operands[0] = operands[5];
2709 my_operands[1] = GEN_INT (dst_offset);
2710 my_operands[2] = operands[0];
2711 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2712
2713 /* If there is a byte left to store then increment the
2714 destination address and shift the contents of the source
2715 register down by 8 bits. We could not do the address
2716 increment in the store half word instruction, because it does
2717 not have an auto increment mode. */
2718 if (bytes > 0) /* assert (bytes == 1) */
2719 {
2720 dst_offset += 2;
2721 last_shift = 8;
2722 }
2723 }
2724 else
2725 last_shift = 24;
2726
2727 if (bytes > 0)
2728 {
2729 my_operands[0] = operands[6];
2730 my_operands[1] = GEN_INT (last_shift);
2731 output_asm_insn ("srai\t%0, #%1", my_operands);
2732 my_operands[0] = operands[6];
2733 my_operands[1] = GEN_INT (dst_offset);
2734 my_operands[2] = operands[0];
2735 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2736 }
2737
2738 /* Update the destination pointer if needed. We have to do
2739 this so that the patterns matches what we output in this
2740 function. */
2741 if (dst_inc_amount
2742 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2743 {
2744 my_operands[0] = operands[0];
2745 my_operands[1] = GEN_INT (dst_inc_amount);
2746 output_asm_insn ("addi\t%0, #%1", my_operands);
2747 }
2748
2749 /* Update the source pointer if needed. We have to do this
2750 so that the patterns matches what we output in this
2751 function. */
2752 if (src_inc_amount
2753 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2754 {
2755 my_operands[0] = operands[1];
2756 my_operands[1] = GEN_INT (src_inc_amount);
2757 output_asm_insn ("addi\t%0, #%1", my_operands);
2758 }
2759
2760 bytes = 0;
2761 }
2762
2763 first_time = 0;
2764 }
2765 }
2766
2767 /* Implement TARGET_HARD_REGNO_MODE_OK. */
2768
2769 static bool
m32r_hard_regno_mode_ok(unsigned int regno,machine_mode mode)2770 m32r_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
2771 {
2772 return (m32r_hard_regno_modes[regno] & m32r_mode_class[mode]) != 0;
2773 }
2774
2775 /* Implement TARGET_MODES_TIEABLE_P. Tie QI/HI/SI modes together. */
2776
2777 static bool
m32r_modes_tieable_p(machine_mode mode1,machine_mode mode2)2778 m32r_modes_tieable_p (machine_mode mode1, machine_mode mode2)
2779 {
2780 return (GET_MODE_CLASS (mode1) == MODE_INT
2781 && GET_MODE_CLASS (mode2) == MODE_INT
2782 && GET_MODE_SIZE (mode1) <= UNITS_PER_WORD
2783 && GET_MODE_SIZE (mode2) <= UNITS_PER_WORD);
2784 }
2785
2786 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2787
2788 int
m32r_hard_regno_rename_ok(unsigned int old_reg ATTRIBUTE_UNUSED,unsigned int new_reg)2789 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2790 unsigned int new_reg)
2791 {
2792 /* Interrupt routines can't clobber any register that isn't already used. */
2793 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2794 && !df_regs_ever_live_p (new_reg))
2795 return 0;
2796
2797 return 1;
2798 }
2799
2800 rtx
m32r_return_addr(int count)2801 m32r_return_addr (int count)
2802 {
2803 if (count != 0)
2804 return const0_rtx;
2805
2806 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2807 }
2808
/* Implement TARGET_TRAMPOLINE_INIT: fill in the variable parts of a
   trampoline.  M_TRAMP is the trampoline memory, FNDECL the nested
   function and CHAIN_VALUE the static chain.  */

static void
m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  /* Words 0-3: fixed M32R code sequence, with each 32-bit word given
     in the byte order matching the target endianness.
     NOTE(review): the opcodes presumably load the chain and function
     address stored at offsets 16 and 20 below and jump -- confirm
     against the trampoline description in m32r.h.  */
  emit_move_insn (adjust_address (m_tramp, SImode, 0),
		  gen_int_mode (TARGET_LITTLE_ENDIAN ?
				0x017e8e17 : 0x178e7e01, SImode));
  emit_move_insn (adjust_address (m_tramp, SImode, 4),
		  gen_int_mode (TARGET_LITTLE_ENDIAN ?
				0x0c00ae86 : 0x86ae000c, SImode));
  emit_move_insn (adjust_address (m_tramp, SImode, 8),
		  gen_int_mode (TARGET_LITTLE_ENDIAN ?
				0xe627871e : 0x1e8727e6, SImode));
  emit_move_insn (adjust_address (m_tramp, SImode, 12),
		  gen_int_mode (TARGET_LITTLE_ENDIAN ?
				0xc616c626 : 0x26c61fc6, SImode));
  /* Word 4: the static chain; word 5: the nested function's address.  */
  emit_move_insn (adjust_address (m_tramp, SImode, 16),
		  chain_value);
  emit_move_insn (adjust_address (m_tramp, SImode, 20),
		  XEXP (DECL_RTL (fndecl), 0));

  /* Flush the instruction cache over the trampoline: via the trap
     number if configured, else via the named cache-flush function.  */
  if (m32r_cache_flush_trap >= 0)
    emit_insn (gen_flush_icache
	       (validize_mem (adjust_address (m_tramp, SImode, 0)),
		gen_int_mode (m32r_cache_flush_trap, SImode)));
  else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
    emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
		       LCT_NORMAL, VOIDmode, XEXP (m_tramp, 0), Pmode,
		       gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
		       GEN_INT (3), SImode);
}
2839
2840 /* True if X is a reg that can be used as a base reg. */
2841
2842 static bool
m32r_rtx_ok_for_base_p(const_rtx x,bool strict)2843 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2844 {
2845 if (! REG_P (x))
2846 return false;
2847
2848 if (strict)
2849 {
2850 if (GPR_P (REGNO (x)))
2851 return true;
2852 }
2853 else
2854 {
2855 if (GPR_P (REGNO (x))
2856 || REGNO (x) == ARG_POINTER_REGNUM
2857 || ! HARD_REGISTER_P (x))
2858 return true;
2859 }
2860
2861 return false;
2862 }
2863
2864 static inline bool
m32r_rtx_ok_for_offset_p(const_rtx x)2865 m32r_rtx_ok_for_offset_p (const_rtx x)
2866 {
2867 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2868 }
2869
2870 static inline bool
m32r_legitimate_offset_addres_p(machine_mode mode ATTRIBUTE_UNUSED,const_rtx x,bool strict)2871 m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED,
2872 const_rtx x, bool strict)
2873 {
2874 if (GET_CODE (x) == PLUS
2875 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2876 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2877 return true;
2878
2879 return false;
2880 }
2881
2882 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2883 since more than one instruction will be required. */
2884
2885 static inline bool
m32r_legitimate_lo_sum_addres_p(machine_mode mode,const_rtx x,bool strict)2886 m32r_legitimate_lo_sum_addres_p (machine_mode mode, const_rtx x,
2887 bool strict)
2888 {
2889 if (GET_CODE (x) == LO_SUM
2890 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2891 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2892 && CONSTANT_P (XEXP (x, 1)))
2893 return true;
2894
2895 return false;
2896 }
2897
2898 /* Is this a load and increment operation. */
2899
2900 static inline bool
m32r_load_postinc_p(machine_mode mode,const_rtx x,bool strict)2901 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
2902 {
2903 if ((mode == SImode || mode == SFmode)
2904 && GET_CODE (x) == POST_INC
2905 && REG_P (XEXP (x, 0))
2906 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2907 return true;
2908
2909 return false;
2910 }
2911
2912 /* Is this an increment/decrement and store operation. */
2913
2914 static inline bool
m32r_store_preinc_predec_p(machine_mode mode,const_rtx x,bool strict)2915 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
2916 {
2917 if ((mode == SImode || mode == SFmode)
2918 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2919 && REG_P (XEXP (x, 0)) \
2920 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2921 return true;
2922
2923 return false;
2924 }
2925
2926 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2927
2928 static bool
m32r_legitimate_address_p(machine_mode mode,rtx x,bool strict)2929 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2930 {
2931 if (m32r_rtx_ok_for_base_p (x, strict)
2932 || m32r_legitimate_offset_addres_p (mode, x, strict)
2933 || m32r_legitimate_lo_sum_addres_p (mode, x, strict)
2934 || m32r_load_postinc_p (mode, x, strict)
2935 || m32r_store_preinc_predec_p (mode, x, strict))
2936 return true;
2937
2938 return false;
2939 }
2940
2941 static void
m32r_conditional_register_usage(void)2942 m32r_conditional_register_usage (void)
2943 {
2944 if (flag_pic)
2945 {
2946 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2947 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2948 }
2949 }
2950
2951 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2952
2953 We don't allow (plus symbol large-constant) as the relocations can't
2954 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2955 We allow all CONST_DOUBLE's as the md file patterns will force the
2956 constant to memory if they can't handle them. */
2957
2958 static bool
m32r_legitimate_constant_p(machine_mode mode ATTRIBUTE_UNUSED,rtx x)2959 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2960 {
2961 return !(GET_CODE (x) == CONST
2962 && GET_CODE (XEXP (x, 0)) == PLUS
2963 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2964 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2965 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2966 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2967 }
2968
2969 /* Implement TARGET_STARTING_FRAME_OFFSET. The frame pointer points at
2970 the same place as the stack pointer, except if alloca has been called. */
2971
2972 static HOST_WIDE_INT
m32r_starting_frame_offset(void)2973 m32r_starting_frame_offset (void)
2974 {
2975 return M32R_STACK_ALIGN (crtl->outgoing_args_size);
2976 }
2977