1 /* Subroutines used for code generation on Renesas RL78 processors.
2 Copyright (C) 2011-2018 Free Software Foundation, Inc.
3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #define IN_TARGET_CODE 1
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "df.h"
31 #include "memmodel.h"
32 #include "tm_p.h"
33 #include "stringpool.h"
34 #include "attribs.h"
35 #include "optabs.h"
36 #include "emit-rtl.h"
37 #include "recog.h"
38 #include "diagnostic-core.h"
39 #include "varasm.h"
40 #include "stor-layout.h"
41 #include "calls.h"
42 #include "output.h"
43 #include "insn-attr.h"
44 #include "explow.h"
45 #include "expr.h"
46 #include "reload.h"
47 #include "cfgrtl.h"
48 #include "langhooks.h"
49 #include "tree-pass.h"
50 #include "context.h"
51 #include "tm-constrs.h" /* for satisfies_constraint_*(). */
52 #include "builtins.h"
53
54 /* This file should be included last. */
55 #include "target-def.h"
56
57 static inline bool is_interrupt_func (const_tree decl);
58 static inline bool is_brk_interrupt_func (const_tree decl);
59 static void rl78_reorg (void);
60 static const char *rl78_strip_name_encoding (const char *);
61 static const char *rl78_strip_nonasm_name_encoding (const char *);
62 static section * rl78_select_section (tree, int, unsigned HOST_WIDE_INT);
63
64
65 /* Debugging statements are tagged with DEBUG0 only so that they can
66 be easily enabled individually, by replacing the '0' with '1' as
67 needed. */
68 #define DEBUG0 0
69 #define DEBUG1 1
70
71 /* REGISTER_NAMES has the names for individual 8-bit registers, but
72 these have the names we need to use when referring to 16-bit
73 register pairs. */
74 static const char * const word_regnames[] =
75 {
76 "ax", "AX", "bc", "BC", "de", "DE", "hl", "HL",
77 "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
78 "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
79 "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
80 "sp", "ap", "psw", "es", "cs"
81 };
82
83 /* Used by rl78_addsi3_internal for formatting insn output.  */
84 static char fmt_buffer[1024];
85
86 /* Structure for G13 MDUC registers. */
87 struct mduc_reg_type
88 {
89 unsigned int address;
90 enum machine_mode mode;
91 };
92
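/* The MDUC unit registers that must be saved and restored around
   interrupt handlers when MDUC register saving is enabled.  */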
93 struct mduc_reg_type mduc_regs[] =
94 {
95 {0xf00e8, E_QImode},
96 {0xffff0, E_HImode},
97 {0xffff2, E_HImode},
98 {0xf2224, E_HImode},
99 {0xf00e0, E_HImode},
100 {0xf00e2, E_HImode}
101 };
102
103 struct GTY(()) machine_function
104 {
105 /* If set, the rest of the fields have been computed. */
106 int computed;
107 /* Which register pairs need to be pushed in the prologue. */
108 int need_to_push [FIRST_PSEUDO_REGISTER / 2];
109
110 /* These fields describe the frame layout... */
111 /* arg pointer */
112 /* 4 bytes for saved PC */
113 int framesize_regs;
114 /* frame pointer */
115 int framesize_locals;
116 int framesize_outgoing;
117 /* stack pointer */
118 int framesize;
119
120 /* If set, recog is allowed to match against the "real" patterns. */
121 int real_insns_ok;
122 /* If set, recog is allowed to match against the "virtual" patterns. */
123 int virt_insns_ok;
124 /* Set if the current function needs to clean up any trampolines. */
125 int trampolines_used;
126 /* True if the ES register is used and hence
127 needs to be saved inside interrupt handlers. */
128 bool uses_es;
129 };
130
131 /* This is our init_machine_status, as set in
132 rl78_option_override. */
133 static struct machine_function *
134 rl78_init_machine_status (void)
135 {
136 struct machine_function *m;
137
138 m = ggc_cleared_alloc<machine_function> ();
139 m->virt_insns_ok = 1;
140
141 return m;
142 }
143
144 /* This pass converts virtual instructions using virtual registers, to
145 real instructions using real registers. Rather than run it as
146 reorg, we reschedule it before vartrack to help with debugging. */
147 namespace
148 {
149 const pass_data pass_data_rl78_devirt =
150 {
151 RTL_PASS, /* type */
152 "devirt", /* name */
153 OPTGROUP_NONE, /* optinfo_flags */
154 TV_MACH_DEP, /* tv_id */
155 0, /* properties_required */
156 0, /* properties_provided */
157 0, /* properties_destroyed */
158 0, /* todo_flags_start */
159 0, /* todo_flags_finish */
160 };
161
162 class pass_rl78_devirt : public rtl_opt_pass
163 {
164 public:
165 pass_rl78_devirt (gcc::context *ctxt)
166 : rtl_opt_pass (pass_data_rl78_devirt, ctxt)
167 {
168 }
169
170 /* opt_pass methods: */
171 virtual unsigned int execute (function *)
172 {
173 rl78_reorg ();
174 return 0;
175 }
176 };
177 } // anon namespace
178
179 rtl_opt_pass *
180 make_pass_rl78_devirt (gcc::context *ctxt)
181 {
182 return new pass_rl78_devirt (ctxt);
183 }
184
185 /* Redundant move elimination pass. Must be run after the basic block
186 reordering pass for the best effect. */
187
188 static unsigned int
189 move_elim_pass (void)
190 {
191 rtx_insn *insn, *ninsn;
192 rtx prev = NULL_RTX;
193
194 for (insn = get_insns (); insn; insn = ninsn)
195 {
196 rtx set;
197
198 ninsn = next_nonnote_nondebug_insn (insn);
199
200 if ((set = single_set (insn)) == NULL_RTX)
201 {
202 prev = NULL_RTX;
203 continue;
204 }
205
206 /* If we have two SET insns in a row (without anything
207 between them) and the source of the second one is the
208 destination of the first one, and vice versa, then we
209 can eliminate the second SET. */
210 if (prev
211 && rtx_equal_p (SET_DEST (prev), SET_SRC (set))
212 && rtx_equal_p (SET_DEST (set), SET_SRC (prev))
213 /* ... and none of the operands are volatile. */
214 && ! volatile_refs_p (SET_SRC (prev))
215 && ! volatile_refs_p (SET_DEST (prev))
216 && ! volatile_refs_p (SET_SRC (set))
217 && ! volatile_refs_p (SET_DEST (set)))
218 {
219 if (dump_file)
220 fprintf (dump_file, " Delete insn %d because it is redundant\n",
221 INSN_UID (insn));
222
223 delete_insn (insn);
224 prev = NULL_RTX;
225 }
226 else
227 prev = set;
228 }
229
230 if (dump_file)
231 print_rtl_with_bb (dump_file, get_insns (), 0);
232
233 return 0;
234 }
235
236 namespace
237 {
238 const pass_data pass_data_rl78_move_elim =
239 {
240 RTL_PASS, /* type */
241 "move_elim", /* name */
242 OPTGROUP_NONE, /* optinfo_flags */
243 TV_MACH_DEP, /* tv_id */
244 0, /* properties_required */
245 0, /* properties_provided */
246 0, /* properties_destroyed */
247 0, /* todo_flags_start */
248 0, /* todo_flags_finish */
249 };
250
251 class pass_rl78_move_elim : public rtl_opt_pass
252 {
253 public:
254 pass_rl78_move_elim (gcc::context *ctxt)
255 : rtl_opt_pass (pass_data_rl78_move_elim, ctxt)
256 {
257 }
258
259 /* opt_pass methods: */
260 virtual unsigned int execute (function *) { return move_elim_pass (); }
261 };
262 } // anon namespace
263
264 rtl_opt_pass *
265 make_pass_rl78_move_elim (gcc::context *ctxt)
266 {
267 return new pass_rl78_move_elim (ctxt);
268 }
269
270 #undef TARGET_ASM_FILE_START
271 #define TARGET_ASM_FILE_START rl78_asm_file_start
272
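/* Implement TARGET_ASM_FILE_START.  Emit the RAM addresses that the virtual
   registers r8..r31 map onto, and register the devirtualization and
   redundant-move-elimination passes.  */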
273 static void
274 rl78_asm_file_start (void)
275 {
276 int i;
277
278 if (TARGET_G10)
279 {
280 /* The memory used is 0xffec8 to 0xffedf; real registers are in
281 0xffee0 to 0xffee7. */
282 for (i = 8; i < 32; i++)
283 fprintf (asm_out_file, "r%d\t=\t0x%x\n", i, 0xffec0 + i);
284 }
285 else
286 {
287 for (i = 0; i < 8; i++)
288 {
289 fprintf (asm_out_file, "r%d\t=\t0x%x\n", 8 + i, 0xffef0 + i);
290 fprintf (asm_out_file, "r%d\t=\t0x%x\n", 16 + i, 0xffee8 + i);
291 fprintf (asm_out_file, "r%d\t=\t0x%x\n", 24 + i, 0xffee0 + i);
292 }
293 }
294
295 opt_pass *rl78_devirt_pass = make_pass_rl78_devirt (g);
296 struct register_pass_info rl78_devirt_info =
297 {
298 rl78_devirt_pass,
299 "pro_and_epilogue",
300 1,
301 PASS_POS_INSERT_BEFORE
302 };
303
304 opt_pass *rl78_move_elim_pass = make_pass_rl78_move_elim (g);
305 struct register_pass_info rl78_move_elim_info =
306 {
307 rl78_move_elim_pass,
308 "bbro",
309 1,
310 PASS_POS_INSERT_AFTER
311 };
312
313 register_pass (& rl78_devirt_info);
314 register_pass (& rl78_move_elim_info);
315 }
316
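/* Output assembler syntax for a symbol reference.  Function symbols are
   wrapped in %code() so that they resolve to code addresses.  */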
317 void
318 rl78_output_symbol_ref (FILE * file, rtx sym)
319 {
320 tree type = SYMBOL_REF_DECL (sym);
321 const char *str = XSTR (sym, 0);
322
323 if (str[0] == '*')
324 {
325 fputs (str + 1, file);
326 }
327 else
328 {
329 str = rl78_strip_nonasm_name_encoding (str);
330 if (type && TREE_CODE (type) == FUNCTION_DECL)
331 {
332 fprintf (file, "%%code(");
333 assemble_name (file, str);
334 fprintf (file, ")");
335 }
336 else
337 assemble_name (file, str);
338 }
339 }
340
341 #undef TARGET_OPTION_OVERRIDE
342 #define TARGET_OPTION_OVERRIDE rl78_option_override
343
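/* True when the current function is an interrupt handler that must preserve
   the G13 MDUC multiply/divide unit registers.  */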
344 #define MUST_SAVE_MDUC_REGISTERS \
345 (TARGET_SAVE_MDUC_REGISTERS \
346 && (is_interrupt_func (NULL_TREE)) && RL78_MUL_G13)
347
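/* Implement TARGET_OPTION_OVERRIDE.  */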
348 static void
349 rl78_option_override (void)
350 {
351 flag_omit_frame_pointer = 1;
352 flag_no_function_cse = 1;
353 flag_split_wide_types = 0;
354
355 init_machine_status = rl78_init_machine_status;
356
357 if (TARGET_ALLREGS)
358 {
359 int i;
360
361 for (i = 24; i < 32; i++)
362 fixed_regs[i] = 0;
363 }
364
365 if (TARGET_ES0
366 && strcmp (lang_hooks.name, "GNU C")
367 && strcmp (lang_hooks.name, "GNU C11")
368 && strcmp (lang_hooks.name, "GNU C17")
369 && strcmp (lang_hooks.name, "GNU C89")
370 && strcmp (lang_hooks.name, "GNU C99")
371 /* Compiling with -flto results in a language of GNU GIMPLE being used... */
372 && strcmp (lang_hooks.name, "GNU GIMPLE"))
373 /* Address spaces are currently only supported by C. */
374 error ("-mes0 can only be used with C");
375
376 if (TARGET_SAVE_MDUC_REGISTERS && !(TARGET_G13 || RL78_MUL_G13))
377 warning (0, "mduc registers only saved for G13 target");
378
379 switch (rl78_cpu_type)
380 {
381 case CPU_UNINIT:
382 rl78_cpu_type = CPU_G14;
383 if (rl78_mul_type == MUL_UNINIT)
384 rl78_mul_type = MUL_NONE;
385 break;
386
387 case CPU_G10:
388 switch (rl78_mul_type)
389 {
390 case MUL_UNINIT: rl78_mul_type = MUL_NONE; break;
391 case MUL_NONE: break;
392 case MUL_G13: error ("-mmul=g13 cannot be used with -mcpu=g10"); break;
393 case MUL_G14: error ("-mmul=g14 cannot be used with -mcpu=g10"); break;
394 }
395 break;
396
397 case CPU_G13:
398 switch (rl78_mul_type)
399 {
400 case MUL_UNINIT: rl78_mul_type = MUL_G13; break;
401 case MUL_NONE: break;
402 case MUL_G13: break;
403 /* The S2 core does not have mul/div instructions. */
404 case MUL_G14: error ("-mmul=g14 cannot be used with -mcpu=g13"); break;
405 }
406 break;
407
408 case CPU_G14:
409 switch (rl78_mul_type)
410 {
411 case MUL_UNINIT: rl78_mul_type = MUL_G14; break;
412 case MUL_NONE: break;
413 case MUL_G14: break;
414 /* The G14 core does not have the hardware multiply peripheral used by the
415 G13 core, hence you cannot use G13 multiply routines on G14 hardware. */
416 case MUL_G13: error ("-mmul=g13 cannot be used with -mcpu=g14"); break;
417 }
418 break;
419 }
420 }
421
422 /* Most registers are 8 bits. Some are 16 bits because, for example,
423 gcc doesn't like dealing with $FP as a register pair (the second
424 half of $fp is also 2 to keep reload happy wrt register pairs, but
425 no register class includes it). This table maps register numbers
426 to size in bytes. */
427 static const int register_sizes[] =
428 {
429 1, 1, 1, 1, 1, 1, 1, 1,
430 1, 1, 1, 1, 1, 1, 1, 1,
431 1, 1, 1, 1, 1, 1, 2, 2,
432 1, 1, 1, 1, 1, 1, 1, 1,
433 2, 2, 1, 1, 1
434 };
435
436 /* Predicates used in the MD patterns. This one is true when virtual
437 insns may be matched, which typically means before (or during) the
438 devirt pass. */
439 bool
440 rl78_virt_insns_ok (void)
441 {
442 if (cfun)
443 return cfun->machine->virt_insns_ok;
444 return true;
445 }
446
447 /* Predicates used in the MD patterns. This one is true when real
448 insns may be matched, which typically means after (or during) the
449 devirt pass. */
450 bool
451 rl78_real_insns_ok (void)
452 {
453 if (cfun)
454 return cfun->machine->real_insns_ok;
455 return false;
456 }
457
458 #undef TARGET_HARD_REGNO_NREGS
459 #define TARGET_HARD_REGNO_NREGS rl78_hard_regno_nregs
460
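/* Implement TARGET_HARD_REGNO_NREGS.  Return the number of consecutive hard
   registers needed to hold a value of MODE starting at register REGNO.  */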
461 static unsigned int
462 rl78_hard_regno_nregs (unsigned int regno, machine_mode mode)
463 {
464 int rs = register_sizes[regno];
465 if (rs < 1)
466 rs = 1;
467 return ((GET_MODE_SIZE (mode) + rs - 1) / rs);
468 }
469
470 #undef TARGET_HARD_REGNO_MODE_OK
471 #define TARGET_HARD_REGNO_MODE_OK rl78_hard_regno_mode_ok
472
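/* Implement TARGET_HARD_REGNO_MODE_OK.  Return true if a value of mode MODE
   may be held starting at hard register REGNO.  */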
473 static bool
474 rl78_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
475 {
476 int s = GET_MODE_SIZE (mode);
477
478 if (s < 1)
479 return false;
480 /* These are not to be used by gcc. */
481 if (regno == 23 || regno == ES_REG || regno == CS_REG)
482 return false;
483 /* $fp can always be accessed as a 16-bit value. */
484 if (regno == FP_REG && s == 2)
485 return true;
486 if (regno < SP_REG)
487 {
488 /* Since a reg-reg move is really a reg-mem move, we must
489 enforce alignment. */
490 if (s > 1 && (regno % 2))
491 return false;
492 return true;
493 }
494 if (regno == CC_REGNUM)
495 return (mode == BImode);
496 /* All other registers must be accessed in their natural sizes. */
497 if (s == register_sizes [regno])
498 return true;
499 return false;
500 }
501
502 #undef TARGET_MODES_TIEABLE_P
503 #define TARGET_MODES_TIEABLE_P rl78_modes_tieable_p
504
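/* Implement TARGET_MODES_TIEABLE_P.  Two modes may share a register only if
   both are floating point or both are not.  */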
505 static bool
506 rl78_modes_tieable_p (machine_mode mode1, machine_mode mode2)
507 {
508 return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
509 || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
510 == (GET_MODE_CLASS (mode2) == MODE_FLOAT
511 || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
512 }
513
514 /* Simplify_gen_subreg() doesn't handle memory references the way we
515 need it to below, so we use this function for when we must get a
516 valid subreg in a "natural" state. */
517 static rtx
518 rl78_subreg (machine_mode mode, rtx r, machine_mode omode, int byte)
519 {
520 if (GET_CODE (r) == MEM)
521 return adjust_address (r, mode, byte);
522 else
523 return simplify_gen_subreg (mode, r, omode, byte);
524 }
525
526 /* Used by movsi. Split SImode moves into two HImode moves, using
527 appropriate patterns for the upper and lower halves of symbols. */
528 void
529 rl78_expand_movsi (rtx *operands)
530 {
531 rtx op00, op02, op10, op12;
532
533 op00 = rl78_subreg (HImode, operands[0], SImode, 0);
534 op02 = rl78_subreg (HImode, operands[0], SImode, 2);
535 if (GET_CODE (operands[1]) == CONST
536 || GET_CODE (operands[1]) == SYMBOL_REF)
537 {
538 op10 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (0));
539 op10 = gen_rtx_CONST (HImode, op10);
540 op12 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (16));
541 op12 = gen_rtx_CONST (HImode, op12);
542 }
543 else
544 {
545 op10 = rl78_subreg (HImode, operands[1], SImode, 0);
546 op12 = rl78_subreg (HImode, operands[1], SImode, 2);
547 }
548
549 if (rtx_equal_p (operands[0], operands[1]))
550 ;
551 else if (rtx_equal_p (op00, op12))
552 {
553 emit_move_insn (op02, op12);
554 emit_move_insn (op00, op10);
555 }
556 else
557 {
558 emit_move_insn (op00, op10);
559 emit_move_insn (op02, op12);
560 }
561 }
562
563 /* Generate code to move an SImode value. */
564 void
565 rl78_split_movsi (rtx *operands, machine_mode omode)
566 {
567 rtx op00, op02, op10, op12;
568
569 op00 = rl78_subreg (HImode, operands[0], omode, 0);
570 op02 = rl78_subreg (HImode, operands[0], omode, 2);
571
572 if (GET_CODE (operands[1]) == CONST
573 || GET_CODE (operands[1]) == SYMBOL_REF)
574 {
575 op10 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (0));
576 op10 = gen_rtx_CONST (HImode, op10);
577 op12 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (16));
578 op12 = gen_rtx_CONST (HImode, op12);
579 }
580 else
581 {
582 op10 = rl78_subreg (HImode, operands[1], omode, 0);
583 op12 = rl78_subreg (HImode, operands[1], omode, 2);
584 }
585
586 if (rtx_equal_p (operands[0], operands[1]))
587 ;
588 else if (rtx_equal_p (op00, op12))
589 {
590 operands[2] = op02;
591 operands[4] = op12;
592 operands[3] = op00;
593 operands[5] = op10;
594 }
595 else
596 {
597 operands[2] = op00;
598 operands[4] = op10;
599 operands[3] = op02;
600 operands[5] = op12;
601 }
602 }
603
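/* Used by movdi.  Split a DImode move into two SImode moves.  */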
604 void
605 rl78_split_movdi (rtx *operands, enum machine_mode omode)
606 {
607 rtx op00, op04, op10, op14;
608 op00 = rl78_subreg (SImode, operands[0], omode, 0);
609 op04 = rl78_subreg (SImode, operands[0], omode, 4);
610 op10 = rl78_subreg (SImode, operands[1], omode, 0);
611 op14 = rl78_subreg (SImode, operands[1], omode, 4);
612 emit_insn (gen_movsi (op00, op10));
613 emit_insn (gen_movsi (op04, op14));
614 }
615
616 /* Used by various two-operand expanders which cannot accept all
617 operands in the "far" namespace. Force some such operands into
618 registers so that each pattern has at most one far operand. */
619 int
620 rl78_force_nonfar_2 (rtx *operands, rtx (*gen)(rtx,rtx))
621 {
622 int did = 0;
623 rtx temp_reg = NULL;
624
625 /* FIXME: in the future, be smarter about only doing this if the
626 other operand is also far, assuming the devirtualizer can also
627 handle that. */
628 if (rl78_far_p (operands[0]))
629 {
630 temp_reg = operands[0];
631 operands[0] = gen_reg_rtx (GET_MODE (operands[0]));
632 did = 1;
633 }
634 if (!did)
635 return 0;
636
637 emit_insn (gen (operands[0], operands[1]));
638 if (temp_reg)
639 emit_move_insn (temp_reg, operands[0]);
640 return 1;
641 }
642
643 /* Likewise, but for three-operand expanders. */
644 int
645 rl78_force_nonfar_3 (rtx *operands, rtx (*gen)(rtx,rtx,rtx))
646 {
647 int did = 0;
648 rtx temp_reg = NULL;
649
650 /* FIXME: Likewise. */
651 if (rl78_far_p (operands[1]))
652 {
653 rtx temp_reg = gen_reg_rtx (GET_MODE (operands[1]));
654 emit_move_insn (temp_reg, operands[1]);
655 operands[1] = temp_reg;
656 did = 1;
657 }
658 if (rl78_far_p (operands[0]))
659 {
660 temp_reg = operands[0];
661 operands[0] = gen_reg_rtx (GET_MODE (operands[0]));
662 did = 1;
663 }
664 if (!did)
665 return 0;
666
667 emit_insn (gen (operands[0], operands[1], operands[2]));
668 if (temp_reg)
669 emit_move_insn (temp_reg, operands[0]);
670 return 1;
671 }
672
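/* Return nonzero if at most one distinct operand among the first N entries
   of OPERANDS is a far memory reference.  */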
673 int
674 rl78_one_far_p (rtx *operands, int n)
675 {
676 rtx which = NULL;
677 int i, c = 0;
678
679 for (i = 0; i < n; i ++)
680 if (rl78_far_p (operands[i]))
681 {
682 if (which == NULL)
683 which = operands[i];
684 else if (rtx_equal_p (operands[i], which))
685 continue;
686 c ++;
687 }
688 return c <= 1;
689 }
690
691 #undef TARGET_CAN_ELIMINATE
692 #define TARGET_CAN_ELIMINATE rl78_can_eliminate
693
694 static bool
695 rl78_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to ATTRIBUTE_UNUSED)
696 {
697 return true;
698 }
699
700 /* Returns true if the given register needs to be saved by the
701 current function. */
702 static bool
703 need_to_save (unsigned int regno)
704 {
705 if (is_interrupt_func (cfun->decl))
706 {
707 /* We don't know what devirt will need */
708 if (regno < 8)
709 return true;
710
711 /* We don't need to save registers that have
712 been reserved for interrupt handlers. */
713 if (regno > 23)
714 return false;
715
716 /* If the handler is a non-leaf function then it may call
717 non-interrupt aware routines which will happily clobber
718 any call_used registers, so we have to preserve them.
719 We do not have to worry about the frame pointer register
720 though, as that is handled below. */
721 if (!crtl->is_leaf && call_used_regs[regno] && regno < 22)
722 return true;
723
724 /* Otherwise we only have to save a register, call_used
725 or not, if it is used by this handler. */
726 return df_regs_ever_live_p (regno);
727 }
728
729 if (regno == FRAME_POINTER_REGNUM
730 && (frame_pointer_needed || df_regs_ever_live_p (regno)))
731 return true;
732 if (fixed_regs[regno])
733 return false;
734 if (crtl->calls_eh_return)
735 return true;
736 if (df_regs_ever_live_p (regno)
737 && !call_used_regs[regno])
738 return true;
739 return false;
740 }
741
742 /* We use this to wrap all emitted insns in the prologue. */
743 static rtx
744 F (rtx x)
745 {
746 RTX_FRAME_RELATED_P (x) = 1;
747 return x;
748 }
749
750 /* Compute all the frame-related fields in our machine_function
751 structure. */
752 static void
753 rl78_compute_frame_info (void)
754 {
755 int i;
756
757 cfun->machine->computed = 1;
758 cfun->machine->framesize_regs = 0;
759 cfun->machine->framesize_locals = get_frame_size ();
760 cfun->machine->framesize_outgoing = crtl->outgoing_args_size;
761
762 for (i = 0; i < 16; i ++)
763 if (need_to_save (i * 2) || need_to_save (i * 2 + 1))
764 {
765 cfun->machine->need_to_push [i] = 1;
766 cfun->machine->framesize_regs += 2;
767 }
768 else
769 cfun->machine->need_to_push [i] = 0;
770
771 if ((cfun->machine->framesize_locals + cfun->machine->framesize_outgoing) & 1)
772 cfun->machine->framesize_locals ++;
773
774 cfun->machine->framesize = (cfun->machine->framesize_regs
775 + cfun->machine->framesize_locals
776 + cfun->machine->framesize_outgoing);
777 }
778
779 /* Returns true if the provided function has the specified attribute. */
780 static inline bool
781 has_func_attr (const_tree decl, const char * func_attr)
782 {
783 if (decl == NULL_TREE)
784 decl = current_function_decl;
785
786 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
787 }
788
789 /* Returns true if the provided function has the "interrupt" attribute. */
790 static inline bool
791 is_interrupt_func (const_tree decl)
792 {
793 return has_func_attr (decl, "interrupt") || has_func_attr (decl, "brk_interrupt");
794 }
795
796 /* Returns true if the provided function has the "brk_interrupt" attribute. */
797 static inline bool
798 is_brk_interrupt_func (const_tree decl)
799 {
800 return has_func_attr (decl, "brk_interrupt");
801 }
802
803 /* Check "interrupt" attributes. */
804 static tree
805 rl78_handle_func_attribute (tree * node,
806 tree name,
807 tree args ATTRIBUTE_UNUSED,
808 int flags ATTRIBUTE_UNUSED,
809 bool * no_add_attrs)
810 {
811 gcc_assert (DECL_P (* node));
812
813 if (TREE_CODE (* node) != FUNCTION_DECL)
814 {
815 warning (OPT_Wattributes, "%qE attribute only applies to functions",
816 name);
817 * no_add_attrs = true;
818 }
819
820 /* FIXME: We ought to check that the interrupt and exception
821 handler attributes have been applied to void functions. */
822 return NULL_TREE;
823 }
824
825 /* Check "naked" attributes. */
826 static tree
827 rl78_handle_naked_attribute (tree * node,
828 tree name ATTRIBUTE_UNUSED,
829 tree args,
830 int flags ATTRIBUTE_UNUSED,
831 bool * no_add_attrs)
832 {
833 gcc_assert (DECL_P (* node));
834 gcc_assert (args == NULL_TREE);
835
836 if (TREE_CODE (* node) != FUNCTION_DECL)
837 {
838 warning (OPT_Wattributes, "naked attribute only applies to functions");
839 * no_add_attrs = true;
840 }
841
842 /* Disable warnings about this function - eg reaching the end without
843 seeing a return statement - because the programmer is doing things
844 that gcc does not know about. */
845 TREE_NO_WARNING (* node) = 1;
846
847 return NULL_TREE;
848 }
849
850 /* Check "saddr" attributes. */
851 static tree
852 rl78_handle_saddr_attribute (tree * node,
853 tree name,
854 tree args ATTRIBUTE_UNUSED,
855 int flags ATTRIBUTE_UNUSED,
856 bool * no_add_attrs)
857 {
858 gcc_assert (DECL_P (* node));
859
860 if (TREE_CODE (* node) == FUNCTION_DECL)
861 {
862 warning (OPT_Wattributes, "%qE attribute doesn't apply to functions",
863 name);
864 * no_add_attrs = true;
865 }
866
867 return NULL_TREE;
868 }
869
870 /* Check "vector" attribute. */
871
872 static tree
873 rl78_handle_vector_attribute (tree * node,
874 tree name,
875 tree args,
876 int flags ATTRIBUTE_UNUSED,
877 bool * no_add_attrs)
878 {
879 gcc_assert (DECL_P (* node));
880 gcc_assert (args != NULL_TREE);
881
882 if (TREE_CODE (* node) != FUNCTION_DECL)
883 {
884 warning (OPT_Wattributes, "%qE attribute only applies to functions",
885 name);
886 * no_add_attrs = true;
887 }
888
889 return NULL_TREE;
890 }
891
892 #undef TARGET_ATTRIBUTE_TABLE
893 #define TARGET_ATTRIBUTE_TABLE rl78_attribute_table
894
895 /* Table of RL78-specific attributes. */
896 const struct attribute_spec rl78_attribute_table[] =
897 {
898 /* Name, min_len, max_len, decl_req, type_req, fn_type_req,
899 affects_type_identity, handler, exclude. */
900 { "interrupt", 0, -1, true, false, false, false,
901 rl78_handle_func_attribute, NULL },
902 { "brk_interrupt", 0, 0, true, false, false, false,
903 rl78_handle_func_attribute, NULL },
904 { "naked", 0, 0, true, false, false, false,
905 rl78_handle_naked_attribute, NULL },
906 { "saddr", 0, 0, true, false, false, false,
907 rl78_handle_saddr_attribute, NULL },
908 { "vector", 1, -1, true, false, false, false,
909 rl78_handle_vector_attribute, NULL },
910 { NULL, 0, 0, false, false, false, false, NULL, NULL }
911 };
912
913
914
915 /* Break down an address RTX into its component base/index/addend
916 portions and return TRUE if the address is of a valid form, else
917 FALSE. */
918 static bool
919 characterize_address (rtx x, rtx *base, rtx *index, rtx *addend)
920 {
921 *base = NULL_RTX;
922 *index = NULL_RTX;
923 *addend = NULL_RTX;
924
925 if (GET_CODE (x) == UNSPEC
926 && XINT (x, 1) == UNS_ES_ADDR)
927 x = XVECEXP (x, 0, 1);
928
929 if (GET_CODE (x) == REG)
930 {
931 *base = x;
932 return true;
933 }
934
935 /* We sometimes get these without the CONST wrapper */
936 if (GET_CODE (x) == PLUS
937 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
938 && GET_CODE (XEXP (x, 1)) == CONST_INT)
939 {
940 *addend = x;
941 return true;
942 }
943
944 if (GET_CODE (x) == PLUS)
945 {
946 *base = XEXP (x, 0);
947 x = XEXP (x, 1);
948
949 if (GET_CODE (*base) == SUBREG)
950 {
951 if (GET_MODE (*base) == HImode
952 && GET_MODE (XEXP (*base, 0)) == SImode
953 && GET_CODE (XEXP (*base, 0)) == REG)
954 {
955 /* This is a throw-away rtx just to tell everyone
956 else what effective register we're using. */
957 *base = gen_rtx_REG (HImode, REGNO (XEXP (*base, 0)));
958 }
959 }
960
961 if (GET_CODE (*base) != REG
962 && GET_CODE (x) == REG)
963 {
964 rtx tmp = *base;
965 *base = x;
966 x = tmp;
967 }
968
969 if (GET_CODE (*base) != REG)
970 return false;
971
972 if (GET_CODE (x) == ZERO_EXTEND
973 && GET_CODE (XEXP (x, 0)) == REG)
974 {
975 *index = XEXP (x, 0);
976 return false;
977 }
978 }
979
980 switch (GET_CODE (x))
981 {
982 case PLUS:
983 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
984 && GET_CODE (XEXP (x, 1)) == CONST_INT)
985 {
986 *addend = x;
987 return true;
988 }
989 /* fall through */
990 case MEM:
991 case REG:
992 return false;
993
994 case SUBREG:
995 switch (GET_CODE (XEXP (x, 0)))
996 {
997 case CONST:
998 case SYMBOL_REF:
999 case CONST_INT:
1000 *addend = x;
1001 return true;
1002 default:
1003 return false;
1004 }
1005
1006 case CONST:
1007 case SYMBOL_REF:
1008 case CONST_INT:
1009 *addend = x;
1010 return true;
1011
1012 default:
1013 return false;
1014 }
1015
1016 return false;
1017 }
1018
1019 /* Used by the Whb constraint. Match addresses that use HL+B or HL+C
1020 addressing. */
1021 bool
1022 rl78_hl_b_c_addr_p (rtx op)
1023 {
1024 rtx hl, bc;
1025
1026 if (GET_CODE (op) != PLUS)
1027 return false;
1028 hl = XEXP (op, 0);
1029 bc = XEXP (op, 1);
1030 if (GET_CODE (hl) == ZERO_EXTEND)
1031 {
1032 rtx tmp = hl;
1033 hl = bc;
1034 bc = tmp;
1035 }
1036 if (GET_CODE (hl) != REG)
1037 return false;
1038 if (GET_CODE (bc) != ZERO_EXTEND)
1039 return false;
1040 bc = XEXP (bc, 0);
1041 if (GET_CODE (bc) != REG)
1042 return false;
1043 if (REGNO (hl) != HL_REG)
1044 return false;
1045 if (REGNO (bc) != B_REG && REGNO (bc) != C_REG)
1046 return false;
1047
1048 return true;
1049 }
1050
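/* Matches hard register R against REGNO; when not doing strict checking any
   pseudo register also matches.  */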
1051 #define REG_IS(r, regno) (((r) == (regno)) || ((r) >= FIRST_PSEUDO_REGISTER && !(strict)))
1052
1053 /* Return the appropriate mode for a named address space address. */
1054
1055 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
1056 #define TARGET_ADDR_SPACE_ADDRESS_MODE rl78_addr_space_address_mode
1057
1058 static scalar_int_mode
1059 rl78_addr_space_address_mode (addr_space_t addrspace)
1060 {
1061 switch (addrspace)
1062 {
1063 case ADDR_SPACE_GENERIC:
1064 return HImode;
1065 case ADDR_SPACE_NEAR:
1066 return HImode;
1067 case ADDR_SPACE_FAR:
1068 return SImode;
1069 default:
1070 gcc_unreachable ();
1071 }
1072 }
1073
1074 /* Used in various constraints and predicates to match operands in the
1075 "far" address space. */
1076 int
1077 rl78_far_p (rtx x)
1078 {
1079 if (! MEM_P (x))
1080 return 0;
1081 #if DEBUG0
1082 fprintf (stderr, "\033[35mrl78_far_p: "); debug_rtx (x);
1083 fprintf (stderr, " = %d\033[0m\n", MEM_ADDR_SPACE (x) == ADDR_SPACE_FAR);
1084 #endif
1085
1086 /* Not all far addresses are legitimate, because the devirtualizer
1087 can't handle them. */
1088 if (! rl78_as_legitimate_address (GET_MODE (x), XEXP (x, 0), false, ADDR_SPACE_FAR))
1089 return 0;
1090
1091 return GET_MODE_BITSIZE (rl78_addr_space_address_mode (MEM_ADDR_SPACE (x))) == 32;
1092 }
1093
1094 /* Return the appropriate mode for a named address pointer. */
1095 #undef TARGET_ADDR_SPACE_POINTER_MODE
1096 #define TARGET_ADDR_SPACE_POINTER_MODE rl78_addr_space_pointer_mode
1097
1098 static scalar_int_mode
1099 rl78_addr_space_pointer_mode (addr_space_t addrspace)
1100 {
1101 switch (addrspace)
1102 {
1103 case ADDR_SPACE_GENERIC:
1104 return HImode;
1105 case ADDR_SPACE_NEAR:
1106 return HImode;
1107 case ADDR_SPACE_FAR:
1108 return SImode;
1109 default:
1110 gcc_unreachable ();
1111 }
1112 }
1113
1114 /* Returns TRUE for valid addresses. */
1115 #undef TARGET_VALID_POINTER_MODE
1116 #define TARGET_VALID_POINTER_MODE rl78_valid_pointer_mode
1117
1118 static bool
1119 rl78_valid_pointer_mode (scalar_int_mode m)
1120 {
1121 return (m == HImode || m == SImode);
1122 }
1123
1124 #undef TARGET_LEGITIMATE_CONSTANT_P
1125 #define TARGET_LEGITIMATE_CONSTANT_P rl78_is_legitimate_constant
1126
1127 static bool
1128 rl78_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED)
1129 {
1130 return true;
1131 }
1132
1133 #undef TARGET_LRA_P
1134 #define TARGET_LRA_P hook_bool_void_false
1135
1136 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
1137 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P rl78_as_legitimate_address
1138
1139 bool
1140 rl78_as_legitimate_address (machine_mode mode ATTRIBUTE_UNUSED, rtx x,
1141 bool strict ATTRIBUTE_UNUSED, addr_space_t as ATTRIBUTE_UNUSED)
1142 {
1143 rtx base, index, addend;
1144 bool is_far_addr = false;
1145 int as_bits;
1146
1147 as_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (as));
1148
1149 if (GET_CODE (x) == UNSPEC
1150 && XINT (x, 1) == UNS_ES_ADDR)
1151 {
1152 x = XVECEXP (x, 0, 1);
1153 is_far_addr = true;
1154 }
1155
1156 if (as_bits == 16 && is_far_addr)
1157 return false;
1158
1159 if (! characterize_address (x, &base, &index, &addend))
1160 return false;
1161
1162 /* We can't extract the high/low portions of a PLUS address
1163 involving a register during devirtualization, so make sure all
1164 such __far addresses do not have addends. This forces GCC to do
1165 the sum separately. */
1166 if (addend && base && as_bits == 32 && GET_MODE (base) == SImode)
1167 return false;
1168
1169 if (base && index)
1170 {
1171 int ir = REGNO (index);
1172 int br = REGNO (base);
1173
1174 #define OK(test, debug) if (test) { /*fprintf(stderr, "%d: OK %s\n", __LINE__, debug);*/ return true; }
1175 OK (REG_IS (br, HL_REG) && REG_IS (ir, B_REG), "[hl+b]");
1176 OK (REG_IS (br, HL_REG) && REG_IS (ir, C_REG), "[hl+c]");
1177 return false;
1178 }
1179
1180 if (strict && base && GET_CODE (base) == REG && REGNO (base) >= FIRST_PSEUDO_REGISTER)
1181 return false;
1182
1183 if (! cfun->machine->virt_insns_ok && base && GET_CODE (base) == REG
1184 && REGNO (base) >= 8 && REGNO (base) <= 31)
1185 return false;
1186
1187 return true;
1188 }
1189
1190 /* Determine if one named address space is a subset of another. */
1191 #undef TARGET_ADDR_SPACE_SUBSET_P
1192 #define TARGET_ADDR_SPACE_SUBSET_P rl78_addr_space_subset_p
1193
1194 static bool
1195 rl78_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
1196 {
1197 int subset_bits;
1198 int superset_bits;
1199
1200 subset_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (subset));
1201 superset_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (superset));
1202
1203 return (subset_bits <= superset_bits);
1204 }
1205
1206 #undef TARGET_ADDR_SPACE_CONVERT
1207 #define TARGET_ADDR_SPACE_CONVERT rl78_addr_space_convert
1208
1209 /* Convert from one address space to another. */
1210 static rtx
1211 rl78_addr_space_convert (rtx op, tree from_type, tree to_type)
1212 {
1213 addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (from_type));
1214 addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (to_type));
1215 rtx result;
1216 int to_bits;
1217 int from_bits;
1218
1219 to_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (to_as));
1220 from_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (from_as));
1221
1222 if (to_bits < from_bits)
1223 {
1224 rtx tmp;
1225 /* This is unpredictable, as we're truncating off usable address
1226 bits. */
1227
1228 warning (OPT_Waddress, "converting far pointer to near pointer");
1229 result = gen_reg_rtx (HImode);
1230 if (GET_CODE (op) == SYMBOL_REF
1231 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER))
1232 tmp = gen_rtx_raw_SUBREG (HImode, op, 0);
1233 else
1234 tmp = simplify_subreg (HImode, op, SImode, 0);
1235 gcc_assert (tmp != NULL_RTX);
1236 emit_move_insn (result, tmp);
1237 return result;
1238 }
1239 else if (to_bits > from_bits)
1240 {
1241 /* This always works. */
1242 result = gen_reg_rtx (SImode);
1243 emit_move_insn (rl78_subreg (HImode, result, SImode, 0), op);
1244 if (TREE_CODE (from_type) == POINTER_TYPE
1245 && TREE_CODE (TREE_TYPE (from_type)) == FUNCTION_TYPE)
1246 emit_move_insn (rl78_subreg (HImode, result, SImode, 2), const0_rtx);
1247 else
1248 emit_move_insn (rl78_subreg (HImode, result, SImode, 2), GEN_INT (0x0f));
1249 return result;
1250 }
1251 else
1252 return op;
1253 gcc_unreachable ();
1254 }
1255
1256 /* Implements REGNO_MODE_CODE_OK_FOR_BASE_P. */
1257 bool
1258 rl78_regno_mode_code_ok_for_base_p (int regno, machine_mode mode ATTRIBUTE_UNUSED,
1259 addr_space_t address_space ATTRIBUTE_UNUSED,
1260 int outer_code ATTRIBUTE_UNUSED, int index_code)
1261 {
1262 if (regno <= SP_REG && regno >= 16)
1263 return true;
1264 if (index_code == REG)
1265 return (regno == HL_REG);
1266 if (regno == C_REG || regno == B_REG || regno == E_REG || regno == L_REG)
1267 return true;
1268 return false;
1269 }
1270
1271 /* Implements MODE_CODE_BASE_REG_CLASS. */
1272 enum reg_class
1273 rl78_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
1274 addr_space_t address_space ATTRIBUTE_UNUSED,
1275 int outer_code ATTRIBUTE_UNUSED,
1276 int index_code ATTRIBUTE_UNUSED)
1277 {
1278 return V_REGS;
1279 }
1280
1281 /* Typical stack layout should look like this after the function's prologue:
1282
1283 | |
1284 -- ^
1285 | | \ |
1286 | | arguments saved | Increasing
1287 | | on the stack | addresses
1288 PARENT arg pointer -> | | /
1289 -------------------------- ---- -------------------
1290 CHILD |ret | return address
1291 --
1292 | | \
1293 | | call saved
1294 | | registers
1295 frame pointer -> | | /
1296 --
1297 | | \
1298 | | local
1299 | | variables
1300 | | /
1301 --
1302 | | \
1303 | | outgoing | Decreasing
1304 | | arguments | addresses
1305 current stack pointer -> | | / |
1306 -------------------------- ---- ------------------ V
1307 | | */
1308
1309 /* Implements INITIAL_ELIMINATION_OFFSET. The frame layout is
1310 described in the machine_function struct definition, above. */
1311 int
1312 rl78_initial_elimination_offset (int from, int to)
1313 {
1314 int rv = 0; /* as if arg to arg */
1315
1316 rl78_compute_frame_info ();
1317
1318 switch (to)
1319 {
1320 case STACK_POINTER_REGNUM:
1321 rv += cfun->machine->framesize_outgoing;
1322 rv += cfun->machine->framesize_locals;
1323 /* Fall through. */
1324 case FRAME_POINTER_REGNUM:
1325 rv += cfun->machine->framesize_regs;
1326 rv += 4;
1327 break;
1328 default:
1329 gcc_unreachable ();
1330 }
1331
1332 switch (from)
1333 {
1334 case FRAME_POINTER_REGNUM:
1335 rv -= 4;
1336 rv -= cfun->machine->framesize_regs;
1337 case ARG_POINTER_REGNUM:
1338 break;
1339 default:
1340 gcc_unreachable ();
1341 }
1342
1343 return rv;
1344 }
1345
1346 static bool
1347 rl78_is_naked_func (void)
1348 {
1349 return (lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE);
1350 }
1351
1352 /* Check if the block uses mul/div insns for G13 target. */
1353
1354 static bool
1355 check_mduc_usage (void)
1356 {
1357 rtx_insn * insn;
1358 basic_block bb;
1359
1360 FOR_EACH_BB_FN (bb, cfun)
1361 {
1362 FOR_BB_INSNS (bb, insn)
1363 {
1364 if (INSN_P (insn)
1365 && (get_attr_is_g13_muldiv_insn (insn) == IS_G13_MULDIV_INSN_YES))
1366 return true;
1367 }
1368 }
1369 return false;
1370 }
1371
1372 /* Expand the function prologue (from the prologue pattern). */
1373
1374 void
1375 rl78_expand_prologue (void)
1376 {
1377 int i, fs;
1378 rtx sp = gen_rtx_REG (HImode, STACK_POINTER_REGNUM);
1379 rtx ax = gen_rtx_REG (HImode, AX_REG);
1380 int rb = 0;
1381
1382 if (rl78_is_naked_func ())
1383 return;
1384
1385 /* Always re-compute the frame info - the register usage may have changed. */
1386 rl78_compute_frame_info ();
1387
1388 if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
1389 cfun->machine->framesize += ARRAY_SIZE (mduc_regs) * 2;
1390
1391 if (flag_stack_usage_info)
1392 current_function_static_stack_size = cfun->machine->framesize;
1393
1394 if (is_interrupt_func (cfun->decl) && !TARGET_G10)
1395 for (i = 0; i < 4; i++)
1396 if (cfun->machine->need_to_push [i])
1397 {
1398 /* Select Bank 0 if we are using any registers from Bank 0. */
1399 emit_insn (gen_sel_rb (GEN_INT (0)));
1400 break;
1401 }
1402
1403 for (i = 0; i < 16; i++)
1404 if (cfun->machine->need_to_push [i])
1405 {
1406 int reg = i * 2;
1407
1408 if (TARGET_G10)
1409 {
1410 if (reg >= 8)
1411 {
1412 emit_move_insn (ax, gen_rtx_REG (HImode, reg));
1413 reg = AX_REG;
1414 }
1415 }
1416 else
1417 {
1418 int need_bank = i/4;
1419
1420 if (need_bank != rb)
1421 {
1422 emit_insn (gen_sel_rb (GEN_INT (need_bank)));
1423 rb = need_bank;
1424 }
1425 }
1426
1427 F (emit_insn (gen_push (gen_rtx_REG (HImode, reg))));
1428 }
1429
1430 if (rb != 0)
1431 emit_insn (gen_sel_rb (GEN_INT (0)));
1432
1433 /* Save ES register inside interrupt functions if it is used. */
1434 if (is_interrupt_func (cfun->decl) && cfun->machine->uses_es)
1435 {
1436 emit_insn (gen_movqi_from_es (gen_rtx_REG (QImode, A_REG)));
1437 F (emit_insn (gen_push (ax)));
1438 }
1439
1440 /* Save MDUC registers inside interrupt routine. */
1441 if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
1442 {
1443 for (unsigned i = 0; i < ARRAY_SIZE (mduc_regs); i++)
1444 {
1445 mduc_reg_type *reg = mduc_regs + i;
1446 rtx mem_mduc = gen_rtx_MEM (reg->mode, GEN_INT (reg->address));
1447
1448 MEM_VOLATILE_P (mem_mduc) = 1;
1449 if (reg->mode == QImode)
1450 emit_insn (gen_movqi (gen_rtx_REG (QImode, A_REG), mem_mduc));
1451 else
1452 emit_insn (gen_movhi (gen_rtx_REG (HImode, AX_REG), mem_mduc));
1453
1454 emit_insn (gen_push (gen_rtx_REG (HImode, AX_REG)));
1455 }
1456 }
1457
1458 if (frame_pointer_needed)
1459 {
1460 F (emit_move_insn (ax, sp));
1461 F (emit_move_insn (gen_rtx_REG (HImode, FRAME_POINTER_REGNUM), ax));
1462 }
1463
1464 fs = cfun->machine->framesize_locals + cfun->machine->framesize_outgoing;
1465 if (fs > 0)
1466 {
1467 /* If we need to subtract more than 254*3 then it is faster and
1468 smaller to move SP into AX and perform the subtraction there. */
1469 if (fs > 254 * 3)
1470 {
1471 rtx insn;
1472
1473 emit_move_insn (ax, sp);
1474 emit_insn (gen_subhi3 (ax, ax, GEN_INT (fs)));
1475 insn = F (emit_move_insn (sp, ax));
1476 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1477 gen_rtx_SET (sp, gen_rtx_PLUS (HImode, sp,
1478 GEN_INT (-fs))));
1479 }
1480 else
1481 {
1482 while (fs > 0)
1483 {
1484 int fs_byte = (fs > 254) ? 254 : fs;
1485
1486 F (emit_insn (gen_subhi3 (sp, sp, GEN_INT (fs_byte))));
1487 fs -= fs_byte;
1488 }
1489 }
1490 }
1491 }
1492
1493 /* Expand the function epilogue (from the epilogue pattern). */
1494 void
1495 rl78_expand_epilogue (void)
1496 {
1497 int i, fs;
1498 rtx sp = gen_rtx_REG (HImode, STACK_POINTER_REGNUM);
1499 rtx ax = gen_rtx_REG (HImode, AX_REG);
1500 int rb = 0;
1501
1502 if (rl78_is_naked_func ())
1503 return;
1504
1505 if (frame_pointer_needed)
1506 {
1507 emit_move_insn (ax, gen_rtx_REG (HImode, FRAME_POINTER_REGNUM));
1508 emit_move_insn (sp, ax);
1509 }
1510 else
1511 {
1512 fs = cfun->machine->framesize_locals + cfun->machine->framesize_outgoing;
1513 if (fs > 254 * 3)
1514 {
1515 emit_move_insn (ax, sp);
1516 emit_insn (gen_addhi3 (ax, ax, GEN_INT (fs)));
1517 emit_move_insn (sp, ax);
1518 }
1519 else
1520 {
1521 while (fs > 0)
1522 {
1523 int fs_byte = (fs > 254) ? 254 : fs;
1524
1525 emit_insn (gen_addhi3 (sp, sp, GEN_INT (fs_byte)));
1526 fs -= fs_byte;
1527 }
1528 }
1529 }
1530
1531 /* Restore MDUC registers from interrupt routine. */
1532 if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
1533 {
1534 for (int i = ARRAY_SIZE (mduc_regs) - 1; i >= 0; i--)
1535 {
1536 mduc_reg_type *reg = mduc_regs + i;
1537 rtx mem_mduc = gen_rtx_MEM (reg->mode, GEN_INT (reg->address));
1538
1539 emit_insn (gen_pop (gen_rtx_REG (HImode, AX_REG)));
1540 MEM_VOLATILE_P (mem_mduc) = 1;
1541 if (reg->mode == QImode)
1542 emit_insn (gen_movqi (mem_mduc, gen_rtx_REG (QImode, A_REG)));
1543 else
1544 emit_insn (gen_movhi (mem_mduc, gen_rtx_REG (HImode, AX_REG)));
1545 }
1546 }
1547
1548 if (is_interrupt_func (cfun->decl) && cfun->machine->uses_es)
1549 {
1550 emit_insn (gen_pop (gen_rtx_REG (HImode, AX_REG)));
1551 emit_insn (gen_movqi_to_es (gen_rtx_REG (QImode, A_REG)));
1552 }
1553
1554 for (i = 15; i >= 0; i--)
1555 if (cfun->machine->need_to_push [i])
1556 {
1557 rtx dest = gen_rtx_REG (HImode, i * 2);
1558
1559 if (TARGET_G10)
1560 {
1561 if (i < 8)
1562 emit_insn (gen_pop (dest));
1563 else
1564 {
1565 emit_insn (gen_pop (ax));
1566 emit_move_insn (dest, ax);
1567 /* Generate a USE of the pop'd register so that DCE will not eliminate the move. */
1568 emit_insn (gen_use (dest));
1569 }
1570 }
1571 else
1572 {
1573 int need_bank = i / 4;
1574
1575 if (need_bank != rb)
1576 {
1577 emit_insn (gen_sel_rb (GEN_INT (need_bank)));
1578 rb = need_bank;
1579 }
1580 emit_insn (gen_pop (dest));
1581 }
1582 }
1583
1584 if (rb != 0)
1585 emit_insn (gen_sel_rb (GEN_INT (0)));
1586
1587 if (cfun->machine->trampolines_used)
1588 emit_insn (gen_trampoline_uninit ());
1589
1590 if (is_brk_interrupt_func (cfun->decl))
1591 emit_jump_insn (gen_brk_interrupt_return ());
1592 else if (is_interrupt_func (cfun->decl))
1593 emit_jump_insn (gen_interrupt_return ());
1594 else
1595 emit_jump_insn (gen_rl78_return ());
1596 }
1597
1598 /* Likewise, for exception handlers. */
1599 void
1600 rl78_expand_eh_epilogue (rtx x ATTRIBUTE_UNUSED)
1601 {
1602 /* FIXME - replace this with an indirect jump with stack adjust. */
1603 emit_jump_insn (gen_rl78_return ());
1604 }
1605
1606 #undef TARGET_ASM_FUNCTION_PROLOGUE
1607 #define TARGET_ASM_FUNCTION_PROLOGUE rl78_start_function
1608
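/* Emit global $tableentry$ labels for each vector number listed in the
   ANAME ("interrupt" or "vector") attribute of the current function.  */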
1609 static void
1610 add_vector_labels (FILE *file, const char *aname)
1611 {
1612 tree vec_attr;
1613 tree val_attr;
1614 const char *vname = "vect";
1615 const char *s;
1616 int vnum;
1617
1618 /* This node is for the vector/interrupt tag itself */
1619 vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
1620 if (!vec_attr)
1621 return;
1622
1623 /* Now point it at the first argument */
1624 vec_attr = TREE_VALUE (vec_attr);
1625
1626 /* Iterate through the arguments. */
1627 while (vec_attr)
1628 {
1629 val_attr = TREE_VALUE (vec_attr);
1630 switch (TREE_CODE (val_attr))
1631 {
1632 case STRING_CST:
1633 s = TREE_STRING_POINTER (val_attr);
1634 goto string_id_common;
1635
1636 case IDENTIFIER_NODE:
1637 s = IDENTIFIER_POINTER (val_attr);
1638
1639 string_id_common:
1640 if (strcmp (s, "$default") == 0)
1641 {
1642 fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
1643 fprintf (file, "$tableentry$default$%s:\n", vname);
1644 }
1645 else
1646 vname = s;
1647 break;
1648
1649 case INTEGER_CST:
1650 vnum = TREE_INT_CST_LOW (val_attr);
1651
1652 fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
1653 fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
1654 break;
1655
1656 default:
1657 ;
1658 }
1659
1660 vec_attr = TREE_CHAIN (vec_attr);
1661 }
1662
1663 }
1664
1665 /* We don't use this to actually emit the function prologue. We use
1666 this to insert a comment in the asm file describing the
1667 function. */
1668 static void
1669 rl78_start_function (FILE *file)
1670 {
1671 int i;
1672
1673 add_vector_labels (file, "interrupt");
1674 add_vector_labels (file, "vector");
1675
1676 if (cfun->machine->framesize == 0)
1677 return;
1678 fprintf (file, "\t; start of function\n");
1679
1680 if (cfun->machine->framesize_regs)
1681 {
1682 fprintf (file, "\t; push %d:", cfun->machine->framesize_regs);
1683 for (i = 0; i < 16; i ++)
1684 if (cfun->machine->need_to_push[i])
1685 fprintf (file, " %s", word_regnames[i*2]);
1686 fprintf (file, "\n");
1687 }
1688
1689 if (frame_pointer_needed)
1690 fprintf (file, "\t; $fp points here (r22)\n");
1691
1692 if (cfun->machine->framesize_locals)
1693 fprintf (file, "\t; locals: %d byte%s\n", cfun->machine->framesize_locals,
1694 cfun->machine->framesize_locals == 1 ? "" : "s");
1695
1696 if (cfun->machine->framesize_outgoing)
1697 fprintf (file, "\t; outgoing: %d byte%s\n", cfun->machine->framesize_outgoing,
1698 cfun->machine->framesize_outgoing == 1 ? "" : "s");
1699
1700 if (cfun->machine->uses_es)
1701 fprintf (file, "\t; uses ES register\n");
1702
1703 if (MUST_SAVE_MDUC_REGISTERS)
1704 fprintf (file, "\t; preserves MDUC registers\n");
1705 }
1706
1707 /* Return an RTL describing where a function return value of type RET_TYPE
1708 is held. */
1709
1710 #undef TARGET_FUNCTION_VALUE
1711 #define TARGET_FUNCTION_VALUE rl78_function_value
1712
1713 static rtx
1714 rl78_function_value (const_tree ret_type,
1715 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1716 bool outgoing ATTRIBUTE_UNUSED)
1717 {
1718 machine_mode mode = TYPE_MODE (ret_type);
1719
1720 return gen_rtx_REG (mode, 8);
1721 }
1722
1723 #undef TARGET_PROMOTE_FUNCTION_MODE
1724 #define TARGET_PROMOTE_FUNCTION_MODE rl78_promote_function_mode
1725
1726 static machine_mode
1727 rl78_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1728 machine_mode mode,
1729 int *punsignedp ATTRIBUTE_UNUSED,
1730 const_tree funtype ATTRIBUTE_UNUSED, int for_return ATTRIBUTE_UNUSED)
1731 {
1732 return mode;
1733 }
1734
1735 /* Return an RTL expression describing the register holding a function
1736 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
1737 be passed on the stack. CUM describes the previous parameters to the
1738 function and NAMED is false if the parameter is part of a variable
1739 parameter list, or the last named parameter before the start of a
1740 variable parameter list. */
1741
1742 #undef TARGET_FUNCTION_ARG
1743 #define TARGET_FUNCTION_ARG rl78_function_arg
1744
1745 static rtx
1746 rl78_function_arg (cumulative_args_t cum_v ATTRIBUTE_UNUSED,
1747 machine_mode mode ATTRIBUTE_UNUSED,
1748 const_tree type ATTRIBUTE_UNUSED,
1749 bool named ATTRIBUTE_UNUSED)
1750 {
1751 return NULL_RTX;
1752 }
1753
1754 #undef TARGET_FUNCTION_ARG_ADVANCE
1755 #define TARGET_FUNCTION_ARG_ADVANCE rl78_function_arg_advance
1756
1757 static void
1758 rl78_function_arg_advance (cumulative_args_t cum_v, machine_mode mode, const_tree type,
1759 bool named ATTRIBUTE_UNUSED)
1760 {
1761 int rounded_size;
1762 CUMULATIVE_ARGS * cum = get_cumulative_args (cum_v);
1763
1764 rounded_size = ((mode == BLKmode)
1765 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
1766 if (rounded_size & 1)
1767 rounded_size ++;
1768 (*cum) += rounded_size;
1769 }
1770
1771 #undef TARGET_FUNCTION_ARG_BOUNDARY
1772 #define TARGET_FUNCTION_ARG_BOUNDARY rl78_function_arg_boundary
1773
1774 static unsigned int
1775 rl78_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
1776 const_tree type ATTRIBUTE_UNUSED)
1777 {
1778 return 16;
1779 }
1780
1781 /* Supported modifier letters:
1782
1783 A - address of a MEM
1784 S - SADDR form of a real register
1785 v - real register corresponding to a virtual register
1786 m - minus - negative of CONST_INT value.
1787 C - inverse of a conditional (NE vs EQ for example)
1788 C - complement of an integer
1789 z - collapsed conditional
1790 s - shift count mod 8
1791 S - shift count mod 16
1792 r - reverse shift count (8-(count mod 8))
1793 B - bit position
1794
1795 h - bottom HI of an SI
1796 H - top HI of an SI
1797 q - bottom QI of an HI
1798 Q - top QI of an HI
1799 e - third QI of an SI (i.e. where the ES register gets values from)
1800 E - fourth QI of an SI (i.e. MSB)
1801
1802 p - Add +0 to a zero-indexed HL based address.
1803 */
1804
1805 /* Implements the bulk of rl78_print_operand, below. We do it this
1806 way because we need to test for a constant at the top level and
1807 insert the '#', but not test for it anywhere else as we recurse
1808 down into the operand. */
1809 static void
1810 rl78_print_operand_1 (FILE * file, rtx op, int letter)
1811 {
1812 int need_paren;
1813
1814 switch (GET_CODE (op))
1815 {
1816 case MEM:
1817 if (letter == 'A')
1818 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1819 else
1820 {
1821 if (rl78_far_p (op))
1822 {
1823 fprintf (file, "es:");
1824 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
1825 op = gen_rtx_MEM (GET_MODE (op), XVECEXP (XEXP (op, 0), 0, 1));
1826 }
1827 if (letter == 'H')
1828 {
1829 op = adjust_address (op, HImode, 2);
1830 letter = 0;
1831 }
1832 if (letter == 'h')
1833 {
1834 op = adjust_address (op, HImode, 0);
1835 letter = 0;
1836 }
1837 if (letter == 'Q')
1838 {
1839 op = adjust_address (op, QImode, 1);
1840 letter = 0;
1841 }
1842 if (letter == 'q')
1843 {
1844 op = adjust_address (op, QImode, 0);
1845 letter = 0;
1846 }
1847 if (letter == 'e')
1848 {
1849 op = adjust_address (op, QImode, 2);
1850 letter = 0;
1851 }
1852 if (letter == 'E')
1853 {
1854 op = adjust_address (op, QImode, 3);
1855 letter = 0;
1856 }
1857 if (CONSTANT_P (XEXP (op, 0)))
1858 {
1859 if (!rl78_saddr_p (op))
1860 fprintf (file, "!");
1861 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1862 }
1863 else if (GET_CODE (XEXP (op, 0)) == PLUS
1864 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF)
1865 {
1866 if (!rl78_saddr_p (op))
1867 fprintf (file, "!");
1868 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1869 }
1870 else if (GET_CODE (XEXP (op, 0)) == PLUS
1871 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1872 && REGNO (XEXP (XEXP (op, 0), 0)) == 2)
1873 {
1874 rl78_print_operand_1 (file, XEXP (XEXP (op, 0), 1), 'u');
1875 fprintf (file, "[");
1876 rl78_print_operand_1 (file, XEXP (XEXP (op, 0), 0), 0);
1877 if (letter == 'p' && GET_CODE (XEXP (op, 0)) == REG)
1878 fprintf (file, "+0");
1879 fprintf (file, "]");
1880 }
1881 else
1882 {
1883 op = XEXP (op, 0);
1884 fprintf (file, "[");
1885 rl78_print_operand_1 (file, op, letter);
1886 if (letter == 'p' && REG_P (op) && REGNO (op) == 6)
1887 fprintf (file, "+0");
1888 fprintf (file, "]");
1889 }
1890 }
1891 break;
1892
1893 case REG:
1894 if (letter == 'Q')
1895 fprintf (file, "%s", reg_names [REGNO (op) | 1]);
1896 else if (letter == 'H')
1897 fprintf (file, "%s", reg_names [REGNO (op) + 2]);
1898 else if (letter == 'q')
1899 fprintf (file, "%s", reg_names [REGNO (op) & ~1]);
1900 else if (letter == 'e')
1901 fprintf (file, "%s", reg_names [REGNO (op) + 2]);
1902 else if (letter == 'E')
1903 fprintf (file, "%s", reg_names [REGNO (op) + 3]);
1904 else if (letter == 'S')
1905 fprintf (file, "0x%x", 0xffef8 + REGNO (op));
1906 else if (GET_MODE (op) == HImode
1907 && ! (REGNO (op) & ~0xfe))
1908 {
1909 if (letter == 'v')
1910 fprintf (file, "%s", word_regnames [REGNO (op) % 8]);
1911 else
1912 fprintf (file, "%s", word_regnames [REGNO (op)]);
1913 }
1914 else
1915 fprintf (file, "%s", reg_names [REGNO (op)]);
1916 break;
1917
1918 case CONST_INT:
1919 if (letter == 'Q')
1920 fprintf (file, "%ld", INTVAL (op) >> 8);
1921 else if (letter == 'H')
1922 fprintf (file, "%ld", INTVAL (op) >> 16);
1923 else if (letter == 'q')
1924 fprintf (file, "%ld", INTVAL (op) & 0xff);
1925 else if (letter == 'h')
1926 fprintf (file, "%ld", INTVAL (op) & 0xffff);
1927 else if (letter == 'e')
1928 fprintf (file, "%ld", (INTVAL (op) >> 16) & 0xff);
1929 else if (letter == 'B')
1930 {
1931 int ival = INTVAL (op);
1932 if (ival == -128)
1933 ival = 0x80;
1934 if (exact_log2 (ival) >= 0)
1935 fprintf (file, "%d", exact_log2 (ival));
1936 else
1937 fprintf (file, "%d", exact_log2 (~ival & 0xff));
1938 }
1939 else if (letter == 'E')
1940 fprintf (file, "%ld", (INTVAL (op) >> 24) & 0xff);
1941 else if (letter == 'm')
1942 fprintf (file, "%ld", - INTVAL (op));
1943 else if (letter == 's')
1944 fprintf (file, "%ld", INTVAL (op) % 8);
1945 else if (letter == 'S')
1946 fprintf (file, "%ld", INTVAL (op) % 16);
1947 else if (letter == 'r')
1948 fprintf (file, "%ld", 8 - (INTVAL (op) % 8));
1949 else if (letter == 'C')
1950 fprintf (file, "%ld", (INTVAL (op) ^ 0x8000) & 0xffff);
1951 else
1952 fprintf (file, "%ld", INTVAL (op));
1953 break;
1954
1955 case CONST:
1956 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1957 break;
1958
1959 case ZERO_EXTRACT:
1960 {
1961 int bits = INTVAL (XEXP (op, 1));
1962 int ofs = INTVAL (XEXP (op, 2));
1963 if (bits == 16 && ofs == 0)
1964 fprintf (file, "%%lo16(");
1965 else if (bits == 16 && ofs == 16)
1966 fprintf (file, "%%hi16(");
1967 else if (bits == 8 && ofs == 16)
1968 fprintf (file, "%%hi8(");
1969 else
1970 gcc_unreachable ();
1971 rl78_print_operand_1 (file, XEXP (op, 0), 0);
1972 fprintf (file, ")");
1973 }
1974 break;
1975
1976 case ZERO_EXTEND:
1977 if (GET_CODE (XEXP (op, 0)) == REG)
1978 fprintf (file, "%s", reg_names [REGNO (XEXP (op, 0))]);
1979 else
1980 print_rtl (file, op);
1981 break;
1982
1983 case PLUS:
1984 need_paren = 0;
1985 if (letter == 'H')
1986 {
1987 fprintf (file, "%%hi16(");
1988 need_paren = 1;
1989 letter = 0;
1990 }
1991 if (letter == 'h')
1992 {
1993 fprintf (file, "%%lo16(");
1994 need_paren = 1;
1995 letter = 0;
1996 }
1997 if (letter == 'e')
1998 {
1999 fprintf (file, "%%hi8(");
2000 need_paren = 1;
2001 letter = 0;
2002 }
2003 if (letter == 'q' || letter == 'Q')
2004 output_operand_lossage ("q/Q modifiers invalid for symbol references");
2005
2006 if (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND)
2007 {
2008 if (GET_CODE (XEXP (op, 1)) == SYMBOL_REF
2009 && SYMBOL_REF_DECL (XEXP (op, 1))
2010 && TREE_CODE (SYMBOL_REF_DECL (XEXP (op, 1))) == FUNCTION_DECL)
2011 {
2012 fprintf (file, "%%code(");
2013 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (XEXP (op, 1), 0)));
2014 fprintf (file, "+");
2015 rl78_print_operand_1 (file, XEXP (op, 0), letter);
2016 fprintf (file, ")");
2017 }
2018 else
2019 {
2020 rl78_print_operand_1 (file, XEXP (op, 1), letter);
2021 fprintf (file, "+");
2022 rl78_print_operand_1 (file, XEXP (op, 0), letter);
2023 }
2024 }
2025 else
2026 {
2027 if (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
2028 && SYMBOL_REF_DECL (XEXP (op, 0))
2029 && TREE_CODE (SYMBOL_REF_DECL (XEXP (op, 0))) == FUNCTION_DECL)
2030 {
2031 fprintf (file, "%%code(");
2032 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (XEXP (op, 0), 0)));
2033 fprintf (file, "+");
2034 rl78_print_operand_1 (file, XEXP (op, 1), letter);
2035 fprintf (file, ")");
2036 }
2037 else
2038 {
2039 rl78_print_operand_1 (file, XEXP (op, 0), letter);
2040 fprintf (file, "+");
2041 rl78_print_operand_1 (file, XEXP (op, 1), letter);
2042 }
2043 }
2044 if (need_paren)
2045 fprintf (file, ")");
2046 break;
2047
2048 case SUBREG:
2049 if (GET_MODE (op) == HImode
2050 && SUBREG_BYTE (op) == 0)
2051 {
2052 fprintf (file, "%%lo16(");
2053 rl78_print_operand_1 (file, SUBREG_REG (op), 0);
2054 fprintf (file, ")");
2055 }
2056 else if (GET_MODE (op) == HImode
2057 && SUBREG_BYTE (op) == 2)
2058 {
2059 fprintf (file, "%%hi16(");
2060 rl78_print_operand_1 (file, SUBREG_REG (op), 0);
2061 fprintf (file, ")");
2062 }
2063 else
2064 {
2065 fprintf (file, "(%s)", GET_RTX_NAME (GET_CODE (op)));
2066 }
2067 break;
2068
2069 case SYMBOL_REF:
2070 need_paren = 0;
2071 if (letter == 'H')
2072 {
2073 fprintf (file, "%%hi16(");
2074 need_paren = 1;
2075 letter = 0;
2076 }
2077 if (letter == 'h')
2078 {
2079 fprintf (file, "%%lo16(");
2080 need_paren = 1;
2081 letter = 0;
2082 }
2083 if (letter == 'e')
2084 {
2085 fprintf (file, "%%hi8(");
2086 need_paren = 1;
2087 letter = 0;
2088 }
2089 if (letter == 'q' || letter == 'Q')
2090 output_operand_lossage ("q/Q modifiers invalid for symbol references");
2091
2092 if (SYMBOL_REF_DECL (op) && TREE_CODE (SYMBOL_REF_DECL (op)) == FUNCTION_DECL)
2093 {
2094 fprintf (file, "%%code(");
2095 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (op, 0)));
2096 fprintf (file, ")");
2097 }
2098 else
2099 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (op, 0)));
2100 if (need_paren)
2101 fprintf (file, ")");
2102 break;
2103
2104 case CODE_LABEL:
2105 case LABEL_REF:
2106 output_asm_label (op);
2107 break;
2108
2109 case LTU:
2110 if (letter == 'z')
2111 fprintf (file, "#comparison eliminated");
2112 else
2113 fprintf (file, letter == 'C' ? "nc" : "c");
2114 break;
2115 case LEU:
2116 if (letter == 'z')
2117 fprintf (file, "br");
2118 else
2119 fprintf (file, letter == 'C' ? "h" : "nh");
2120 break;
2121 case GEU:
2122 if (letter == 'z')
2123 fprintf (file, "br");
2124 else
2125 fprintf (file, letter == 'C' ? "c" : "nc");
2126 break;
2127 case GTU:
2128 if (letter == 'z')
2129 fprintf (file, "#comparison eliminated");
2130 else
2131 fprintf (file, letter == 'C' ? "nh" : "h");
2132 break;
2133 case EQ:
2134 if (letter == 'z')
2135 fprintf (file, "br");
2136 else
2137 fprintf (file, letter == 'C' ? "nz" : "z");
2138 break;
2139 case NE:
2140 if (letter == 'z')
2141 fprintf (file, "#comparison eliminated");
2142 else
2143 fprintf (file, letter == 'C' ? "z" : "nz");
2144 break;
2145
2146 /* Note: these assume appropriate adjustments were made so that
2147 unsigned comparisons, which is all this chip has, will
2148 work. */
2149 case LT:
2150 if (letter == 'z')
2151 fprintf (file, "#comparison eliminated");
2152 else
2153 fprintf (file, letter == 'C' ? "nc" : "c");
2154 break;
2155 case LE:
2156 if (letter == 'z')
2157 fprintf (file, "br");
2158 else
2159 fprintf (file, letter == 'C' ? "h" : "nh");
2160 break;
2161 case GE:
2162 if (letter == 'z')
2163 fprintf (file, "br");
2164 else
2165 fprintf (file, letter == 'C' ? "c" : "nc");
2166 break;
2167 case GT:
2168 if (letter == 'z')
2169 fprintf (file, "#comparison eliminated");
2170 else
2171 fprintf (file, letter == 'C' ? "nh" : "h");
2172 break;
2173
2174 default:
2175 fprintf (file, "(%s)", GET_RTX_NAME (GET_CODE (op)));
2176 break;
2177 }
2178 }
2179
2180 #undef TARGET_PRINT_OPERAND
2181 #define TARGET_PRINT_OPERAND rl78_print_operand
2182
2183 static void
2184 rl78_print_operand (FILE * file, rtx op, int letter)
2185 {
2186 if (CONSTANT_P (op) && letter != 'u' && letter != 's' && letter != 'r' && letter != 'S' && letter != 'B')
2187 fprintf (file, "#");
2188 rl78_print_operand_1 (file, op, letter);
2189 }
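
/* Illustrative examples of the modifier letters handled above (a
   sketch derived from the code, not captured compiler output): for
   the operand (const_int 0x1234), "%0" prints "#4660", "%q0" prints
   "#52" (the low byte) and "%Q0" prints "#18" (the high byte), while
   "%s0" prints just "4" (0x1234 % 8) because 's' is one of the
   letters excluded from the '#' prefix in rl78_print_operand.  */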
2190
2191 #undef TARGET_TRAMPOLINE_INIT
2192 #define TARGET_TRAMPOLINE_INIT rl78_trampoline_init
2193
2194 /* Note that the RL78's addressing makes it very difficult to do
2195 trampolines on the stack. So, libgcc has a small pool of
2196 trampolines from which one is allocated to this task. */
2197 static void
2198 rl78_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
2199 {
2200 rtx mov_addr, thunk_addr;
2201 rtx function = XEXP (DECL_RTL (fndecl), 0);
2202
2203 mov_addr = adjust_address (m_tramp, HImode, 0);
2204 thunk_addr = gen_reg_rtx (HImode);
2205
2206 function = force_reg (HImode, function);
2207 static_chain = force_reg (HImode, static_chain);
2208
2209 emit_insn (gen_trampoline_init (thunk_addr, function, static_chain));
2210 emit_move_insn (mov_addr, thunk_addr);
2211
2212 cfun->machine->trampolines_used = 1;
2213 }
2214
2215 #undef TARGET_TRAMPOLINE_ADJUST_ADDRESS
2216 #define TARGET_TRAMPOLINE_ADJUST_ADDRESS rl78_trampoline_adjust_address
2217
2218 static rtx
2219 rl78_trampoline_adjust_address (rtx m_tramp)
2220 {
2221 rtx x = gen_rtx_MEM (HImode, m_tramp);
2222 return x;
2223 }
2224
2225 /* Expander for cbranchqi4 and cbranchhi4.  The RL78 is missing some of
2226 the "normal" compares; specifically, it only has unsigned compares,
2227 so we must synthesize the missing ones. */
2228 void
2229 rl78_expand_compare (rtx *operands)
2230 {
2231 if (GET_CODE (operands[2]) == MEM)
2232 operands[2] = copy_to_mode_reg (GET_MODE (operands[2]), operands[2]);
2233 }
2234
2235
2236
2237 /* Define this to 1 if you are debugging the peephole optimizers. */
2238 #define DEBUG_PEEP 0
2239
2240 /* Predicate used to enable the peephole2 patterns in rl78-virt.md.
2241 The default "word" size is a byte so we can effectively use all the
2242 registers, but we want to do 16-bit moves whenever possible. This
2243 function determines when such a move is an option. */
2244 bool
2245 rl78_peep_movhi_p (rtx *operands)
2246 {
2247 int i;
2248 rtx m, a;
2249
2250 /* (set (op0) (op1))
2251 (set (op2) (op3)) */
2252
2253 if (! rl78_virt_insns_ok ())
2254 return false;
2255
2256 #if DEBUG_PEEP
2257 fprintf (stderr, "\033[33m");
2258 debug_rtx (operands[0]);
2259 debug_rtx (operands[1]);
2260 debug_rtx (operands[2]);
2261 debug_rtx (operands[3]);
2262 fprintf (stderr, "\033[0m");
2263 #endif
2264
2265 /* You can move a constant to memory as QImode, but not HImode. */
2266 if (GET_CODE (operands[0]) == MEM
2267 && GET_CODE (operands[1]) != REG)
2268 {
2269 #if DEBUG_PEEP
2270 fprintf (stderr, "no peep: move constant to memory\n");
2271 #endif
2272 return false;
2273 }
2274
2275 if (rtx_equal_p (operands[0], operands[3]))
2276 {
2277 #if DEBUG_PEEP
2278 fprintf (stderr, "no peep: overlapping\n");
2279 #endif
2280 return false;
2281 }
2282
2283 for (i = 0; i < 2; i ++)
2284 {
2285 if (GET_CODE (operands[i]) != GET_CODE (operands[i+2]))
2286 {
2287 #if DEBUG_PEEP
2288 fprintf (stderr, "no peep: different codes\n");
2289 #endif
2290 return false;
2291 }
2292 if (GET_MODE (operands[i]) != GET_MODE (operands[i+2]))
2293 {
2294 #if DEBUG_PEEP
2295 fprintf (stderr, "no peep: different modes\n");
2296 #endif
2297 return false;
2298 }
2299
2300 switch (GET_CODE (operands[i]))
2301 {
2302 case REG:
2303 /* LSB MSB */
2304 if (REGNO (operands[i]) + 1 != REGNO (operands[i+2])
2305 || GET_MODE (operands[i]) != QImode)
2306 {
2307 #if DEBUG_PEEP
2308 fprintf (stderr, "no peep: wrong regnos %d %d %d\n",
2309 REGNO (operands[i]), REGNO (operands[i+2]),
2310 i);
2311 #endif
2312 return false;
2313 }
2314 if (! rl78_hard_regno_mode_ok (REGNO (operands[i]), HImode))
2315 {
2316 #if DEBUG_PEEP
2317 fprintf (stderr, "no peep: reg %d not HI\n", REGNO (operands[i]));
2318 #endif
2319 return false;
2320 }
2321 break;
2322
2323 case CONST_INT:
2324 break;
2325
2326 case MEM:
2327 if (GET_MODE (operands[i]) != QImode)
2328 return false;
2329 if (MEM_ALIGN (operands[i]) < 16)
2330 return false;
2331 a = XEXP (operands[i], 0);
2332 if (GET_CODE (a) == CONST)
2333 a = XEXP (a, 0);
2334 if (GET_CODE (a) == PLUS)
2335 a = XEXP (a, 1);
2336 if (GET_CODE (a) == CONST_INT
2337 && INTVAL (a) & 1)
2338 {
2339 #if DEBUG_PEEP
2340 fprintf (stderr, "no peep: misaligned mem %d\n", i);
2341 debug_rtx (operands[i]);
2342 #endif
2343 return false;
2344 }
2345 m = adjust_address (operands[i], QImode, 1);
2346 if (! rtx_equal_p (m, operands[i+2]))
2347 {
2348 #if DEBUG_PEEP
2349 fprintf (stderr, "no peep: wrong mem %d\n", i);
2350 debug_rtx (m);
2351 debug_rtx (operands[i+2]);
2352 #endif
2353 return false;
2354 }
2355 break;
2356
2357 default:
2358 #if DEBUG_PEEP
2359 fprintf (stderr, "no peep: wrong rtx %d\n", i);
2360 #endif
2361 return false;
2362 }
2363 }
2364 #if DEBUG_PEEP
2365 fprintf (stderr, "\033[32mpeep!\033[0m\n");
2366 #endif
2367 return true;
2368 }
2369
2370 /* Likewise, when a peephole is activated, this function helps compute
2371 the new operands. */
2372 void
2373 rl78_setup_peep_movhi (rtx *operands)
2374 {
2375 int i;
2376
2377 for (i = 0; i < 2; i ++)
2378 {
2379 switch (GET_CODE (operands[i]))
2380 {
2381 case REG:
2382 operands[i+4] = gen_rtx_REG (HImode, REGNO (operands[i]));
2383 break;
2384
2385 case CONST_INT:
2386 operands[i+4] = GEN_INT ((INTVAL (operands[i]) & 0xff) + ((char) INTVAL (operands[i+2])) * 256);
2387 break;
2388
2389 case MEM:
2390 operands[i+4] = adjust_address (operands[i], HImode, 0);
2391 break;
2392
2393 default:
2394 break;
2395 }
2396 }
2397 }
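
/* For illustration (a hypothetical pairing, assuming both QImode moves
   pass rl78_peep_movhi_p): combining

       (set (reg:QI 8) (const_int 1))
       (set (reg:QI 9) (const_int 2))

   yields operands[4] = (reg:HI 8) and operands[5] = (const_int 513),
   i.e. 0x0201, since (1 & 0xff) + ((char) 2) * 256 == 513.  */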
2398
2399 /*
2400 How Devirtualization works in the RL78 GCC port
2401
2402 Background
2403
2404 The RL78 is an 8-bit port with some 16-bit operations. It has 32
2405 bytes of register space, in four banks, memory-mapped. One bank is
2406 the "selected" bank and holds the registers used for primary
2407 operations. Since the registers are memory mapped, often you can
2408 still refer to the unselected banks via memory accesses.
2409
2410 Virtual Registers
2411
2412 The GCC port uses bank 0 as the "selected" registers (A, X, BC, etc)
2413 and refers to the other banks via their memory addresses, although
2414 they're treated as regular registers internally. These "virtual"
2415 registers are R8 through R23 (bank3 is reserved for asm-based
2416 interrupt handlers).
2417
2418 There are four machine description files:
2419
2420 rl78.md - common register-independent patterns and definitions
2421 rl78-expand.md - expanders
2422 rl78-virt.md - patterns that match BEFORE devirtualization
2423 rl78-real.md - patterns that match AFTER devirtualization
2424
2425 At least through register allocation and reload, gcc is told that it
2426 can do pretty much anything - but may only use the virtual registers.
2427 GCC cannot, by itself, make efficient use of the varying addressing
2428 modes that the RL78 supports.
2429
2430 Sometime after reload, the RL78 backend "devirtualizes" the RTL. It
2431 uses the "valloc" attribute in rl78-virt.md for determining the rules
2432 by which it will replace virtual registers with real registers (or
2433 not) and how to make up addressing modes. For example, insns tagged
2434 with "ro1" have a single read-only parameter, which may need to be
2435 moved from memory/constant/vreg to a suitable real register. As part
2436 of devirtualization, a flag is toggled, disabling the rl78-virt.md
2437 patterns and enabling the rl78-real.md patterns. The new patterns'
2438 constraints are used to determine the real registers used.  NOTE:
2439 patterns in rl78-virt.md essentially ignore the constraints and rely on
2440 predicates, whereas the rl78-real.md ones essentially ignore the
2441 predicates and rely on the constraints.
2442
2443 The devirtualization pass is scheduled via the pass manager (despite
2444 being called "rl78_reorg") so it can be scheduled prior to var-track
2445 (the idea is to let gdb know about the new registers). Ideally, it
2446 would be scheduled right after pro/epilogue generation, so the
2447 post-reload optimizers could operate on the real registers, but when I
2448 tried that there were some issues building the target libraries.
2449
2450 During devirtualization, a simple register move optimizer is run.
2451 It would be better to run a full CSE/propagation pass on it, but
2452 that has not yet been attempted.
2453
2454 */
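
/* A schematic example of the transformation (a rough sketch, not
   literal output of this pass): before devirtualization an addition
   may be expressed entirely in terms of the memory-mapped virtual
   registers, e.g.

       (set (reg:QI 8) (plus:QI (reg:QI 8) (reg:QI 10)))

   The op2 allocator below would typically rewrite this so that the
   arithmetic happens in the real accumulator, roughly

       mov  a, r8
       add  a, r10
       mov  r8, a

   after which the rl78-real.md patterns can match the rewritten
   insns.  */
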
2455 #define DEBUG_ALLOC 0
2456
2457 #define OP(x) (*recog_data.operand_loc[x])
2458
2459 /* This array is used to hold knowledge about the contents of the
2460 real registers (A ... H), the memory-based registers (r8 ... r31)
2461 and the first NUM_STACK_LOCS words on the stack. We use this to
2462 avoid generating redundant move instructions.
2463
2464 A value in the range 0 .. 31 indicates register A .. r31.
2465 A value in the range 32 .. 63 indicates stack slot (value - 32).
2466 A value of NOT_KNOWN indicates that the contents of that location
2467 are not known. */
2468
2469 #define NUM_STACK_LOCS 32
2470 #define NOT_KNOWN 127
2471
2472 static unsigned char content_memory [32 + NUM_STACK_LOCS];
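
/* For example (illustrative): after a move of A into r8, update_content
   below records content_memory[8] == A_REG and, because the tracking is
   symmetric, content_memory[A_REG] == 8, so a later redundant copy in
   either direction can be omitted.  A recorded value of 36 would instead
   mean "a copy of stack slot 4" (36 - 32).  */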
2473
2474 static unsigned char saved_update_index = NOT_KNOWN;
2475 static unsigned char saved_update_value;
2476 static machine_mode saved_update_mode;
2477
2478
2479 static inline void
2480 clear_content_memory (void)
2481 {
2482 memset (content_memory, NOT_KNOWN, sizeof content_memory);
2483 if (dump_file)
2484 fprintf (dump_file, " clear content memory\n");
2485 saved_update_index = NOT_KNOWN;
2486 }
2487
2488 /* Convert LOC into an index into the content_memory array.
2489 If LOC cannot be converted, return NOT_KNOWN. */
2490
2491 static unsigned char
2492 get_content_index (rtx loc)
2493 {
2494 machine_mode mode;
2495
2496 if (loc == NULL_RTX)
2497 return NOT_KNOWN;
2498
2499 if (REG_P (loc))
2500 {
2501 if (REGNO (loc) < 32)
2502 return REGNO (loc);
2503 return NOT_KNOWN;
2504 }
2505
2506 mode = GET_MODE (loc);
2507
2508 if (! rl78_stack_based_mem (loc, mode))
2509 return NOT_KNOWN;
2510
2511 loc = XEXP (loc, 0);
2512
2513 if (REG_P (loc))
2514 /* loc = MEM (SP) */
2515 return 32;
2516
2517 /* loc = MEM (PLUS (SP, INT)). */
2518 loc = XEXP (loc, 1);
2519
2520 if (INTVAL (loc) < NUM_STACK_LOCS)
2521 return 32 + INTVAL (loc);
2522
2523 return NOT_KNOWN;
2524 }
2525
2526 /* Return a string describing content INDEX in mode MODE.
2527 WARNING: Can return a pointer to a static buffer. */
2528 static const char *
2529 get_content_name (unsigned char index, machine_mode mode)
2530 {
2531 static char buffer [128];
2532
2533 if (index == NOT_KNOWN)
2534 return "Unknown";
2535
2536 if (index > 31)
2537 sprintf (buffer, "stack slot %d", index - 32);
2538 else if (mode == HImode)
2539 sprintf (buffer, "%s%s",
2540 reg_names [index + 1], reg_names [index]);
2541 else
2542 return reg_names [index];
2543
2544 return buffer;
2545 }
2546
2547 #if DEBUG_ALLOC
2548
2549 static void
2550 display_content_memory (FILE * file)
2551 {
2552 unsigned int i;
2553
2554 fprintf (file, " Known memory contents:\n");
2555
2556 for (i = 0; i < sizeof content_memory; i++)
2557 if (content_memory[i] != NOT_KNOWN)
2558 {
2559 fprintf (file, " %s contains a copy of ", get_content_name (i, QImode));
2560 fprintf (file, "%s\n", get_content_name (content_memory [i], QImode));
2561 }
2562 }
2563 #endif
2564
2565 static void
2566 update_content (unsigned char index, unsigned char val, machine_mode mode)
2567 {
2568 unsigned int i;
2569
2570 gcc_assert (index < sizeof content_memory);
2571
2572 content_memory [index] = val;
2573 if (val != NOT_KNOWN)
2574 content_memory [val] = index;
2575
2576 /* Make the entry in dump_file *before* VAL is increased below. */
2577 if (dump_file)
2578 {
2579 fprintf (dump_file, " %s now contains ", get_content_name (index, mode));
2580 if (val == NOT_KNOWN)
2581 fprintf (dump_file, "Unknown\n");
2582 else
2583 fprintf (dump_file, "%s and vice versa\n", get_content_name (val, mode));
2584 }
2585
2586 if (mode == HImode)
2587 {
2588 val = val == NOT_KNOWN ? val : val + 1;
2589
2590 content_memory [index + 1] = val;
2591 if (val != NOT_KNOWN)
2592 {
2593 content_memory [val] = index + 1;
2594 -- val;
2595 }
2596 }
2597
2598 /* Any other places that had INDEX recorded as their contents are now invalid. */
2599 for (i = 0; i < sizeof content_memory; i++)
2600 {
2601 if (i == index
2602 || (val != NOT_KNOWN && i == val))
2603 {
2604 if (mode == HImode)
2605 ++ i;
2606 continue;
2607 }
2608
2609 if (content_memory[i] == index
2610 || (val != NOT_KNOWN && content_memory[i] == val))
2611 {
2612 content_memory[i] = NOT_KNOWN;
2613
2614 if (dump_file)
2615 fprintf (dump_file, " %s cleared\n", get_content_name (i, mode));
2616
2617 if (mode == HImode)
2618 content_memory[++ i] = NOT_KNOWN;
2619 }
2620 }
2621 }
2622
2623 /* Record that LOC contains VALUE.
2624 For HImode locations record that LOC+1 contains VALUE+1.
2625 If LOC is not a register or stack slot, do nothing.
2626 If VALUE is not a register or stack slot, clear the recorded content. */
2627
2628 static void
2629 record_content (rtx loc, rtx value)
2630 {
2631 machine_mode mode;
2632 unsigned char index;
2633 unsigned char val;
2634
2635 if ((index = get_content_index (loc)) == NOT_KNOWN)
2636 return;
2637
2638 val = get_content_index (value);
2639
2640 mode = GET_MODE (loc);
2641
2642 if (val == index)
2643 {
2644 if (! optimize)
2645 return;
2646
2647 /* This should not happen when optimizing. */
2648 #if 1
2649 fprintf (stderr, "ASSIGNMENT of location to itself detected! [%s]\n",
2650 get_content_name (val, mode));
2651 return;
2652 #else
2653 gcc_unreachable ();
2654 #endif
2655 }
2656
2657 update_content (index, val, mode);
2658 }
2659
2660 /* Returns TRUE if LOC already contains a copy of VALUE. */
2661
2662 static bool
2663 already_contains (rtx loc, rtx value)
2664 {
2665 unsigned char index;
2666 unsigned char val;
2667
2668 if ((index = get_content_index (loc)) == NOT_KNOWN)
2669 return false;
2670
2671 if ((val = get_content_index (value)) == NOT_KNOWN)
2672 return false;
2673
2674 if (content_memory [index] != val)
2675 return false;
2676
2677 if (GET_MODE (loc) == HImode)
2678 return content_memory [index + 1] == val + 1;
2679
2680 return true;
2681 }
2682
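/* Return true if ADDR (either a MEM or a bare address) is wrapped in an
   UNS_ES_ADDR unspec, i.e. it is an ES-relative far address.  */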
2683 bool
2684 rl78_es_addr (rtx addr)
2685 {
2686 if (GET_CODE (addr) == MEM)
2687 addr = XEXP (addr, 0);
2688 if (GET_CODE (addr) != UNSPEC)
2689 return false;
2690 if (XINT (addr, 1) != UNS_ES_ADDR)
2691 return false;
2692 return true;
2693 }
2694
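/* Given an ES-relative address (or a MEM using one), return a MEM whose
   address is just the 16-bit offset part, stripping any %lo16 wrapper.  */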
2695 rtx
2696 rl78_es_base (rtx addr)
2697 {
2698 if (GET_CODE (addr) == MEM)
2699 addr = XEXP (addr, 0);
2700 addr = XVECEXP (addr, 0, 1);
2701 if (GET_CODE (addr) == CONST
2702 && GET_CODE (XEXP (addr, 0)) == ZERO_EXTRACT)
2703 addr = XEXP (XEXP (addr, 0), 0);
2704 /* Mode doesn't matter here. */
2705 return gen_rtx_MEM (HImode, addr);
2706 }
2707
2708 /* Rescans an insn to see if it's recognized again. This is done
2709 carefully to ensure that all the constraint information is accurate
2710 for the newly matched insn. */
2711 static bool
2712 insn_ok_now (rtx_insn * insn)
2713 {
2714 rtx pattern = PATTERN (insn);
2715 int i;
2716
2717 INSN_CODE (insn) = -1;
2718
2719 if (recog (pattern, insn, 0) > -1)
2720 {
2721 extract_insn (insn);
2722 if (constrain_operands (1, get_preferred_alternatives (insn)))
2723 {
2724 #if DEBUG_ALLOC
2725 fprintf (stderr, "\033[32m");
2726 debug_rtx (insn);
2727 fprintf (stderr, "\033[0m");
2728 #endif
2729 if (SET_P (pattern))
2730 record_content (SET_DEST (pattern), SET_SRC (pattern));
2731
2732 /* We need to detect far addresses that haven't been
2733 converted to es/lo16 format. */
2734 for (i=0; i<recog_data.n_operands; i++)
2735 if (GET_CODE (OP (i)) == MEM
2736 && GET_MODE (XEXP (OP (i), 0)) == SImode
2737 && GET_CODE (XEXP (OP (i), 0)) != UNSPEC)
2738 return false;
2739
2740 return true;
2741 }
2742 }
2743 else
2744 {
2745 /* We need to re-recog the insn with virtual registers to get
2746 the operands. */
2747 cfun->machine->virt_insns_ok = 1;
2748 if (recog (pattern, insn, 0) > -1)
2749 {
2750 extract_insn (insn);
2751 if (constrain_operands (0, get_preferred_alternatives (insn)))
2752 {
2753 cfun->machine->virt_insns_ok = 0;
2754 return false;
2755 }
2756 }
2757
2758 #if DEBUG_ALLOC
2759 fprintf (stderr, "\033[41;30m Unrecognized *virtual* insn \033[0m\n");
2760 debug_rtx (insn);
2761 #endif
2762 gcc_unreachable ();
2763 }
2764
2765 #if DEBUG_ALLOC
2766 fprintf (stderr, "\033[31m");
2767 debug_rtx (insn);
2768 fprintf (stderr, "\033[0m");
2769 #endif
2770 return false;
2771 }
2772
2773 #if DEBUG_ALLOC
2774 #define WORKED fprintf (stderr, "\033[48;5;22m Worked at line %d \033[0m\n", __LINE__)
2775 #define FAILEDSOFAR fprintf (stderr, "\033[48;5;52m FAILED at line %d \033[0m\n", __LINE__)
2776 #define FAILED fprintf (stderr, "\033[48;5;52m FAILED at line %d \033[0m\n", __LINE__), gcc_unreachable ()
2777 #define MAYBE_OK(insn) if (insn_ok_now (insn)) { WORKED; return; } else { FAILEDSOFAR; }
2778 #define MUST_BE_OK(insn) if (insn_ok_now (insn)) { WORKED; return; } FAILED
2779 #else
2780 #define FAILED gcc_unreachable ()
2781 #define MAYBE_OK(insn) if (insn_ok_now (insn)) return;
2782 #define MUST_BE_OK(insn) if (insn_ok_now (insn)) return; FAILED
2783 #endif
2784
2785 /* Registers into which we move the contents of virtual registers. */
2786 #define X gen_rtx_REG (QImode, X_REG)
2787 #define A gen_rtx_REG (QImode, A_REG)
2788 #define C gen_rtx_REG (QImode, C_REG)
2789 #define B gen_rtx_REG (QImode, B_REG)
2790 #define E gen_rtx_REG (QImode, E_REG)
2791 #define D gen_rtx_REG (QImode, D_REG)
2792 #define L gen_rtx_REG (QImode, L_REG)
2793 #define H gen_rtx_REG (QImode, H_REG)
2794
2795 #define AX gen_rtx_REG (HImode, AX_REG)
2796 #define BC gen_rtx_REG (HImode, BC_REG)
2797 #define DE gen_rtx_REG (HImode, DE_REG)
2798 #define HL gen_rtx_REG (HImode, HL_REG)
2799
2800 /* Returns TRUE if R is a virtual register. */
2801 static inline bool
2802 is_virtual_register (rtx r)
2803 {
2804 return (GET_CODE (r) == REG
2805 && REGNO (r) >= 8
2806 && REGNO (r) < 32);
2807 }
2808
2809 /* In all these alloc routines, we expect the following: the insn
2810 pattern is unshared, the insn was previously recognized and failed
2811 due to predicates or constraints, and the operand data is in
2812 recog_data. */
2813
2814 static int virt_insn_was_frame;
2815
2816 /* Hook for all insns we emit. Re-mark them as FRAME_RELATED if
2817 needed. */
2818 static rtx
2819 EM2 (int line ATTRIBUTE_UNUSED, rtx r)
2820 {
2821 #if DEBUG_ALLOC
2822 fprintf (stderr, "\033[36m%d: ", line);
2823 debug_rtx (r);
2824 fprintf (stderr, "\033[0m");
2825 #endif
2826 /*SCHED_GROUP_P (r) = 1;*/
2827 if (virt_insn_was_frame)
2828 RTX_FRAME_RELATED_P (r) = 1;
2829 return r;
2830 }
2831
2832 #define EM(x) EM2 (__LINE__, x)
2833
2834 /* Return a suitable RTX for the low half of a __far address. */
2835 static rtx
2836 rl78_lo16 (rtx addr)
2837 {
2838 rtx r;
2839
2840 if (GET_CODE (addr) == SYMBOL_REF
2841 || GET_CODE (addr) == CONST)
2842 {
2843 r = gen_rtx_ZERO_EXTRACT (HImode, addr, GEN_INT (16), GEN_INT (0));
2844 r = gen_rtx_CONST (HImode, r);
2845 }
2846 else
2847 r = rl78_subreg (HImode, addr, SImode, 0);
2848
2849 r = gen_es_addr (r);
2850 cfun->machine->uses_es = true;
2851
2852 return r;
2853 }
2854
2855 /* Return a suitable RTX for the high half's lower byte of a __far address. */
2856 static rtx
2857 rl78_hi8 (rtx addr)
2858 {
2859 if (GET_CODE (addr) == SYMBOL_REF
2860 || GET_CODE (addr) == CONST)
2861 {
2862 rtx r = gen_rtx_ZERO_EXTRACT (QImode, addr, GEN_INT (8), GEN_INT (16));
2863 r = gen_rtx_CONST (QImode, r);
2864 return r;
2865 }
2866 return rl78_subreg (QImode, addr, SImode, 2);
2867 }
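
/* For illustration (an assumed example address, not taken from real
   output): a __far access to 0x0F1234 is split so that the upper byte
   0x0F (the rl78_hi8 part) is loaded into the ES register, while the
   lower 16 bits 0x1234 (the rl78_lo16 part, wrapped in an es_addr
   unspec) serve as the in-segment offset.  */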
2868
2869 static void
2870 add_postponed_content_update (rtx to, rtx value)
2871 {
2872 unsigned char index;
2873
2874 if ((index = get_content_index (to)) == NOT_KNOWN)
2875 return;
2876
2877 gcc_assert (saved_update_index == NOT_KNOWN);
2878 saved_update_index = index;
2879 saved_update_value = get_content_index (value);
2880 saved_update_mode = GET_MODE (to);
2881 }
2882
2883 static void
2884 process_postponed_content_update (void)
2885 {
2886 if (saved_update_index != NOT_KNOWN)
2887 {
2888 update_content (saved_update_index, saved_update_value, saved_update_mode);
2889 saved_update_index = NOT_KNOWN;
2890 }
2891 }
2892
2893 /* Generate and emit a move of (register) FROM into TO.  If WHERE is not
2894 NULL, emit the insn before WHERE when BEFORE is true, otherwise emit it
2895 after WHERE.  If TO already contains FROM then do nothing.  Returns TO if
2896 BEFORE is true, FROM otherwise. */
2897 static rtx
2898 gen_and_emit_move (rtx to, rtx from, rtx_insn *where, bool before)
2899 {
2900 machine_mode mode = GET_MODE (to);
2901
2902 if (optimize && before && already_contains (to, from))
2903 {
2904 #if DEBUG_ALLOC
2905 display_content_memory (stderr);
2906 #endif
2907 if (dump_file)
2908 {
2909 fprintf (dump_file, " Omit move of %s into ",
2910 get_content_name (get_content_index (from), mode));
2911 fprintf (dump_file, "%s as it already contains this value\n",
2912 get_content_name (get_content_index (to), mode));
2913 }
2914 }
2915 else
2916 {
2917 rtx move = mode == QImode ? gen_movqi (to, from) : gen_movhi (to, from);
2918
2919 EM (move);
2920
2921 if (where == NULL_RTX)
2922 emit_insn (move);
2923 else if (before)
2924 emit_insn_before (move, where);
2925 else
2926 {
2927 rtx note = find_reg_note (where, REG_EH_REGION, NULL_RTX);
2928
2929 /* If necessary move REG_EH_REGION notes forward.
2930 cf. compiling gcc.dg/pr44545.c. */
2931 if (note != NULL_RTX)
2932 {
2933 add_reg_note (move, REG_EH_REGION, XEXP (note, 0));
2934 remove_note (where, note);
2935 }
2936
2937 emit_insn_after (move, where);
2938 }
2939
2940 if (before)
2941 record_content (to, from);
2942 else
2943 add_postponed_content_update (to, from);
2944 }
2945
2946 return before ? to : from;
2947 }
2948
2949 /* If M is MEM(REG) or MEM(PLUS(REG,INT)) and REG is virtual then
2950 copy it into NEWBASE and return the updated MEM. Otherwise just
2951 return M. Any needed insns are emitted before BEFORE. */
2952 static rtx
2953 transcode_memory_rtx (rtx m, rtx newbase, rtx_insn *before)
2954 {
2955 rtx base, index, addendr;
2956 int addend = 0;
2957 int need_es = 0;
2958
2959 if (! MEM_P (m))
2960 return m;
2961
2962 if (GET_MODE (XEXP (m, 0)) == SImode)
2963 {
2964 rtx new_m;
2965 rtx seg = rl78_hi8 (XEXP (m, 0));
2966
2967 if (!TARGET_ES0)
2968 {
2969 emit_insn_before (EM (gen_movqi (A, seg)), before);
2970 emit_insn_before (EM (gen_movqi_to_es (A)), before);
2971 }
2972
2973 record_content (A, NULL_RTX);
2974
2975 new_m = gen_rtx_MEM (GET_MODE (m), rl78_lo16 (XEXP (m, 0)));
2976 MEM_COPY_ATTRIBUTES (new_m, m);
2977 m = new_m;
2978 need_es = 1;
2979 }
2980
2981 characterize_address (XEXP (m, 0), & base, & index, & addendr);
2982 gcc_assert (index == NULL_RTX);
2983
2984 if (base == NULL_RTX)
2985 return m;
2986
2987 if (addendr && GET_CODE (addendr) == CONST_INT)
2988 addend = INTVAL (addendr);
2989
2990 gcc_assert (REG_P (base));
2991 gcc_assert (REG_P (newbase));
2992
2993 int limit = 256 - GET_MODE_SIZE (GET_MODE (m));
2994
2995 if (REGNO (base) == SP_REG)
2996 {
2997 if (addend >= 0 && addend <= limit)
2998 return m;
2999 }
3000
3001 /* BASE should be a virtual register. We copy it to NEWBASE. If
3002 the addend is out of range for DE/HL, we use AX to compute the full
3003 address. */
3004
3005 if (addend < 0
3006 || (addend > limit && REGNO (newbase) != BC_REG)
3007 || (addendr
3008 && (GET_CODE (addendr) != CONST_INT)
3009 && ((REGNO (newbase) != BC_REG))
3010 ))
3011 {
3012 /* mov ax, vreg
3013 add ax, #imm
3014 mov hl, ax */
3015 EM (emit_insn_before (gen_movhi (AX, base), before));
3016 EM (emit_insn_before (gen_addhi3 (AX, AX, addendr), before));
3017 EM (emit_insn_before (gen_movhi (newbase, AX), before));
3018 record_content (AX, NULL_RTX);
3019 record_content (newbase, NULL_RTX);
3020
3021 base = newbase;
3022 addend = 0;
3023 addendr = 0;
3024 }
3025 else
3026 {
3027 base = gen_and_emit_move (newbase, base, before, true);
3028 }
3029
3030 if (addend)
3031 {
3032 record_content (base, NULL_RTX);
3033 base = gen_rtx_PLUS (HImode, base, GEN_INT (addend));
3034 }
3035 else if (addendr)
3036 {
3037 record_content (base, NULL_RTX);
3038 base = gen_rtx_PLUS (HImode, base, addendr);
3039 }
3040
3041 if (need_es)
3042 {
3043 m = change_address (m, GET_MODE (m), gen_es_addr (base));
3044 cfun->machine->uses_es = true;
3045 }
3046 else
3047 m = change_address (m, GET_MODE (m), base);
3048 return m;
3049 }
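
/* An illustrative sketch of the rewrite above (not literal output): with
   NEWBASE == HL, a virtual-register MEM such as
   (mem:QI (plus:HI (reg:HI 10) (const_int 4))) becomes a [HL + 4] access
   after a copy of the r10/r11 pair into HL is emitted, whereas an
   out-of-range addend (e.g. 300) instead goes through the AX path
   sketched in the comment above, leaving a plain [HL] access.  */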
3050
3051 /* Copy SRC to accumulator (A or AX), placing any generated insns
3052 before BEFORE. Returns accumulator RTX. */
3053 static rtx
3054 move_to_acc (int opno, rtx_insn *before)
3055 {
3056 rtx src = OP (opno);
3057 machine_mode mode = GET_MODE (src);
3058
3059 if (REG_P (src) && REGNO (src) < 2)
3060 return src;
3061
3062 if (mode == VOIDmode)
3063 mode = recog_data.operand_mode[opno];
3064
3065 return gen_and_emit_move (mode == QImode ? A : AX, src, before, true);
3066 }
3067
3068 static void
3069 force_into_acc (rtx src, rtx_insn *before)
3070 {
3071 machine_mode mode = GET_MODE (src);
3072 rtx move;
3073
3074 if (REG_P (src) && REGNO (src) < 2)
3075 return;
3076
3077 move = mode == QImode ? gen_movqi (A, src) : gen_movhi (AX, src);
3078
3079 EM (move);
3080
3081 emit_insn_before (move, before);
3082 record_content (AX, NULL_RTX);
3083 }
3084
3085 /* Copy accumulator (A or AX) to DEST, placing any generated insns
3086 after AFTER. Returns accumulator RTX. */
3087 static rtx
3088 move_from_acc (unsigned int opno, rtx_insn *after)
3089 {
3090 rtx dest = OP (opno);
3091 machine_mode mode = GET_MODE (dest);
3092
3093 if (REG_P (dest) && REGNO (dest) < 2)
3094 return dest;
3095
3096 return gen_and_emit_move (dest, mode == QImode ? A : AX, after, false);
3097 }
3098
3099 /* Copy accumulator (A or AX) to REGNO, placing any generated insns
3100 before BEFORE. Returns reg RTX. */
3101 static rtx
3102 move_acc_to_reg (rtx acc, int regno, rtx_insn *before)
3103 {
3104 machine_mode mode = GET_MODE (acc);
3105 rtx reg;
3106
3107 reg = gen_rtx_REG (mode, regno);
3108
3109 return gen_and_emit_move (reg, acc, before, true);
3110 }
3111
3112 /* Copy SRC to X, placing any generated insns before BEFORE.
3113 Returns X RTX. */
3114 static rtx
3115 move_to_x (int opno, rtx_insn *before)
3116 {
3117 rtx src = OP (opno);
3118 machine_mode mode = GET_MODE (src);
3119 rtx reg;
3120
3121 if (mode == VOIDmode)
3122 mode = recog_data.operand_mode[opno];
3123 reg = (mode == QImode) ? X : AX;
3124
3125 if (mode == QImode || ! is_virtual_register (OP (opno)))
3126 {
3127 OP (opno) = move_to_acc (opno, before);
3128 OP (opno) = move_acc_to_reg (OP (opno), X_REG, before);
3129 return reg;
3130 }
3131
3132 return gen_and_emit_move (reg, src, before, true);
3133 }
3134
3135 /* Copy OP (opno) to H or HL, placing any generated insns before BEFORE.
3136 Returns H/HL RTX. */
3137 static rtx
3138 move_to_hl (int opno, rtx_insn *before)
3139 {
3140 rtx src = OP (opno);
3141 machine_mode mode = GET_MODE (src);
3142 rtx reg;
3143
3144 if (mode == VOIDmode)
3145 mode = recog_data.operand_mode[opno];
3146 reg = (mode == QImode) ? L : HL;
3147
3148 if (mode == QImode || ! is_virtual_register (OP (opno)))
3149 {
3150 OP (opno) = move_to_acc (opno, before);
3151 OP (opno) = move_acc_to_reg (OP (opno), L_REG, before);
3152 return reg;
3153 }
3154
3155 return gen_and_emit_move (reg, src, before, true);
3156 }
3157
3158 /* Copy OP (opno) to E or DE, placing any generated insns before BEFORE.
3159 Returns E/DE RTX. */
3160 static rtx
3161 move_to_de (int opno, rtx_insn *before)
3162 {
3163 rtx src = OP (opno);
3164 machine_mode mode = GET_MODE (src);
3165 rtx reg;
3166
3167 if (mode == VOIDmode)
3168 mode = recog_data.operand_mode[opno];
3169
3170 reg = (mode == QImode) ? E : DE;
3171
3172 if (mode == QImode || ! is_virtual_register (OP (opno)))
3173 {
3174 OP (opno) = move_to_acc (opno, before);
3175 OP (opno) = move_acc_to_reg (OP (opno), E_REG, before);
3176 }
3177 else
3178 {
3179 gen_and_emit_move (reg, src, before, true);
3180 }
3181
3182 return reg;
3183 }
3184
3185 /* Devirtualize an insn of the form (SET (op) (unop (op))). */
3186 static void
3187 rl78_alloc_physical_registers_op1 (rtx_insn * insn)
3188 {
3189 /* op[0] = func op[1] */
3190
3191 /* We first try using A as the destination, then copying it
3192 back. */
3193 if (rtx_equal_p (OP (0), OP (1)))
3194 {
3195 OP (0) =
3196 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3197 }
3198 else
3199 {
3200 /* If necessary, load the operands into BC and HL.
3201 Check to see if we already have OP (0) in HL
3202 and if so, swap the order.
3203
3204 It is tempting to perform this optimization when OP(0) does
3205 not hold a MEM, but this leads to bigger code in general.
3206 The problem is that if OP(1) holds a MEM then swapping it
3207 into BC means a BC-relative load is used and these are 3
3208 bytes long vs 1 byte for an HL load. */
3209 if (MEM_P (OP (0))
3210 && already_contains (HL, XEXP (OP (0), 0)))
3211 {
3212 OP (0) = transcode_memory_rtx (OP (0), HL, insn);
3213 OP (1) = transcode_memory_rtx (OP (1), BC, insn);
3214 }
3215 else
3216 {
3217 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3218 OP (1) = transcode_memory_rtx (OP (1), HL, insn);
3219 }
3220 }
3221
3222 MAYBE_OK (insn);
3223
3224 OP (0) = move_from_acc (0, insn);
3225
3226 MAYBE_OK (insn);
3227
3228 /* Failing that, try copying the source into the accumulator first;
3229 this handles, for example, ZERO_EXTEND or NOT. */
3230 OP (1) = move_to_acc (1, insn);
3231
3232 MUST_BE_OK (insn);
3233 }
3234
3235 /* Returns true if operand OPNUM contains a constraint of type CONSTRAINT.
3236 Assumes that the current insn has already been recognised and hence the
3237 constraint data has been filled in. */
3238 static bool
3239 has_constraint (unsigned int opnum, enum constraint_num constraint)
3240 {
3241 const char * p = recog_data.constraints[opnum];
3242
3243 /* No constraints means anything is accepted. */
3244 if (p == NULL || *p == 0 || *p == ',')
3245 return true;
3246
3247 do
3248 {
3249 char c;
3250 unsigned int len;
3251
3252 c = *p;
3253 len = CONSTRAINT_LEN (c, p);
3254 gcc_assert (len > 0);
3255
3256 switch (c)
3257 {
3258 case 0:
3259 case ',':
3260 return false;
3261 default:
3262 if (lookup_constraint (p) == constraint)
3263 return true;
3264 }
3265 p += len;
3266 }
3267 while (1);
3268 }
3269
3270 /* Devirtualize an insn of the form (SET (op) (binop (op) (op))). */
3271 static void
3272 rl78_alloc_physical_registers_op2 (rtx_insn * insn)
3273 {
3274 rtx_insn *prev;
3275 rtx_insn *first;
3276 bool hl_used;
3277 int tmp_id;
3278 rtx saved_op1;
3279
3280 if (rtx_equal_p (OP (0), OP (1)))
3281 {
3282 if (MEM_P (OP (2)))
3283 {
3284 OP (0) =
3285 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3286 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3287 }
3288 else
3289 {
3290 OP (0) =
3291 OP (1) = transcode_memory_rtx (OP (1), HL, insn);
3292 OP (2) = transcode_memory_rtx (OP (2), DE, insn);
3293 }
3294 }
3295 else if (rtx_equal_p (OP (0), OP (2)))
3296 {
3297 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3298 OP (0) =
3299 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3300 }
3301 else
3302 {
3303 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3304 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3305 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3306 }
3307
3308 MAYBE_OK (insn);
3309
3310 prev = prev_nonnote_nondebug_insn (insn);
3311 if (recog_data.constraints[1][0] == '%'
3312 && is_virtual_register (OP (1))
3313 && ! is_virtual_register (OP (2))
3314 && ! CONSTANT_P (OP (2)))
3315 {
3316 rtx tmp = OP (1);
3317 OP (1) = OP (2);
3318 OP (2) = tmp;
3319 }
3320
3321 /* Make a note of whether (H)L is being used. It matters
3322 because if OP (2) also needs reloading, then we must take
3323 care not to corrupt HL. */
3324 hl_used = reg_mentioned_p (L, OP (0)) || reg_mentioned_p (L, OP (1));
3325
3326 /* If HL is not currently being used and dest == op1 then there are
3327 some possible optimizations available by reloading one of the
3328 operands into HL, before trying to use the accumulator. */
3329 if (optimize
3330 && ! hl_used
3331 && rtx_equal_p (OP (0), OP (1)))
3332 {
3333 /* If op0 is a Ws1 type memory address then switching the base
3334 address register to HL might allow us to perform an in-memory
3335 operation. (eg for the INCW instruction).
3336
3337 FIXME: Adding the move into HL is costly if this optimization is not
3338 going to work, so for now, make sure that we know that the new insn will
3339 match the requirements of the addhi3_real pattern. Really we ought to
3340 generate a candidate sequence, test that, and then install it if the
3341 results are good. */
3342 if (satisfies_constraint_Ws1 (OP (0))
3343 && has_constraint (0, CONSTRAINT_Wh1)
3344 && (satisfies_constraint_K (OP (2)) || satisfies_constraint_L (OP (2))))
3345 {
3346 rtx base, index, addend, newbase;
3347
3348 characterize_address (XEXP (OP (0), 0), & base, & index, & addend);
3349 gcc_assert (index == NULL_RTX);
3350 gcc_assert (REG_P (base) && REGNO (base) == SP_REG);
3351
3352 /* Ws1 addressing allows an offset of 0, Wh1 addressing requires a non-zero offset. */
3353 if (addend != NULL_RTX)
3354 {
3355 newbase = gen_and_emit_move (HL, base, insn, true);
3356 record_content (newbase, NULL_RTX);
3357 newbase = gen_rtx_PLUS (HImode, newbase, addend);
3358
3359 OP (0) = OP (1) = change_address (OP (0), VOIDmode, newbase);
3360
3361 /* We do not want to fail here as this means that
3362 we have inserted useless insns into the stream. */
3363 MUST_BE_OK (insn);
3364 }
3365 }
3366 else if (REG_P (OP (0))
3367 && satisfies_constraint_Ws1 (OP (2))
3368 && has_constraint (2, CONSTRAINT_Wh1))
3369 {
3370 rtx base, index, addend, newbase;
3371
3372 characterize_address (XEXP (OP (2), 0), & base, & index, & addend);
3373 gcc_assert (index == NULL_RTX);
3374 gcc_assert (REG_P (base) && REGNO (base) == SP_REG);
3375
3376 /* Ws1 addressing allows an offset of 0, Wh1 addressing requires a non-zero offset. */
3377 if (addend != NULL_RTX)
3378 {
3379 gen_and_emit_move (HL, base, insn, true);
3380
3381 if (REGNO (OP (0)) != X_REG)
3382 {
3383 OP (1) = move_to_acc (1, insn);
3384 OP (0) = move_from_acc (0, insn);
3385 }
3386
3387 record_content (HL, NULL_RTX);
3388 newbase = gen_rtx_PLUS (HImode, HL, addend);
3389
3390 OP (2) = change_address (OP (2), VOIDmode, newbase);
3391
3392 /* We do not want to fail here as this means that
3393 we have inserted useless insns into the stream. */
3394 MUST_BE_OK (insn);
3395 }
3396 }
3397 }
3398
3399 OP (0) = move_from_acc (0, insn);
3400
3401 tmp_id = get_max_insn_count ();
3402 saved_op1 = OP (1);
3403
3404 if (rtx_equal_p (OP (1), OP (2)))
3405 OP (2) = OP (1) = move_to_acc (1, insn);
3406 else
3407 OP (1) = move_to_acc (1, insn);
3408
3409 MAYBE_OK (insn);
3410
3411 /* If we omitted the move of OP1 into the accumulator (because
3412 it was already there from a previous insn), then force the
3413 generation of the move instruction now. We know that we
3414 are about to emit a move into HL (or DE) via AX, and hence
3415 our optimization to remove the load of OP1 is no longer valid. */
3416 if (tmp_id == get_max_insn_count ())
3417 force_into_acc (saved_op1, insn);
3418
3419 /* We have to copy op2 to HL (or DE), but that involves AX, which
3420 already has a live value. Emit it before those insns. */
3421
3422 if (prev)
3423 first = next_nonnote_nondebug_insn (prev);
3424 else
3425 for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
3426 ;
3427
3428 OP (2) = hl_used ? move_to_de (2, first) : move_to_hl (2, first);
3429
3430 MUST_BE_OK (insn);
3431 }
3432
3433 /* Devirtualize an insn of the form SET (PC) (MEM/REG). */
3434 static void
3435 rl78_alloc_physical_registers_ro1 (rtx_insn * insn)
3436 {
3437 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3438
3439 MAYBE_OK (insn);
3440
3441 OP (0) = move_to_acc (0, insn);
3442
3443 MUST_BE_OK (insn);
3444 }
3445
3446 /* Devirtualize a compare insn. */
3447 static void
3448 rl78_alloc_physical_registers_cmp (rtx_insn * insn)
3449 {
3450 int tmp_id;
3451 rtx saved_op1;
3452 rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
3453 rtx_insn *first;
3454
3455 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3456 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3457
3458 /* HI compares have to have OP (1) in AX, but QI
3459 compares do not, so it is worth checking here. */
3460 MAYBE_OK (insn);
3461
3462 /* For an HImode compare, OP (1) must always be in AX.
3463 But if OP (1) is a REG (and not AX), then we can avoid
3464 a reload of OP (1) if we reload OP (2) into AX and invert
3465 the comparison. */
3466 if (REG_P (OP (1))
3467 && REGNO (OP (1)) != AX_REG
3468 && GET_MODE (OP (1)) == HImode
3469 && MEM_P (OP (2)))
3470 {
3471 rtx cmp = XEXP (SET_SRC (PATTERN (insn)), 0);
3472
3473 OP (2) = move_to_acc (2, insn);
3474
3475 switch (GET_CODE (cmp))
3476 {
3477 case EQ:
3478 case NE:
3479 break;
3480 case LTU: cmp = gen_rtx_GTU (HImode, OP (2), OP (1)); break;
3481 case GTU: cmp = gen_rtx_LTU (HImode, OP (2), OP (1)); break;
3482 case LEU: cmp = gen_rtx_GEU (HImode, OP (2), OP (1)); break;
3483 case GEU: cmp = gen_rtx_LEU (HImode, OP (2), OP (1)); break;
3484
3485 case LT:
3486 case GT:
3487 case LE:
3488 case GE:
3489 #if DEBUG_ALLOC
3490 debug_rtx (insn);
3491 #endif
3492 default:
3493 gcc_unreachable ();
3494 }
3495
3496 if (GET_CODE (cmp) == EQ || GET_CODE (cmp) == NE)
3497 PATTERN (insn) = gen_cbranchhi4_real (cmp, OP (2), OP (1), OP (3));
3498 else
3499 PATTERN (insn) = gen_cbranchhi4_real_inverted (cmp, OP (2), OP (1), OP (3));
3500
3501 MUST_BE_OK (insn);
3502 }
3503
3504 /* Surprisingly, gcc can generate a comparison of a register with itself, but this
3505 should be handled by the second alternative of the cbranchhi_real pattern. */
3506 if (rtx_equal_p (OP (1), OP (2)))
3507 {
3508 OP (1) = OP (2) = BC;
3509 MUST_BE_OK (insn);
3510 }
3511
3512 tmp_id = get_max_insn_count ();
3513 saved_op1 = OP (1);
3514
3515 OP (1) = move_to_acc (1, insn);
3516
3517 MAYBE_OK (insn);
3518
3519 /* If we omitted the move of OP1 into the accumulator (because
3520 it was already there from a previous insn), then force the
3521 generation of the move instruction now. We know that we
3522 are about to emit a move into HL via AX, and hence our
3523 optimization to remove the load of OP1 is no longer valid. */
3524 if (tmp_id == get_max_insn_count ())
3525 force_into_acc (saved_op1, insn);
3526
3527 /* We have to copy op2 to HL, but that involves the acc, which
3528 already has a live value. Emit it before those insns. */
3529 if (prev)
3530 first = next_nonnote_nondebug_insn (prev);
3531 else
3532 for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
3533 ;
3534 OP (2) = move_to_hl (2, first);
3535
3536 MUST_BE_OK (insn);
3537 }
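
/* To illustrate the inversion performed above (a sketch): for an HImode
   branch on (ltu (reg:HI 8) (mem:HI ...)), rather than reloading r8/r9
   into AX we load the MEM operand into AX and branch on (gtu AX r8),
   which tests the same condition with the operands swapped.  */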
3538
3539 /* Like op2, but AX = A * X. */
3540 static void
3541 rl78_alloc_physical_registers_umul (rtx_insn * insn)
3542 {
3543 rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
3544 rtx_insn *first;
3545 int tmp_id;
3546 rtx saved_op1;
3547
3548 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3549 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3550 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3551
3552 MAYBE_OK (insn);
3553
3554 if (recog_data.constraints[1][0] == '%'
3555 && is_virtual_register (OP (1))
3556 && !is_virtual_register (OP (2))
3557 && !CONSTANT_P (OP (2)))
3558 {
3559 rtx tmp = OP (1);
3560 OP (1) = OP (2);
3561 OP (2) = tmp;
3562 }
3563
3564 OP (0) = move_from_acc (0, insn);
3565
3566 tmp_id = get_max_insn_count ();
3567 saved_op1 = OP (1);
3568
3569 if (rtx_equal_p (OP (1), OP (2)))
3570 {
3571 gcc_assert (GET_MODE (OP (2)) == QImode);
3572 /* The MULU instruction does not support duplicate arguments
3573 but we know that if we copy OP (2) to X it will do so via
3574 A and thus OP (1) will already be loaded into A. */
3575 OP (2) = move_to_x (2, insn);
3576 OP (1) = A;
3577 }
3578 else
3579 OP (1) = move_to_acc (1, insn);
3580
3581 MAYBE_OK (insn);
3582
3583 /* If we omitted the move of OP1 into the accumulator (because
3584 it was already there from a previous insn), then force the
3585 generation of the move instruction now. We know that we
3586 are about to emit a move into HL (or DE) via AX, and hence
3587 our optimization to remove the load of OP1 is no longer valid. */
3588 if (tmp_id == get_max_insn_count ())
3589 force_into_acc (saved_op1, insn);
3590
3591 /* We have to copy op2 to X, but that involves the acc, which
3592 already has a live value. Emit it before those insns. */
3593
3594 if (prev)
3595 first = next_nonnote_nondebug_insn (prev);
3596 else
3597 for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
3598 ;
3599 OP (2) = move_to_x (2, first);
3600
3601 MUST_BE_OK (insn);
3602 }
3603
3604 static void
3605 rl78_alloc_address_registers_macax (rtx_insn * insn)
3606 {
3607 int which, op;
3608 bool replace_in_op0 = false;
3609 bool replace_in_op1 = false;
3610
3611 MAYBE_OK (insn);
3612
3613 /* Two different MEMs are not allowed. */
3614 which = 0;
3615 for (op = 2; op >= 0; op --)
3616 {
3617 if (MEM_P (OP (op)))
3618 {
3619 if (op == 0 && replace_in_op0)
3620 continue;
3621 if (op == 1 && replace_in_op1)
3622 continue;
3623
3624 switch (which)
3625 {
3626 case 0:
3627 /* If we replace a MEM, make sure that we replace it for all
3628 occurrences of the same MEM in the insn. */
3629 replace_in_op0 = (op > 0 && rtx_equal_p (OP (op), OP (0)));
3630 replace_in_op1 = (op > 1 && rtx_equal_p (OP (op), OP (1)));
3631
3632 OP (op) = transcode_memory_rtx (OP (op), HL, insn);
3633 if (op == 2
3634 && MEM_P (OP (op))
3635 && ((GET_CODE (XEXP (OP (op), 0)) == REG
3636 && REGNO (XEXP (OP (op), 0)) == SP_REG)
3637 || (GET_CODE (XEXP (OP (op), 0)) == PLUS
3638 && REGNO (XEXP (XEXP (OP (op), 0), 0)) == SP_REG)))
3639 {
3640 emit_insn_before (gen_movhi (HL, gen_rtx_REG (HImode, SP_REG)), insn);
3641 OP (op) = replace_rtx (OP (op), gen_rtx_REG (HImode, SP_REG), HL);
3642 }
3643 if (replace_in_op0)
3644 OP (0) = OP (op);
3645 if (replace_in_op1)
3646 OP (1) = OP (op);
3647 break;
3648 case 1:
3649 OP (op) = transcode_memory_rtx (OP (op), DE, insn);
3650 break;
3651 case 2:
3652 OP (op) = transcode_memory_rtx (OP (op), BC, insn);
3653 break;
3654 }
3655 which ++;
3656 }
3657 }
3658
3659 MUST_BE_OK (insn);
3660 }
3661
3662 static void
3663 rl78_alloc_address_registers_div (rtx_insn * insn)
3664 {
3665 MUST_BE_OK (insn);
3666 }
3667
3668 /* Scan all insns and devirtualize them. */
3669 static void
3670 rl78_alloc_physical_registers (void)
3671 {
3672 /* During most of the compile, gcc is dealing with virtual
3673 registers. At this point, we need to assign physical registers
3674 to the virtual ones, and copy in/out as needed. */
3675
3676 rtx_insn *insn, *curr;
3677 enum attr_valloc valloc_method;
3678
3679 for (insn = get_insns (); insn; insn = curr)
3680 {
3681 int i;
3682
3683 curr = next_nonnote_nondebug_insn (insn);
3684
3685 if (INSN_P (insn)
3686 && (GET_CODE (PATTERN (insn)) == SET
3687 || GET_CODE (PATTERN (insn)) == CALL)
3688 && INSN_CODE (insn) == -1)
3689 {
3690 if (GET_CODE (SET_SRC (PATTERN (insn))) == ASM_OPERANDS)
3691 continue;
3692 i = recog (PATTERN (insn), insn, 0);
3693 if (i == -1)
3694 {
3695 debug_rtx (insn);
3696 gcc_unreachable ();
3697 }
3698 INSN_CODE (insn) = i;
3699 }
3700 }
3701
3702 cfun->machine->virt_insns_ok = 0;
3703 cfun->machine->real_insns_ok = 1;
3704
3705 clear_content_memory ();
3706
3707 for (insn = get_insns (); insn; insn = curr)
3708 {
3709 rtx pattern;
3710
3711 curr = insn ? next_nonnote_nondebug_insn (insn) : NULL;
3712
3713 if (!INSN_P (insn))
3714 {
3715 if (LABEL_P (insn))
3716 clear_content_memory ();
3717
3718 continue;
3719 }
3720
3721 if (dump_file)
3722 fprintf (dump_file, "Converting insn %d\n", INSN_UID (insn));
3723
3724 pattern = PATTERN (insn);
3725 if (GET_CODE (pattern) == PARALLEL)
3726 pattern = XVECEXP (pattern, 0, 0);
3727 if (JUMP_P (insn) || CALL_P (insn) || GET_CODE (pattern) == CALL)
3728 clear_content_memory ();
3729 if (GET_CODE (pattern) != SET
3730 && GET_CODE (pattern) != CALL)
3731 continue;
3732 if (GET_CODE (pattern) == SET
3733 && GET_CODE (SET_SRC (pattern)) == ASM_OPERANDS)
3734 continue;
3735
3736 valloc_method = get_attr_valloc (insn);
3737
3738 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3739
3740 if (valloc_method == VALLOC_MACAX)
3741 {
3742 record_content (AX, NULL_RTX);
3743 record_content (BC, NULL_RTX);
3744 record_content (DE, NULL_RTX);
3745 }
3746 else if (valloc_method == VALLOC_DIVHI)
3747 {
3748 record_content (AX, NULL_RTX);
3749 record_content (BC, NULL_RTX);
3750 }
3751 else if (valloc_method == VALLOC_DIVSI)
3752 {
3753 record_content (AX, NULL_RTX);
3754 record_content (BC, NULL_RTX);
3755 record_content (DE, NULL_RTX);
3756 record_content (HL, NULL_RTX);
3757 }
3758
3759 if (insn_ok_now (insn))
3760 continue;
3761
3762 INSN_CODE (insn) = -1;
3763
3764 if (RTX_FRAME_RELATED_P (insn))
3765 virt_insn_was_frame = 1;
3766 else
3767 virt_insn_was_frame = 0;
3768
3769 switch (valloc_method)
3770 {
3771 case VALLOC_OP1:
3772 rl78_alloc_physical_registers_op1 (insn);
3773 break;
3774 case VALLOC_OP2:
3775 rl78_alloc_physical_registers_op2 (insn);
3776 break;
3777 case VALLOC_RO1:
3778 rl78_alloc_physical_registers_ro1 (insn);
3779 break;
3780 case VALLOC_CMP:
3781 rl78_alloc_physical_registers_cmp (insn);
3782 break;
3783 case VALLOC_UMUL:
3784 rl78_alloc_physical_registers_umul (insn);
3785 record_content (AX, NULL_RTX);
3786 break;
3787 case VALLOC_MACAX:
3788 /* Macro that clobbers AX. */
3789 rl78_alloc_address_registers_macax (insn);
3790 record_content (AX, NULL_RTX);
3791 record_content (BC, NULL_RTX);
3792 record_content (DE, NULL_RTX);
3793 break;
3794 case VALLOC_DIVSI:
3795 rl78_alloc_address_registers_div (insn);
3796 record_content (AX, NULL_RTX);
3797 record_content (BC, NULL_RTX);
3798 record_content (DE, NULL_RTX);
3799 record_content (HL, NULL_RTX);
3800 break;
3801 case VALLOC_DIVHI:
3802 rl78_alloc_address_registers_div (insn);
3803 record_content (AX, NULL_RTX);
3804 record_content (BC, NULL_RTX);
3805 break;
3806 default:
3807 gcc_unreachable ();
3808 }
3809
3810 if (JUMP_P (insn) || CALL_P (insn) || GET_CODE (pattern) == CALL)
3811 clear_content_memory ();
3812 else
3813 process_postponed_content_update ();
3814 }
3815
3816 #if DEBUG_ALLOC
3817 fprintf (stderr, "\033[0m");
3818 #endif
3819 }
3820
3821 /* Add REG_DEAD notes using DEAD[reg] for rtx S which is part of INSN.
3822 This function scans for uses of registers; the last use (i.e. first
3823 encounter when scanning backwards) triggers a REG_DEAD note if the
3824 reg was previously in DEAD[]. */
3825 static void
3826 rl78_note_reg_uses (char *dead, rtx s, rtx insn)
3827 {
3828 const char *fmt;
3829 int i, r;
3830 enum rtx_code code;
3831
3832 if (!s)
3833 return;
3834
3835 code = GET_CODE (s);
3836
3837 switch (code)
3838 {
3839 /* Compare registers by number. */
3840 case REG:
3841 r = REGNO (s);
3842 if (dump_file)
3843 {
3844 fprintf (dump_file, "note use reg %d size %d on insn %d\n",
3845 r, GET_MODE_SIZE (GET_MODE (s)), INSN_UID (insn));
3846 print_rtl_single (dump_file, s);
3847 }
3848 if (dead [r])
3849 add_reg_note (insn, REG_DEAD, gen_rtx_REG (GET_MODE (s), r));
3850 for (i = 0; i < GET_MODE_SIZE (GET_MODE (s)); i ++)
3851 dead [r + i] = 0;
3852 return;
3853
3854 /* These codes have no constituent expressions
3855 and are unique. */
3856 case SCRATCH:
3857 case CC0:
3858 case PC:
3859 return;
3860
3861 case CONST_INT:
3862 case CONST_VECTOR:
3863 case CONST_DOUBLE:
3864 case CONST_FIXED:
3865 /* These are kept unique for a given value. */
3866 return;
3867
3868 default:
3869 break;
3870 }
3871
3872 fmt = GET_RTX_FORMAT (code);
3873
3874 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3875 {
3876 if (fmt[i] == 'E')
3877 {
3878 int j;
3879 for (j = XVECLEN (s, i) - 1; j >= 0; j--)
3880 rl78_note_reg_uses (dead, XVECEXP (s, i, j), insn);
3881 }
3882 else if (fmt[i] == 'e')
3883 rl78_note_reg_uses (dead, XEXP (s, i), insn);
3884 }
3885 }
3886
3887 /* Like the previous function, but scan for SETs instead. */
3888 static void
3889 rl78_note_reg_set (char *dead, rtx d, rtx insn)
3890 {
3891 int r, i;
3892 bool is_dead;
3893 if (GET_CODE (d) == MEM)
3894 rl78_note_reg_uses (dead, XEXP (d, 0), insn);
3895
3896 if (GET_CODE (d) != REG)
3897 return;
3898
3899 /* Do not mark the reg unused unless all QImode parts of it are dead. */
3900 r = REGNO (d);
3901 is_dead = true;
3902 for (i = 0; i < GET_MODE_SIZE (GET_MODE (d)); i ++)
3903 if (!dead [r + i])
3904 is_dead = false;
3905 if (is_dead)
3906 add_reg_note (insn, REG_UNUSED, gen_rtx_REG (GET_MODE (d), r));
3907 if (dump_file)
3908 fprintf (dump_file, "note set reg %d size %d\n", r, GET_MODE_SIZE (GET_MODE (d)));
3909 for (i = 0; i < GET_MODE_SIZE (GET_MODE (d)); i ++)
3910 dead [r + i] = 1;
3911 }
3912
3913 /* This is a rather crude register death pass. Death status is reset
3914 at every jump or call insn. */
3915 static void
3916 rl78_calculate_death_notes (void)
3917 {
3918 char dead[FIRST_PSEUDO_REGISTER];
3919 rtx p, s, d;
3920 rtx_insn *insn;
3921 int i;
3922
3923 memset (dead, 0, sizeof (dead));
3924
3925 for (insn = get_last_insn ();
3926 insn;
3927 insn = prev_nonnote_nondebug_insn (insn))
3928 {
3929 if (dump_file)
3930 {
3931 fprintf (dump_file, "\n--------------------------------------------------");
3932 fprintf (dump_file, "\nDead:");
3933 for (i = 0; i < FIRST_PSEUDO_REGISTER; i ++)
3934 if (dead[i])
3935 fprintf (dump_file, " %s", reg_names[i]);
3936 fprintf (dump_file, "\n");
3937 print_rtl_single (dump_file, insn);
3938 }
3939
3940 switch (GET_CODE (insn))
3941 {
3942 case INSN:
3943 p = PATTERN (insn);
3944 if (GET_CODE (p) == PARALLEL)
3945 {
3946 rtx q = XVECEXP (p, 0, 1);
3947
3948 /* This happens with the DIV patterns. */
3949 if (GET_CODE (q) == SET)
3950 {
3951 s = SET_SRC (q);
3952 d = SET_DEST (q);
3953 rl78_note_reg_set (dead, d, insn);
3954 rl78_note_reg_uses (dead, s, insn);
3955
3956 }
3957 p = XVECEXP (p, 0, 0);
3958 }
3959
3960 switch (GET_CODE (p))
3961 {
3962 case SET:
3963 s = SET_SRC (p);
3964 d = SET_DEST (p);
3965 rl78_note_reg_set (dead, d, insn);
3966 rl78_note_reg_uses (dead, s, insn);
3967 break;
3968
3969 case USE:
3970 rl78_note_reg_uses (dead, p, insn);
3971 break;
3972
3973 default:
3974 break;
3975 }
3976 break;
3977
3978 case JUMP_INSN:
3979 if (INSN_CODE (insn) == CODE_FOR_rl78_return)
3980 {
3981 memset (dead, 1, sizeof (dead));
3982 /* We expect a USE just prior to this, which will mark
3983 the actual return registers. The USE will have a
3984 death note, but we aren't going to be modifying it
3985 after this pass. */
3986 break;
3987 }
3988 /* FALLTHRU */
3989 case CALL_INSN:
3990 memset (dead, 0, sizeof (dead));
3991 break;
3992
3993 default:
3994 break;
3995 }
3996 if (dump_file)
3997 print_rtl_single (dump_file, insn);
3998 }
3999 }
4000
4001 /* Helper function to reset the origins in RP and the age in AGE for
4002 all registers. */
4003 static void
4004 reset_origins (int *rp, int *age)
4005 {
4006 int i;
4007 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4008 {
4009 rp[i] = i;
4010 age[i] = 0;
4011 }
4012 }
4013
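/* Record the effect of the SET pattern PAT (part of INSN) on the ORIGINS
   and AGE tables.  If PAT merely copies a value that its destination is
   already known to hold, INSN is deleted.  Otherwise, when an equivalent
   value is already available in a bank 0 register, the source is
   rewritten to use that register before the origin entries of the
   destination are updated.  */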
4014 static void
4015 set_origin (rtx pat, rtx_insn * insn, int * origins, int * age)
4016 {
4017 rtx src = SET_SRC (pat);
4018 rtx dest = SET_DEST (pat);
4019 int mb = GET_MODE_SIZE (GET_MODE (dest));
4020 int i;
4021
4022 if (GET_CODE (dest) == REG)
4023 {
4024 int dr = REGNO (dest);
4025
4026 if (GET_CODE (src) == REG)
4027 {
4028 int sr = REGNO (src);
4029 bool same = true;
4030 int best_age, best_reg;
4031
4032 /* See if the copy is not needed. */
4033 for (i = 0; i < mb; i ++)
4034 if (origins[dr + i] != origins[sr + i])
4035 same = false;
4036
4037 if (same)
4038 {
4039 if (dump_file)
4040 fprintf (dump_file, "deleting because dest already has correct value\n");
4041 delete_insn (insn);
4042 return;
4043 }
4044
4045 if (dr < 8 || sr >= 8)
4046 {
4047 int ar;
4048
4049 best_age = -1;
4050 best_reg = -1;
4051
4052 /* See if the copy can be made from another
4053 bank 0 register instead of the
4054 virtual src register. */
4055 for (ar = 0; ar < 8; ar += mb)
4056 {
4057 same = true;
4058
4059 for (i = 0; i < mb; i ++)
4060 if (origins[ar + i] != origins[sr + i])
4061 same = false;
4062
4063 /* The chip has some reg-reg move limitations. */
4064 if (mb == 1 && dr > 3)
4065 same = false;
4066
4067 if (same)
4068 {
4069 if (best_age == -1 || best_age > age[sr + i])
4070 {
4071 best_age = age[sr + i];
4072 best_reg = sr;
4073 }
4074 }
4075 }
4076
4077 if (best_reg != -1)
4078 {
4079 /* FIXME: copy debug info too. */
4080 SET_SRC (pat) = gen_rtx_REG (GET_MODE (src), best_reg);
4081 sr = best_reg;
4082 }
4083 }
4084
4085 for (i = 0; i < mb; i++)
4086 {
4087 origins[dr + i] = origins[sr + i];
4088 age[dr + i] = age[sr + i] + 1;
4089 }
4090 }
4091 else
4092 {
4093 /* The destination is computed, its origin is itself. */
4094 if (dump_file)
4095 fprintf (dump_file, "resetting origin of r%d for %d byte%s\n",
4096 dr, mb, mb == 1 ? "" : "s");
4097
4098 for (i = 0; i < mb; i ++)
4099 {
4100 origins[dr + i] = dr + i;
4101 age[dr + i] = 0;
4102 }
4103 }
4104
4105 /* Any registers marked with that reg as an origin are reset. */
4106 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4107 if (origins[i] >= dr && origins[i] < dr + mb)
4108 {
4109 origins[i] = i;
4110 age[i] = 0;
4111 }
4112 }
4113
4114 /* Special case - our MUL patterns use AX and sometimes BC. */
4115 if (get_attr_valloc (insn) == VALLOC_MACAX)
4116 {
4117 if (dump_file)
4118 fprintf (dump_file, "Resetting origin of AX/BC for MUL pattern.\n");
4119
4120 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4121 if (i <= 3 || origins[i] <= 3)
4122 {
4123 origins[i] = i;
4124 age[i] = 0;
4125 }
4126 }
4127 else if (get_attr_valloc (insn) == VALLOC_DIVHI)
4128 {
4129 if (dump_file)
4130 fprintf (dump_file, "Resetting origin of AX/DE for DIVHI pattern.\n");
4131
4132 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4133 if (i == A_REG
4134 || i == X_REG
4135 || i == D_REG
4136 || i == E_REG
4137 || origins[i] == A_REG
4138 || origins[i] == X_REG
4139 || origins[i] == D_REG
4140 || origins[i] == E_REG)
4141 {
4142 origins[i] = i;
4143 age[i] = 0;
4144 }
4145 }
4146 else if (get_attr_valloc (insn) == VALLOC_DIVSI)
4147 {
4148 if (dump_file)
4149 fprintf (dump_file, "Resetting origin of AX/BC/DE/HL for DIVSI pattern.\n");
4150
4151 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4152 if (i <= 7 || origins[i] <= 7)
4153 {
4154 origins[i] = i;
4155 age[i] = 0;
4156 }
4157 }
4158
4159 if (GET_CODE (src) == ASHIFT
4160 || GET_CODE (src) == ASHIFTRT
4161 || GET_CODE (src) == LSHIFTRT)
4162 {
4163 rtx count = XEXP (src, 1);
4164
4165 if (GET_CODE (count) == REG)
4166 {
4167 /* Special case - our pattern clobbers the count register. */
4168 int r = REGNO (count);
4169
4170 if (dump_file)
4171 fprintf (dump_file, "Resetting origin of r%d for shift.\n", r);
4172
4173 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4174 if (i == r || origins[i] == r)
4175 {
4176 origins[i] = i;
4177 age[i] = 0;
4178 }
4179 }
4180 }
4181 }
4182
4183 /* The idea behind this optimization is to look for cases where we
4184 move data from A to B to C, and instead move from A to B, and A to
4185 C. If B is a virtual register or memory, this is a big win on its
4186 own. If B turns out to be unneeded after this, it's a bigger win.
4187 For each register, we try to determine where its value originally
4188 came from, if it's propagated purely through moves (and not
4189 computes). The ORIGINS[] array has the regno for the "origin" of
4190 the value in the [regno] it's indexed by. */
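/* A sketch of the intent (register names purely illustrative):

	movw bc, ax	; origins[B] = A, origins[C] = X
	movw de, bc	; can be rewritten as "movw de, ax"

   If nothing else reads BC afterwards, the first move then becomes dead
   and is deleted by rl78_remove_unused_sets once the REG_UNUSED notes
   have been computed.  */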
4191 static void
4192 rl78_propogate_register_origins (void)
4193 {
4194 int origins[FIRST_PSEUDO_REGISTER];
4195 int age[FIRST_PSEUDO_REGISTER];
4196 int i;
4197 rtx_insn *insn, *ninsn = NULL;
4198 rtx pat;
4199
4200 reset_origins (origins, age);
4201
4202 for (insn = get_insns (); insn; insn = ninsn)
4203 {
4204 ninsn = next_nonnote_nondebug_insn (insn);
4205
4206 if (dump_file)
4207 {
4208 fprintf (dump_file, "\n");
4209 fprintf (dump_file, "Origins:");
4210 for (i = 0; i < FIRST_PSEUDO_REGISTER; i ++)
4211 if (origins[i] != i)
4212 fprintf (dump_file, " r%d=r%d", i, origins[i]);
4213 fprintf (dump_file, "\n");
4214 print_rtl_single (dump_file, insn);
4215 }
4216
4217 switch (GET_CODE (insn))
4218 {
4219 case CODE_LABEL:
4220 case BARRIER:
4221 case CALL_INSN:
4222 case JUMP_INSN:
4223 reset_origins (origins, age);
4224 break;
4225
4226 default:
4227 break;
4228
4229 case INSN:
4230 pat = PATTERN (insn);
4231
4232 if (GET_CODE (pat) == PARALLEL)
4233 {
4234 rtx clobber = XVECEXP (pat, 0, 1);
4235 pat = XVECEXP (pat, 0, 0);
4236 if (GET_CODE (clobber) == CLOBBER
4237 && GET_CODE (XEXP (clobber, 0)) == REG)
4238 {
4239 int cr = REGNO (XEXP (clobber, 0));
4240 int mb = GET_MODE_SIZE (GET_MODE (XEXP (clobber, 0)));
4241 if (dump_file)
4242 fprintf (dump_file, "reset origins of %d regs at %d\n", mb, cr);
4243 for (i = 0; i < mb; i++)
4244 {
4245 origins[cr + i] = cr + i;
4246 age[cr + i] = 0;
4247 }
4248 }
4249 /* This happens with the DIV patterns. */
4250 else if (GET_CODE (clobber) == SET)
4251 {
4252 set_origin (clobber, insn, origins, age);
4253 }
4254 else
4255 break;
4256 }
4257
4258 if (GET_CODE (pat) == SET)
4259 {
4260 set_origin (pat, insn, origins, age);
4261 }
4262 else if (GET_CODE (pat) == CLOBBER
4263 && GET_CODE (XEXP (pat, 0)) == REG)
4264 {
4265 if (REG_P (XEXP (pat, 0)))
4266 {
4267 unsigned int reg = REGNO (XEXP (pat, 0));
4268
4269 origins[reg] = reg;
4270 age[reg] = 0;
4271 }
4272 }
4273 }
4274 }
4275 }
4276
4277 /* Remove any SETs where the destination is unneeded. */
4278 static void
4279 rl78_remove_unused_sets (void)
4280 {
4281 rtx_insn *insn, *ninsn = NULL;
4282 rtx dest;
4283
4284 for (insn = get_insns (); insn; insn = ninsn)
4285 {
4286 ninsn = next_nonnote_nondebug_insn (insn);
4287
4288 rtx set = single_set (insn);
4289 if (set == NULL)
4290 continue;
4291
4292 dest = SET_DEST (set);
4293
4294 if (GET_CODE (dest) != REG || REGNO (dest) > 23)
4295 continue;
4296
4297 if (find_regno_note (insn, REG_UNUSED, REGNO (dest)))
4298 {
4299 if (dump_file)
4300 fprintf (dump_file, "deleting because the set register is never used.\n");
4301 delete_insn (insn);
4302 }
4303 }
4304 }
4305
4306 /* This is the top of the devirtualization pass. */
4307 static void
4308 rl78_reorg (void)
4309 {
4310 /* split2 only happens when optimizing, but we need all movSIs to be
4311 split now. */
4312 if (optimize <= 0)
4313 split_all_insns ();
4314
4315 rl78_alloc_physical_registers ();
4316
4317 if (dump_file)
4318 {
4319 fprintf (dump_file, "\n================DEVIRT:=AFTER=ALLOC=PHYSICAL=REGISTERS================\n");
4320 print_rtl_with_bb (dump_file, get_insns (), 0);
4321 }
4322
4323 rl78_propogate_register_origins ();
4324 rl78_calculate_death_notes ();
4325
4326 if (dump_file)
4327 {
4328 fprintf (dump_file, "\n================DEVIRT:=AFTER=PROPOGATION=============================\n");
4329 print_rtl_with_bb (dump_file, get_insns (), 0);
4330 fprintf (dump_file, "\n======================================================================\n");
4331 }
4332
4333 rl78_remove_unused_sets ();
4334
4335 /* The code after devirtualizing has changed so much that at this point
4336 we might as well just rescan everything. Note that
4337 df_rescan_all_insns is not going to help here because it does not
4338 touch the artificial uses and defs. */
4339 df_finish_pass (true);
4340 if (optimize > 1)
4341 df_live_add_problem ();
4342 df_scan_alloc (NULL);
4343 df_scan_blocks ();
4344
4345 if (optimize)
4346 df_analyze ();
4347 }
4348
4349 #undef TARGET_RETURN_IN_MEMORY
4350 #define TARGET_RETURN_IN_MEMORY rl78_return_in_memory
4351
4352 static bool
4353 rl78_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
4354 {
4355 const HOST_WIDE_INT size = int_size_in_bytes (type);
4356 return (size == -1 || size > 8);
4357 }
4358
4359
4360 #undef TARGET_RTX_COSTS
4361 #define TARGET_RTX_COSTS rl78_rtx_costs
4362
4363 static bool
4364 rl78_rtx_costs (rtx x,
4365 machine_mode mode,
4366 int outer_code ATTRIBUTE_UNUSED,
4367 int opno ATTRIBUTE_UNUSED,
4368 int * total,
4369 bool speed ATTRIBUTE_UNUSED)
4370 {
4371 int code = GET_CODE (x);
4372
4373 if (code == IF_THEN_ELSE)
4374 {
4375 *total = COSTS_N_INSNS (10);
4376 return true;
4377 }
4378
4379 if (mode == HImode)
4380 {
4381 if (code == MULT && ! speed)
4382 {
4383 * total = COSTS_N_INSNS (8);
4384 return true;
4385 }
4386 return false;
4387 }
4388
4389 if (mode == SImode)
4390 {
4391 switch (code)
4392 {
4393 case MULT:
4394 if (! speed)
4395 /* If we are compiling for space then we do not want to use the
4396 inline SImode multiplication patterns or shift sequences.
4397 The cost is not set to 1 or 5 however as we have to allow for
4398 the possibility that we might be converting a leaf function
4399 into a non-leaf function. (There is no way to tell here).
4400 A value of 13 seems to be a reasonable compromise for the
4401 moment. */
4402 * total = COSTS_N_INSNS (13);
4403 else if (RL78_MUL_G14)
4404 *total = COSTS_N_INSNS (14);
4405 else if (RL78_MUL_G13)
4406 *total = COSTS_N_INSNS (29);
4407 else
4408 *total = COSTS_N_INSNS (500);
4409 return true;
4410
4411 case PLUS:
4412 *total = COSTS_N_INSNS (8);
4413 return true;
4414
4415 case ASHIFT:
4416 case ASHIFTRT:
4417 case LSHIFTRT:
4418 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4419 {
4420 switch (INTVAL (XEXP (x, 1)))
4421 {
4422 case 0: *total = COSTS_N_INSNS (0); break;
4423 case 1: *total = COSTS_N_INSNS (6); break;
4424 case 2: case 3: case 4: case 5: case 6: case 7:
4425 *total = COSTS_N_INSNS (10); break;
4426 case 8: *total = COSTS_N_INSNS (6); break;
4427 case 9: case 10: case 11: case 12: case 13: case 14: case 15:
4428 *total = COSTS_N_INSNS (10); break;
4429 case 16: *total = COSTS_N_INSNS (3); break;
4430 case 17: case 18: case 19: case 20: case 21: case 22: case 23:
4431 *total = COSTS_N_INSNS (4); break;
4432 case 24: *total = COSTS_N_INSNS (4); break;
4433 case 25: case 26: case 27: case 28: case 29: case 30: case 31:
4434 *total = COSTS_N_INSNS (5); break;
4435 }
4436 }
4437 else
4438 *total = COSTS_N_INSNS (10+4*16);
4439 return true;
4440
4441 default:
4442 break;
4443 }
4444 }
4445 return false;
4446 }
4447
4448
4449 static GTY(()) section * saddr_section;
4450 static GTY(()) section * frodata_section;
4451
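/* Return non-zero if X (or the address inside the MEM X) refers to a
   symbol that rl78_encode_section_info tagged for short (saddr)
   addressing, i.e. whose assembler name carries the "@s." prefix.  */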
4452 int
4453 rl78_saddr_p (rtx x)
4454 {
4455 const char * c;
4456
4457 if (MEM_P (x))
4458 x = XEXP (x, 0);
4459 if (GET_CODE (x) == PLUS)
4460 x = XEXP (x, 0);
4461 if (GET_CODE (x) != SYMBOL_REF)
4462 return 0;
4463
4464 c = XSTR (x, 0);
4465 if (memcmp (c, "@s.", 3) == 0)
4466 return 1;
4467
4468 return 0;
4469 }
4470
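/* Return non-zero if X (or the address inside the MEM X) is a constant
   address whose second byte is 0xFF, i.e. one that falls in the special
   function register (SFR) area.  */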
4471 int
4472 rl78_sfr_p (rtx x)
4473 {
4474 if (MEM_P (x))
4475 x = XEXP (x, 0);
4476 if (GET_CODE (x) != CONST_INT)
4477 return 0;
4478
4479 if ((INTVAL (x) & 0xFF00) != 0xFF00)
4480 return 0;
4481
4482 return 1;
4483 }
4484
4485 #undef TARGET_STRIP_NAME_ENCODING
4486 #define TARGET_STRIP_NAME_ENCODING rl78_strip_name_encoding
4487
4488 static const char *
4489 rl78_strip_name_encoding (const char * sym)
4490 {
4491 while (1)
4492 {
4493 if (*sym == '*')
4494 sym++;
4495 else if (*sym == '@' && sym[2] == '.')
4496 sym += 3;
4497 else
4498 return sym;
4499 }
4500 }
4501
4502 /* Like rl78_strip_name_encoding, but does not strip leading asterisks. This
4503 is important if the stripped name is going to be passed to assemble_name()
4504 as that handles asterisk prefixed names in a special manner. */
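/* For example (names illustrative): "@s.foo" is stripped to "foo" by both
   routines, whereas "*foo" is returned unchanged here but would be
   stripped to "foo" by rl78_strip_name_encoding.  */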
4505
4506 static const char *
4507 rl78_strip_nonasm_name_encoding (const char * sym)
4508 {
4509 while (1)
4510 {
4511 if (*sym == '@' && sym[2] == '.')
4512 sym += 3;
4513 else
4514 return sym;
4515 }
4516 }
4517
4518
4519 static int
4520 rl78_attrlist_to_encoding (tree list, tree decl ATTRIBUTE_UNUSED)
4521 {
4522 while (list)
4523 {
4524 if (is_attribute_p ("saddr", TREE_PURPOSE (list)))
4525 return 's';
4526 list = TREE_CHAIN (list);
4527 }
4528
4529 return 0;
4530 }
4531
4532 #define RL78_ATTRIBUTES(decl) \
4533 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
4534 : DECL_ATTRIBUTES (decl) \
4535 ? (DECL_ATTRIBUTES (decl)) \
4536 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
4537
4538 #undef TARGET_ENCODE_SECTION_INFO
4539 #define TARGET_ENCODE_SECTION_INFO rl78_encode_section_info
4540
4541 static void
4542 rl78_encode_section_info (tree decl, rtx rtl, int first)
4543 {
4544 rtx rtlname;
4545 const char * oldname;
4546 char encoding;
4547 char * newname;
4548 tree idp;
4549 tree type;
4550 tree rl78_attributes;
4551
4552 if (!first)
4553 return;
4554
4555 rtlname = XEXP (rtl, 0);
4556
4557 if (GET_CODE (rtlname) == SYMBOL_REF)
4558 oldname = XSTR (rtlname, 0);
4559 else if (GET_CODE (rtlname) == MEM
4560 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4561 oldname = XSTR (XEXP (rtlname, 0), 0);
4562 else
4563 gcc_unreachable ();
4564
4565 type = TREE_TYPE (decl);
4566 if (type == error_mark_node)
4567 return;
4568 if (! DECL_P (decl))
4569 return;
4570 rl78_attributes = RL78_ATTRIBUTES (decl);
4571
4572 encoding = rl78_attrlist_to_encoding (rl78_attributes, decl);
4573
4574 if (encoding)
4575 {
4576 newname = (char *) alloca (strlen (oldname) + 4);
4577 sprintf (newname, "@%c.%s", encoding, oldname);
4578 idp = get_identifier (newname);
4579 XEXP (rtl, 0) =
4580 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4581 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4582 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
4583 }
4584 }
4585
4586 #undef TARGET_ASM_INIT_SECTIONS
4587 #define TARGET_ASM_INIT_SECTIONS rl78_asm_init_sections
4588
4589 static void
4590 rl78_asm_init_sections (void)
4591 {
4592 saddr_section
4593 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
4594 "\t.section .saddr,\"aw\",@progbits");
4595 frodata_section
4596 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
4597 "\t.section .frodata,\"aw\",@progbits");
4598 }
4599
4600 #undef TARGET_ASM_SELECT_SECTION
4601 #define TARGET_ASM_SELECT_SECTION rl78_select_section
4602
4603 static section *
4604 rl78_select_section (tree decl,
4605 int reloc,
4606 unsigned HOST_WIDE_INT align)
4607 {
4608 int readonly = 1;
4609
4610 switch (TREE_CODE (decl))
4611 {
4612 case VAR_DECL:
4613 if (!TREE_READONLY (decl)
4614 || TREE_SIDE_EFFECTS (decl)
4615 || !DECL_INITIAL (decl)
4616 || (DECL_INITIAL (decl) != error_mark_node
4617 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4618 readonly = 0;
4619 break;
4620 case CONSTRUCTOR:
4621 if (! TREE_CONSTANT (decl))
4622 readonly = 0;
4623 break;
4624
4625 default:
4626 break;
4627 }
4628
4629 if (TREE_CODE (decl) == VAR_DECL)
4630 {
4631 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4632
4633 if (name[0] == '@' && name[2] == '.')
4634 switch (name[1])
4635 {
4636 case 's':
4637 return saddr_section;
4638 }
4639
4640 if (TYPE_ADDR_SPACE (TREE_TYPE (decl)) == ADDR_SPACE_FAR
4641 && readonly)
4642 {
4643 return frodata_section;
4644 }
4645 }
4646
4647 if (readonly)
4648 return TARGET_ES0 ? frodata_section : readonly_data_section;
4649
4650 switch (categorize_decl_for_section (decl, reloc))
4651 {
4652 case SECCAT_TEXT: return text_section;
4653 case SECCAT_DATA: return data_section;
4654 case SECCAT_BSS: return bss_section;
4655 case SECCAT_RODATA: return TARGET_ES0 ? frodata_section : readonly_data_section;
4656 default:
4657 return default_select_section (decl, reloc, align);
4658 }
4659 }
4660
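/* Output the assembler form of the label STR to FILE: strip any RL78
   name encoding first, then prefix user symbols (names not starting with
   '.') with the user label prefix.  */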
4661 void
4662 rl78_output_labelref (FILE *file, const char *str)
4663 {
4664 const char *str2;
4665
4666 str2 = targetm.strip_name_encoding (str);
4667 if (str2[0] != '.')
4668 fputs (user_label_prefix, file);
4669 fputs (str2, file);
4670 }
4671
4672 void
4673 rl78_output_aligned_common (FILE *stream,
4674 tree decl ATTRIBUTE_UNUSED,
4675 const char *name,
4676 int size, int align, int global)
4677 {
4678 /* We intentionally don't use rl78_section_tag() here. */
4679 if (name[0] == '@' && name[2] == '.')
4680 {
4681 const char *sec = 0;
4682 switch (name[1])
4683 {
4684 case 's':
4685 switch_to_section (saddr_section);
4686 sec = ".saddr";
4687 break;
4688 }
4689 if (sec)
4690 {
4691 const char *name2;
4692 int p2align = 0;
4693
4694 while (align > BITS_PER_UNIT)
4695 {
4696 align /= 2;
4697 p2align ++;
4698 }
4699 name2 = targetm.strip_name_encoding (name);
4700 if (global)
4701 fprintf (stream, "\t.global\t_%s\n", name2);
4702 fprintf (stream, "\t.p2align %d\n", p2align);
4703 fprintf (stream, "\t.type\t_%s,@object\n", name2);
4704 fprintf (stream, "\t.size\t_%s,%d\n", name2, size);
4705 fprintf (stream, "_%s:\n\t.zero\t%d\n", name2, size);
4706 return;
4707 }
4708 }
4709
4710 if (!global)
4711 {
4712 fprintf (stream, "\t.local\t");
4713 assemble_name (stream, name);
4714 fprintf (stream, "\n");
4715 }
4716 fprintf (stream, "\t.comm\t");
4717 assemble_name (stream, name);
4718 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4719 }
4720
4721 #undef TARGET_INSERT_ATTRIBUTES
4722 #define TARGET_INSERT_ATTRIBUTES rl78_insert_attributes
4723
4724 static void
4725 rl78_insert_attributes (tree decl, tree *attributes ATTRIBUTE_UNUSED)
4726 {
4727 if (TARGET_ES0
4728 && TREE_CODE (decl) == VAR_DECL
4729 && TREE_READONLY (decl)
4730 && TREE_ADDRESSABLE (decl)
4731 && TYPE_ADDR_SPACE (TREE_TYPE (decl)) == ADDR_SPACE_GENERIC)
4732 {
4733 tree type = TREE_TYPE (decl);
4734 tree attr = TYPE_ATTRIBUTES (type);
4735 int q = TYPE_QUALS_NO_ADDR_SPACE (type) | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_FAR);
4736
4737 TREE_TYPE (decl) = build_type_attribute_qual_variant (type, attr, q);
4738 }
4739 }
4740
4741 #undef TARGET_ASM_INTEGER
4742 #define TARGET_ASM_INTEGER rl78_asm_out_integer
4743
4744 static bool
4745 rl78_asm_out_integer (rtx x, unsigned int size, int aligned_p)
4746 {
4747 if (default_assemble_integer (x, size, aligned_p))
4748 return true;
4749
4750 if (size == 4)
4751 {
4752 assemble_integer_with_op (".long\t", x);
4753 return true;
4754 }
4755
4756 return false;
4757 }
4758
4759 #undef TARGET_UNWIND_WORD_MODE
4760 #define TARGET_UNWIND_WORD_MODE rl78_unwind_word_mode
4761
4762 static scalar_int_mode
4763 rl78_unwind_word_mode (void)
4764 {
4765 return HImode;
4766 }
4767
4768 #ifndef USE_COLLECT2
4769 #undef TARGET_ASM_CONSTRUCTOR
4770 #define TARGET_ASM_CONSTRUCTOR rl78_asm_constructor
4771 #undef TARGET_ASM_DESTRUCTOR
4772 #define TARGET_ASM_DESTRUCTOR rl78_asm_destructor
4773
4774 static void
4775 rl78_asm_ctor_dtor (rtx symbol, int priority, bool is_ctor)
4776 {
4777 section *sec;
4778
4779 if (priority != DEFAULT_INIT_PRIORITY)
4780 {
4781 /* This section of the function is based upon code copied
4782 from: gcc/varasm.c:get_cdtor_priority_section(). */
4783 char buf[18];
4784
4785 sprintf (buf, "%s.%.5u", is_ctor ? ".ctors" : ".dtors",
4786 MAX_INIT_PRIORITY - priority);
4787 sec = get_section (buf, 0, NULL);
4788 }
4789 else
4790 sec = is_ctor ? ctors_section : dtors_section;
4791
4792 assemble_addr_to_section (symbol, sec);
4793 }
4794
4795 static void
4796 rl78_asm_constructor (rtx symbol, int priority)
4797 {
4798 rl78_asm_ctor_dtor (symbol, priority, true);
4799 }
4800
4801 static void
4802 rl78_asm_destructor (rtx symbol, int priority)
4803 {
4804 rl78_asm_ctor_dtor (symbol, priority, false);
4805 }
4806 #endif /* ! USE_COLLECT2 */
4807
4808 /* Scan backwards through the insn chain looking to see if the flags
4809 have been set for a comparison of OP against OPERAND. Start with
4810 the insn *before* the current insn. */
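/* In other words, we return true only when the most recent insn that
   wrote OPERAND is one whose pattern is known (via the update_Z insn
   attribute) to leave the Z flag reflecting that result, and no
   intervening insn or label could have clobbered the flag.  */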
4811
4812 bool
4813 rl78_flags_already_set (rtx op, rtx operand)
4814 {
4815 /* We only track the Z flag. */
4816 if (GET_CODE (op) != EQ && GET_CODE (op) != NE)
4817 return false;
4818
4819 /* This should not happen, but let's be paranoid. */
4820 if (current_output_insn == NULL_RTX)
4821 return false;
4822
4823 rtx_insn *insn;
4824 bool res = false;
4825
4826 for (insn = prev_nonnote_nondebug_insn (current_output_insn);
4827 insn != NULL_RTX;
4828 insn = prev_nonnote_nondebug_insn (insn))
4829 {
4830 if (LABEL_P (insn))
4831 break;
4832
4833 if (! INSN_P (insn))
4834 continue;
4835
4836 /* Make sure that the insn can be recognized. */
4837 if (recog_memoized (insn) == -1)
4838 continue;
4839
4840 enum attr_update_Z updated = get_attr_update_Z (insn);
4841
4842 rtx set = single_set (insn);
4843 bool must_break = (set != NULL_RTX && rtx_equal_p (operand, SET_DEST (set)));
4844
4845 switch (updated)
4846 {
4847 case UPDATE_Z_NO:
4848 break;
4849 case UPDATE_Z_CLOBBER:
4850 must_break = true;
4851 break;
4852 case UPDATE_Z_UPDATE_Z:
4853 res = must_break;
4854 must_break = true;
4855 break;
4856 default:
4857 gcc_unreachable ();
4858 }
4859
4860 if (must_break)
4861 break;
4862 }
4863
4864 /* We have to re-recognize the current insn as the call(s) to
4865 get_attr_update_Z() above will have overwritten the recog_data cache. */
4866 recog_memoized (current_output_insn);
4867 cleanup_subreg_operands (current_output_insn);
4868 constrain_operands_cached (current_output_insn, 1);
4869
4870 return res;
4871 }
4872
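/* Build the output template for an SImode addition, synthesised from two
   16-bit ADDW operations; the carry out of the low half is folded into
   the high half with the SKNC/INCW pair.  OPERANDS are the insn operands
   and ALTERNATIVE selects which constraint alternative was matched.  The
   template is assembled in fmt_buffer because the high-word addition
   varies with the constant being added.  */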
4873 const char *
4874 rl78_addsi3_internal (rtx * operands, unsigned int alternative)
4875 {
4876 const char *addH2 = "addw ax, %H2\n\t";
4877
4878 /* If we are adding in a constant symbolic address when -mes0
4879 is active then we know that the address must be <64K and
4880 that it is invalid to access anything above 64K relative to
4881 this address. So we can skip adding in the high bytes. */
4882 if (TARGET_ES0
4883 && GET_CODE (operands[2]) == SYMBOL_REF
4884 && TREE_CODE (SYMBOL_REF_DECL (operands[2])) == VAR_DECL
4885 && TREE_READONLY (SYMBOL_REF_DECL (operands[2]))
4886 && ! TREE_SIDE_EFFECTS (SYMBOL_REF_DECL (operands[2])))
4887 return "movw ax, %h1\n\taddw ax, %h2\n\tmovw %h0, ax";
4888
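/* When the high word of the constant is 0, +1 or -1 the second ADDW can
   be dropped entirely or replaced by a single INCW/DECW; the carry out
   of the low half is still handled by the SKNC/INCW pair in the
   template.  */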
4889 if (CONST_INT_P (operands[2]))
4890 {
4891 if ((INTVAL (operands[2]) & 0xFFFF0000) == 0)
4892 {
4893 addH2 = "";
4894 }
4895 else if ((INTVAL (operands[2]) & 0xFFFF0000) == 0x00010000)
4896 {
4897 addH2 = "incw ax\n\t";
4898 }
4899 else if ((INTVAL (operands[2]) & 0xFFFF0000) == 0xFFFF0000)
4900 {
4901 addH2 = "decw ax\n\t";
4902 }
4903 }
4904
4905 switch (alternative)
4906 {
4907 case 0:
4908 case 1:
4909 snprintf (fmt_buffer, sizeof (fmt_buffer),
4910 "movw ax, %%h1\n\taddw ax, %%h2\n\tmovw %%h0, ax\n\tmovw ax, %%H1\n\tsknc\n\tincw ax\n\t%smovw %%H0,ax", addH2);
4911 break;
4912 case 2:
4913 snprintf (fmt_buffer, sizeof (fmt_buffer),
4914 "movw ax, %%h1\n\taddw ax, %%h2\n\tmovw bc, ax\n\tmovw ax, %%H1\n\tsknc\n\tincw ax\n\t%smovw %%H0, ax\n\tmovw ax, bc\n\tmovw %%h0, ax", addH2);
4915 break;
4916 default:
4917 gcc_unreachable ();
4918 }
4919
4920 return fmt_buffer;
4921 }
4922
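/* Emit a call to the library routine NAME taking NOPERANDS - 1 source
   operands of mode SMODE (from OPERANDS[1] onwards) and producing a
   DMODE result, then wrap the sequence with emit_libcall_block so that
   the value stored into OPERANDS[0] carries a REG_EQUAL note describing
   the operation as CODE.  A hypothetical use (libcall name and rtx code
   are illustrative only) might be:

     rl78_emit_libcall ("__divsi3", DIV, SImode, SImode, 3, operands);  */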
4923 rtx
4924 rl78_emit_libcall (const char *name, enum rtx_code code,
4925 enum machine_mode dmode, enum machine_mode smode,
4926 int noperands, rtx *operands)
4927 {
4928 rtx ret;
4929 rtx_insn *insns;
4930 rtx libcall;
4931 rtx equiv;
4932
4933 start_sequence ();
4934 libcall = gen_rtx_SYMBOL_REF (Pmode, name);
4935
4936 switch (noperands)
4937 {
4938 case 2:
4939 ret = emit_library_call_value (libcall, NULL_RTX, LCT_CONST,
4940 dmode, operands[1], smode);
4941 equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
4942 break;
4943
4944 case 3:
4945 ret = emit_library_call_value (libcall, NULL_RTX,
4946 LCT_CONST, dmode,
4947 operands[1], smode, operands[2],
4948 smode);
4949 equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
4950 break;
4951
4952 default:
4953 gcc_unreachable ();
4954 }
4955
4956 insns = get_insns ();
4957 end_sequence ();
4958 emit_libcall_block (insns, operands[0], ret, equiv);
4959 return ret;
4960 }
4961
4962
4963 #undef TARGET_PREFERRED_RELOAD_CLASS
4964 #define TARGET_PREFERRED_RELOAD_CLASS rl78_preferred_reload_class
4965
4966 static reg_class_t
4967 rl78_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, reg_class_t rclass)
4968 {
4969 if (rclass == NO_REGS)
4970 rclass = V_REGS;
4971
4972 return rclass;
4973 }
4974
4975
4976 struct gcc_target targetm = TARGET_INITIALIZER;
4977
4978 #include "gt-rl78.h"
4979