1 /* Subroutines used for code generation on Renesas RL78 processors.
2 Copyright (C) 2011-2020 Free Software Foundation, Inc.
3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #define IN_TARGET_CODE 1
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "df.h"
31 #include "memmodel.h"
32 #include "tm_p.h"
33 #include "stringpool.h"
34 #include "attribs.h"
35 #include "optabs.h"
36 #include "emit-rtl.h"
37 #include "recog.h"
38 #include "diagnostic-core.h"
39 #include "varasm.h"
40 #include "stor-layout.h"
41 #include "calls.h"
42 #include "output.h"
43 #include "insn-attr.h"
44 #include "explow.h"
45 #include "expr.h"
46 #include "reload.h"
47 #include "cfgrtl.h"
48 #include "langhooks.h"
49 #include "tree-pass.h"
50 #include "context.h"
51 #include "tm-constrs.h" /* for satisfies_constraint_*(). */
52 #include "builtins.h"
53
54 /* This file should be included last. */
55 #include "target-def.h"
56
57 static inline bool is_interrupt_func (const_tree decl);
58 static inline bool is_brk_interrupt_func (const_tree decl);
59 static void rl78_reorg (void);
60 static const char *rl78_strip_name_encoding (const char *);
61 static const char *rl78_strip_nonasm_name_encoding (const char *);
62 static section * rl78_select_section (tree, int, unsigned HOST_WIDE_INT);
63
64
65 /* Debugging statements are tagged with DEBUG0 only so that they can
66 be easily enabled individually, by replacing the '0' with '1' as
67 needed. */
68 #define DEBUG0 0
69 #define DEBUG1 1
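/* A sketch of the intended usage (the message text here is only
   illustrative; rl78_far_p () below uses the same guard for real):

     #if DEBUG0
       fprintf (stderr, "considering insn %d\n", INSN_UID (insn));
     #endif

   Changing the relevant define to 1 then enables just that group of
   debug statements.  */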
70
71 /* REGISTER_NAMES has the names for individual 8-bit registers, but
72 these have the names we need to use when referring to 16-bit
73 register pairs. */
74 static const char * const word_regnames[] =
75 {
76 "ax", "AX", "bc", "BC", "de", "DE", "hl", "HL",
77 "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
78 "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
79 "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
80 "sp", "ap", "psw", "es", "cs"
81 };
82
83 /* Used by rl78_addsi3_internal for formatting insn output.  */
84 static char fmt_buffer[1024];
85
86 /* Structure for G13 MDUC registers. */
87 struct mduc_reg_type
88 {
89 unsigned int address;
90 enum machine_mode mode;
91 };
92
93 struct mduc_reg_type mduc_regs[] =
94 {
95 {0xf00e8, E_QImode},
96 {0xffff0, E_HImode},
97 {0xffff2, E_HImode},
98 {0xf2224, E_HImode},
99 {0xf00e0, E_HImode},
100 {0xf00e2, E_HImode}
101 };
102
103 struct GTY(()) machine_function
104 {
105 /* If set, the rest of the fields have been computed. */
106 int computed;
107 /* Which register pairs need to be pushed in the prologue. */
108 int need_to_push [FIRST_PSEUDO_REGISTER / 2];
109
110 /* These fields describe the frame layout... */
111 /* arg pointer */
112 /* 4 bytes for saved PC */
113 int framesize_regs;
114 /* frame pointer */
115 int framesize_locals;
116 int framesize_outgoing;
117 /* stack pointer */
118 int framesize;
119
120 /* If set, recog is allowed to match against the "real" patterns. */
121 int real_insns_ok;
122 /* If set, recog is allowed to match against the "virtual" patterns. */
123 int virt_insns_ok;
124 /* Set if the current function needs to clean up any trampolines. */
125 int trampolines_used;
126 /* True if the ES register is used and hence
127 needs to be saved inside interrupt handlers. */
128 bool uses_es;
129 };
130
131 /* This is our init_machine_status, as set in
132 rl78_option_override. */
133 static struct machine_function *
134 rl78_init_machine_status (void)
135 {
136 struct machine_function *m;
137
138 m = ggc_cleared_alloc<machine_function> ();
139 m->virt_insns_ok = 1;
140
141 return m;
142 }
143
144 /* This pass converts virtual instructions using virtual registers to
145 real instructions using real registers. Rather than run it as
146 reorg, we reschedule it before vartrack to help with debugging. */
147 namespace
148 {
149 const pass_data pass_data_rl78_devirt =
150 {
151 RTL_PASS, /* type */
152 "devirt", /* name */
153 OPTGROUP_NONE, /* optinfo_flags */
154 TV_MACH_DEP, /* tv_id */
155 0, /* properties_required */
156 0, /* properties_provided */
157 0, /* properties_destroyed */
158 0, /* todo_flags_start */
159 0, /* todo_flags_finish */
160 };
161
162 class pass_rl78_devirt : public rtl_opt_pass
163 {
164 public:
165 pass_rl78_devirt (gcc::context *ctxt)
166 : rtl_opt_pass (pass_data_rl78_devirt, ctxt)
167 {
168 }
169
170 /* opt_pass methods: */
171 virtual unsigned int execute (function *)
172 {
173 rl78_reorg ();
174 return 0;
175 }
176 };
177 } // anon namespace
178
179 rtl_opt_pass *
180 make_pass_rl78_devirt (gcc::context *ctxt)
181 {
182 return new pass_rl78_devirt (ctxt);
183 }
184
185 /* Redundant move elimination pass. Must be run after the basic block
186 reordering pass for the best effect. */
187
188 static unsigned int
189 move_elim_pass (void)
190 {
191 rtx_insn *insn, *ninsn;
192 rtx prev = NULL_RTX;
193
194 for (insn = get_insns (); insn; insn = ninsn)
195 {
196 rtx set;
197
198 ninsn = next_nonnote_nondebug_insn (insn);
199
200 if ((set = single_set (insn)) == NULL_RTX)
201 {
202 prev = NULL_RTX;
203 continue;
204 }
205
206 /* If we have two SET insns in a row (without anything
207 between them) and the source of the second one is the
208 destination of the first one, and vice versa, then we
209 can eliminate the second SET. */
210 if (prev
211 && rtx_equal_p (SET_DEST (prev), SET_SRC (set))
212 && rtx_equal_p (SET_DEST (set), SET_SRC (prev))
213 /* ... and none of the operands are volatile. */
214 && ! volatile_refs_p (SET_SRC (prev))
215 && ! volatile_refs_p (SET_DEST (prev))
216 && ! volatile_refs_p (SET_SRC (set))
217 && ! volatile_refs_p (SET_DEST (set)))
218 {
219 if (dump_file)
220 fprintf (dump_file, " Delete insn %d because it is redundant\n",
221 INSN_UID (insn));
222
223 delete_insn (insn);
224 prev = NULL_RTX;
225 }
226 else
227 prev = set;
228 }
229
230 if (dump_file)
231 print_rtl_with_bb (dump_file, get_insns (), TDF_NONE);
232
233 return 0;
234 }
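/* To illustrate the pattern removed above (a sketch, with made-up
   register choices): given the back-to-back insns

     (set (reg:HI 8 r8) (reg:HI 0 ax))
     (set (reg:HI 0 ax) (reg:HI 8 r8))

   the second insn only copies back the value the first one just
   stored, so move_elim_pass deletes it, provided neither source nor
   destination is volatile.  */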
235
236 namespace
237 {
238 const pass_data pass_data_rl78_move_elim =
239 {
240 RTL_PASS, /* type */
241 "move_elim", /* name */
242 OPTGROUP_NONE, /* optinfo_flags */
243 TV_MACH_DEP, /* tv_id */
244 0, /* properties_required */
245 0, /* properties_provided */
246 0, /* properties_destroyed */
247 0, /* todo_flags_start */
248 0, /* todo_flags_finish */
249 };
250
251 class pass_rl78_move_elim : public rtl_opt_pass
252 {
253 public:
254 pass_rl78_move_elim (gcc::context *ctxt)
255 : rtl_opt_pass (pass_data_rl78_move_elim, ctxt)
256 {
257 }
258
259 /* opt_pass methods: */
260 virtual unsigned int execute (function *) { return move_elim_pass (); }
261 };
262 } // anon namespace
263
264 rtl_opt_pass *
265 make_pass_rl78_move_elim (gcc::context *ctxt)
266 {
267 return new pass_rl78_move_elim (ctxt);
268 }
269
270 #undef TARGET_ASM_FILE_START
271 #define TARGET_ASM_FILE_START rl78_asm_file_start
272
273 static void
274 rl78_asm_file_start (void)
275 {
276 int i;
277
278 if (TARGET_G10)
279 {
280 /* The memory used is 0xffec8 to 0xffedf; real registers are in
281 0xffee0 to 0xffee7. */
282 for (i = 8; i < 32; i++)
283 fprintf (asm_out_file, "r%d\t=\t0x%x\n", i, 0xffec0 + i);
284 }
285 else
286 {
287 for (i = 0; i < 8; i++)
288 {
289 fprintf (asm_out_file, "r%d\t=\t0x%x\n", 8 + i, 0xffef0 + i);
290 fprintf (asm_out_file, "r%d\t=\t0x%x\n", 16 + i, 0xffee8 + i);
291 fprintf (asm_out_file, "r%d\t=\t0x%x\n", 24 + i, 0xffee0 + i);
292 }
293 }
294
295 opt_pass *rl78_devirt_pass = make_pass_rl78_devirt (g);
296 struct register_pass_info rl78_devirt_info =
297 {
298 rl78_devirt_pass,
299 "pro_and_epilogue",
300 1,
301 PASS_POS_INSERT_BEFORE
302 };
303
304 opt_pass *rl78_move_elim_pass = make_pass_rl78_move_elim (g);
305 struct register_pass_info rl78_move_elim_info =
306 {
307 rl78_move_elim_pass,
308 "bbro",
309 1,
310 PASS_POS_INSERT_AFTER
311 };
312
313 register_pass (& rl78_devirt_info);
314 register_pass (& rl78_move_elim_info);
315 }
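/* For reference, on non-G10 parts the loop above emits assembler
   equates of the form (output reconstructed from the fprintf formats,
   shown here only as an illustration):

     r8  = 0xffef0
     r16 = 0xffee8
     r24 = 0xffee0
     r9  = 0xffef1
     ...

   i.e. the virtual registers r8..r31 are pinned to fixed short-address
   RAM locations.  */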
316
317 void
318 rl78_output_symbol_ref (FILE * file, rtx sym)
319 {
320 tree type = SYMBOL_REF_DECL (sym);
321 const char *str = XSTR (sym, 0);
322
323 if (str[0] == '*')
324 {
325 fputs (str + 1, file);
326 }
327 else
328 {
329 str = rl78_strip_nonasm_name_encoding (str);
330 if (type && TREE_CODE (type) == FUNCTION_DECL)
331 {
332 fprintf (file, "%%code(");
333 assemble_name (file, str);
334 fprintf (file, ")");
335 }
336 else
337 assemble_name (file, str);
338 }
339 }
340
341 #undef TARGET_OPTION_OVERRIDE
342 #define TARGET_OPTION_OVERRIDE rl78_option_override
343
344 #define MUST_SAVE_MDUC_REGISTERS \
345 (TARGET_SAVE_MDUC_REGISTERS \
346 && (is_interrupt_func (NULL_TREE)) && RL78_MUL_G13)
347
348 static void
349 rl78_option_override (void)
350 {
351 flag_omit_frame_pointer = 1;
352 flag_no_function_cse = 1;
353 flag_split_wide_types = 0;
354
355 init_machine_status = rl78_init_machine_status;
356
357 if (TARGET_ALLREGS)
358 {
359 int i;
360
361 for (i = 24; i < 32; i++)
362 fixed_regs[i] = 0;
363 }
364
365 if (TARGET_ES0
366 && strcmp (lang_hooks.name, "GNU C")
367 && strcmp (lang_hooks.name, "GNU C11")
368 && strcmp (lang_hooks.name, "GNU C17")
369 && strcmp (lang_hooks.name, "GNU C2X")
370 && strcmp (lang_hooks.name, "GNU C89")
371 && strcmp (lang_hooks.name, "GNU C99")
372 /* Compiling with -flto results in a language of GNU GIMPLE being used... */
373 && strcmp (lang_hooks.name, "GNU GIMPLE"))
374 /* Address spaces are currently only supported by C. */
375 error ("%<-mes0%> can only be used with C");
376
377 if (TARGET_SAVE_MDUC_REGISTERS && !(TARGET_G13 || RL78_MUL_G13))
378 warning (0, "mduc registers only saved for G13 target");
379
380 switch (rl78_cpu_type)
381 {
382 case CPU_UNINIT:
383 rl78_cpu_type = CPU_G14;
384 if (rl78_mul_type == MUL_UNINIT)
385 rl78_mul_type = MUL_NONE;
386 break;
387
388 case CPU_G10:
389 switch (rl78_mul_type)
390 {
391 case MUL_UNINIT: rl78_mul_type = MUL_NONE; break;
392 case MUL_NONE: break;
393 case MUL_G13: error ("%<-mmul=g13%> cannot be used with "
394 "%<-mcpu=g10%>"); break;
395 case MUL_G14: error ("%<-mmul=g14%> cannot be used with "
396 "%<-mcpu=g10%>"); break;
397 }
398 break;
399
400 case CPU_G13:
401 switch (rl78_mul_type)
402 {
403 case MUL_UNINIT: rl78_mul_type = MUL_G13; break;
404 case MUL_NONE: break;
405 case MUL_G13: break;
406 /* The S2 core does not have mul/div instructions. */
407 case MUL_G14: error ("%<-mmul=g14%> cannot be used with "
408 "%<-mcpu=g13%>"); break;
409 }
410 break;
411
412 case CPU_G14:
413 switch (rl78_mul_type)
414 {
415 case MUL_UNINIT: rl78_mul_type = MUL_G14; break;
416 case MUL_NONE: break;
417 case MUL_G14: break;
418 /* The G14 core does not have the hardware multiply peripheral used by the
419 G13 core, hence you cannot use G13 multiply routines on G14 hardware. */
420 case MUL_G13: error ("%<-mmul=g13%> cannot be used with "
421 "%<-mcpu=g14%>"); break;
422 }
423 break;
424 }
425 }
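/* An informal summary of the CPU/multiplier defaulting implemented by
   the switch above (no new behaviour, just the combinations spelled
   out):

     no -mcpu given           ->  CPU_G14 with -mmul=none
     -mcpu=g14                ->  -mmul=g14 by default
     -mcpu=g13                ->  -mmul=g13 by default
     -mcpu=g10 -mmul=g13/g14  ->  rejected with an error

   so each core defaults to the multiply/divide support it actually
   has, and impossible combinations are diagnosed.  */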
426
427 /* Most registers are 8 bits. Some are 16 bits because, for example,
428 gcc doesn't like dealing with $FP as a register pair (the second
429 half of $fp is also marked as 2 bytes to keep reload happy wrt
430 register pairs, but no register class includes it). This table maps
431 register numbers to size in bytes. */
432 static const int register_sizes[] =
433 {
434 1, 1, 1, 1, 1, 1, 1, 1,
435 1, 1, 1, 1, 1, 1, 1, 1,
436 1, 1, 1, 1, 1, 1, 2, 2,
437 1, 1, 1, 1, 1, 1, 1, 1,
438 2, 2, 1, 1, 1
439 };
440
441 /* Predicates used in the MD patterns. This one is true when virtual
442 insns may be matched, which typically means before (or during) the
443 devirt pass. */
444 bool
445 rl78_virt_insns_ok (void)
446 {
447 if (cfun)
448 return cfun->machine->virt_insns_ok;
449 return true;
450 }
451
452 /* Predicates used in the MD patterns. This one is true when real
453 insns may be matched, which typically means after (or during) the
454 devirt pass. */
455 bool
456 rl78_real_insns_ok (void)
457 {
458 if (cfun)
459 return cfun->machine->real_insns_ok;
460 return false;
461 }
462
463 #undef TARGET_HARD_REGNO_NREGS
464 #define TARGET_HARD_REGNO_NREGS rl78_hard_regno_nregs
465
466 static unsigned int
467 rl78_hard_regno_nregs (unsigned int regno, machine_mode mode)
468 {
469 int rs = register_sizes[regno];
470 if (rs < 1)
471 rs = 1;
472 return ((GET_MODE_SIZE (mode) + rs - 1) / rs);
473 }
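/* Worked example, using the register_sizes table above: an HImode
   value (2 bytes) starting in an 8-bit register such as r8 occupies 2
   hard registers, while the same value in the 16-bit SP or AP
   register occupies only 1.  */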
474
475 #undef TARGET_HARD_REGNO_MODE_OK
476 #define TARGET_HARD_REGNO_MODE_OK rl78_hard_regno_mode_ok
477
478 static bool
479 rl78_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
480 {
481 int s = GET_MODE_SIZE (mode);
482
483 if (s < 1)
484 return false;
485 /* These are not to be used by gcc. */
486 if (regno == 23 || regno == ES_REG || regno == CS_REG)
487 return false;
488 /* $fp can always be accessed as a 16-bit value. */
489 if (regno == FP_REG && s == 2)
490 return true;
491 if (regno < SP_REG)
492 {
493 /* Since a reg-reg move is really a reg-mem move, we must
494 enforce alignment. */
495 if (s > 1 && (regno % 2))
496 return false;
497 return true;
498 }
499 if (s == CC_REGNUM)
500 return (mode == BImode);
501 /* All other registers must be accessed in their natural sizes. */
502 if (s == register_sizes [regno])
503 return true;
504 return false;
505 }
506
507 #undef TARGET_MODES_TIEABLE_P
508 #define TARGET_MODES_TIEABLE_P rl78_modes_tieable_p
509
510 static bool
511 rl78_modes_tieable_p (machine_mode mode1, machine_mode mode2)
512 {
513 return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
514 || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
515 == (GET_MODE_CLASS (mode2) == MODE_FLOAT
516 || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
517 }
518
519 /* simplify_gen_subreg () doesn't handle memory references the way we
520 need it to below, so we use this function when we must get a
521 valid subreg in a "natural" state. */
522 static rtx
523 rl78_subreg (machine_mode mode, rtx r, machine_mode omode, int byte)
524 {
525 if (GET_CODE (r) == MEM)
526 return adjust_address (r, mode, byte);
527 else
528 return simplify_gen_subreg (mode, r, omode, byte);
529 }
530
531 /* Used by movsi. Split SImode moves into two HImode moves, using
532 appropriate patterns for the upper and lower halves of symbols. */
533 void
534 rl78_expand_movsi (rtx *operands)
535 {
536 rtx op00, op02, op10, op12;
537
538 op00 = rl78_subreg (HImode, operands[0], SImode, 0);
539 op02 = rl78_subreg (HImode, operands[0], SImode, 2);
540 if (GET_CODE (operands[1]) == CONST
541 || GET_CODE (operands[1]) == SYMBOL_REF)
542 {
543 op10 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (0));
544 op10 = gen_rtx_CONST (HImode, op10);
545 op12 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (16));
546 op12 = gen_rtx_CONST (HImode, op12);
547 }
548 else
549 {
550 op10 = rl78_subreg (HImode, operands[1], SImode, 0);
551 op12 = rl78_subreg (HImode, operands[1], SImode, 2);
552 }
553
554 if (rtx_equal_p (operands[0], operands[1]))
555 ;
556 else if (rtx_equal_p (op00, op12))
557 {
558 emit_move_insn (op02, op12);
559 emit_move_insn (op00, op10);
560 }
561 else
562 {
563 emit_move_insn (op00, op10);
564 emit_move_insn (op02, op12);
565 }
566 }
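/* When the source is symbolic, the CONST-wrapped ZERO_EXTRACTs built
   above are printed as %lo16()/%hi16() relocations (see
   rl78_print_operand_1), so the SImode move ends up roughly as two
   word moves along the lines of

     movw ax, #%lo16(_far_object)
     movw bc, #%hi16(_far_object)

   where the registers and the symbol name are purely illustrative.  */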
567
568 /* Generate code to move an SImode value. */
569 void
570 rl78_split_movsi (rtx *operands, machine_mode omode)
571 {
572 rtx op00, op02, op10, op12;
573
574 op00 = rl78_subreg (HImode, operands[0], omode, 0);
575 op02 = rl78_subreg (HImode, operands[0], omode, 2);
576
577 if (GET_CODE (operands[1]) == CONST
578 || GET_CODE (operands[1]) == SYMBOL_REF)
579 {
580 op10 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (0));
581 op10 = gen_rtx_CONST (HImode, op10);
582 op12 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (16));
583 op12 = gen_rtx_CONST (HImode, op12);
584 }
585 else
586 {
587 op10 = rl78_subreg (HImode, operands[1], omode, 0);
588 op12 = rl78_subreg (HImode, operands[1], omode, 2);
589 }
590
591 if (rtx_equal_p (operands[0], operands[1]))
592 ;
593 else if (rtx_equal_p (op00, op12))
594 {
595 operands[2] = op02;
596 operands[4] = op12;
597 operands[3] = op00;
598 operands[5] = op10;
599 }
600 else
601 {
602 operands[2] = op00;
603 operands[4] = op10;
604 operands[3] = op02;
605 operands[5] = op12;
606 }
607 }
608
609 void
610 rl78_split_movdi (rtx *operands, enum machine_mode omode)
611 {
612 rtx op00, op04, op10, op14;
613 op00 = rl78_subreg (SImode, operands[0], omode, 0);
614 op04 = rl78_subreg (SImode, operands[0], omode, 4);
615 op10 = rl78_subreg (SImode, operands[1], omode, 0);
616 op14 = rl78_subreg (SImode, operands[1], omode, 4);
617 emit_insn (gen_movsi (op00, op10));
618 emit_insn (gen_movsi (op04, op14));
619 }
620
621 /* Used by various two-operand expanders which cannot accept all
622 operands in the "far" namespace. Force some such operands into
623 registers so that each pattern has at most one far operand. */
624 int
625 rl78_force_nonfar_2 (rtx *operands, rtx (*gen)(rtx,rtx))
626 {
627 int did = 0;
628 rtx temp_reg = NULL;
629
630 /* FIXME: in the future, be smarter about only doing this if the
631 other operand is also far, assuming the devirtualizer can also
632 handle that. */
633 if (rl78_far_p (operands[0]))
634 {
635 temp_reg = operands[0];
636 operands[0] = gen_reg_rtx (GET_MODE (operands[0]));
637 did = 1;
638 }
639 if (!did)
640 return 0;
641
642 emit_insn (gen (operands[0], operands[1]));
643 if (temp_reg)
644 emit_move_insn (temp_reg, operands[0]);
645 return 1;
646 }
647
648 /* Likewise, but for three-operand expanders. */
649 int
650 rl78_force_nonfar_3 (rtx *operands, rtx (*gen)(rtx,rtx,rtx))
651 {
652 int did = 0;
653 rtx temp_reg = NULL;
654
655 /* FIXME: Likewise. */
656 if (rl78_far_p (operands[1]))
657 {
658 rtx temp_reg = gen_reg_rtx (GET_MODE (operands[1]));
659 emit_move_insn (temp_reg, operands[1]);
660 operands[1] = temp_reg;
661 did = 1;
662 }
663 if (rl78_far_p (operands[0]))
664 {
665 temp_reg = operands[0];
666 operands[0] = gen_reg_rtx (GET_MODE (operands[0]));
667 did = 1;
668 }
669 if (!did)
670 return 0;
671
672 emit_insn (gen (operands[0], operands[1], operands[2]));
673 if (temp_reg)
674 emit_move_insn (temp_reg, operands[0]);
675 return 1;
676 }
677
678 int
679 rl78_one_far_p (rtx *operands, int n)
680 {
681 rtx which = NULL;
682 int i, c = 0;
683
684 for (i = 0; i < n; i ++)
685 if (rl78_far_p (operands[i]))
686 {
687 if (which == NULL)
688 which = operands[i];
689 else if (rtx_equal_p (operands[i], which))
690 continue;
691 c ++;
692 }
693 return c <= 1;
694 }
695
696 #undef TARGET_CAN_ELIMINATE
697 #define TARGET_CAN_ELIMINATE rl78_can_eliminate
698
699 static bool
700 rl78_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to ATTRIBUTE_UNUSED)
701 {
702 return true;
703 }
704
705 /* Returns true if the given register needs to be saved by the
706 current function. */
707 static bool
708 need_to_save (unsigned int regno)
709 {
710 if (is_interrupt_func (cfun->decl))
711 {
712 /* We don't know what devirt will need */
713 if (regno < 8)
714 return true;
715
716 /* We don't need to save registers that have
717 been reserved for interrupt handlers. */
718 if (regno > 23)
719 return false;
720
721 /* If the handler is a non-leaf function then it may call
722 non-interrupt aware routines which will happily clobber
723 any call_used registers, so we have to preserve them.
724 We do not have to worry about the frame pointer register
725 though, as that is handled below. */
726 if (!crtl->is_leaf && call_used_or_fixed_reg_p (regno) && regno < 22)
727 return true;
728
729 /* Otherwise we only have to save a register, call_used
730 or not, if it is used by this handler. */
731 return df_regs_ever_live_p (regno);
732 }
733
734 if (regno == FRAME_POINTER_REGNUM
735 && (frame_pointer_needed || df_regs_ever_live_p (regno)))
736 return true;
737 if (fixed_regs[regno])
738 return false;
739 if (crtl->calls_eh_return)
740 return true;
741 if (df_regs_ever_live_p (regno)
742 && !call_used_or_fixed_reg_p (regno))
743 return true;
744 return false;
745 }
746
747 /* We use this to wrap all emitted insns in the prologue. */
748 static rtx
749 F (rtx x)
750 {
751 RTX_FRAME_RELATED_P (x) = 1;
752 return x;
753 }
754
755 /* Compute all the frame-related fields in our machine_function
756 structure. */
757 static void
758 rl78_compute_frame_info (void)
759 {
760 int i;
761
762 cfun->machine->computed = 1;
763 cfun->machine->framesize_regs = 0;
764 cfun->machine->framesize_locals = get_frame_size ();
765 cfun->machine->framesize_outgoing = crtl->outgoing_args_size;
766
767 for (i = 0; i < 16; i ++)
768 if (need_to_save (i * 2) || need_to_save (i * 2 + 1))
769 {
770 cfun->machine->need_to_push [i] = 1;
771 cfun->machine->framesize_regs += 2;
772 }
773 else
774 cfun->machine->need_to_push [i] = 0;
775
776 if ((cfun->machine->framesize_locals + cfun->machine->framesize_outgoing) & 1)
777 cfun->machine->framesize_locals ++;
778
779 cfun->machine->framesize = (cfun->machine->framesize_regs
780 + cfun->machine->framesize_locals
781 + cfun->machine->framesize_outgoing);
782 }
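/* Worked example: with three register pairs to push, 5 bytes of
   locals and no outgoing arguments, framesize_regs is 6,
   framesize_locals is rounded up to 6 to keep the stack word aligned,
   and framesize comes to 12.  */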
783
784 /* Returns true if the provided function has the specified attribute. */
785 static inline bool
786 has_func_attr (const_tree decl, const char * func_attr)
787 {
788 if (decl == NULL_TREE)
789 decl = current_function_decl;
790
791 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
792 }
793
794 /* Returns true if the provided function has the "interrupt" attribute. */
795 static inline bool
796 is_interrupt_func (const_tree decl)
797 {
798 return has_func_attr (decl, "interrupt") || has_func_attr (decl, "brk_interrupt");
799 }
800
801 /* Returns true if the provided function has the "brk_interrupt" attribute. */
802 static inline bool
803 is_brk_interrupt_func (const_tree decl)
804 {
805 return has_func_attr (decl, "brk_interrupt");
806 }
807
808 /* Check "interrupt" attributes. */
809 static tree
810 rl78_handle_func_attribute (tree * node,
811 tree name,
812 tree args ATTRIBUTE_UNUSED,
813 int flags ATTRIBUTE_UNUSED,
814 bool * no_add_attrs)
815 {
816 gcc_assert (DECL_P (* node));
817
818 if (TREE_CODE (* node) != FUNCTION_DECL)
819 {
820 warning (OPT_Wattributes, "%qE attribute only applies to functions",
821 name);
822 * no_add_attrs = true;
823 }
824
825 /* FIXME: We ought to check that the interrupt and exception
826 handler attributes have been applied to void functions. */
827 return NULL_TREE;
828 }
829
830 /* Check "naked" attributes. */
831 static tree
832 rl78_handle_naked_attribute (tree * node,
833 tree name ATTRIBUTE_UNUSED,
834 tree args,
835 int flags ATTRIBUTE_UNUSED,
836 bool * no_add_attrs)
837 {
838 gcc_assert (DECL_P (* node));
839 gcc_assert (args == NULL_TREE);
840
841 if (TREE_CODE (* node) != FUNCTION_DECL)
842 {
843 warning (OPT_Wattributes, "naked attribute only applies to functions");
844 * no_add_attrs = true;
845 }
846
847 /* Disable warnings about this function - eg reaching the end without
848 seeing a return statement - because the programmer is doing things
849 that gcc does not know about. */
850 TREE_NO_WARNING (* node) = 1;
851
852 return NULL_TREE;
853 }
854
855 /* Check "saddr" attributes. */
856 static tree
857 rl78_handle_saddr_attribute (tree * node,
858 tree name,
859 tree args ATTRIBUTE_UNUSED,
860 int flags ATTRIBUTE_UNUSED,
861 bool * no_add_attrs)
862 {
863 gcc_assert (DECL_P (* node));
864
865 if (TREE_CODE (* node) == FUNCTION_DECL)
866 {
867 warning (OPT_Wattributes, "%qE attribute doesn%'t apply to functions",
868 name);
869 * no_add_attrs = true;
870 }
871
872 return NULL_TREE;
873 }
874
875 /* Check "vector" attribute. */
876
877 static tree
878 rl78_handle_vector_attribute (tree * node,
879 tree name,
880 tree args,
881 int flags ATTRIBUTE_UNUSED,
882 bool * no_add_attrs)
883 {
884 gcc_assert (DECL_P (* node));
885 gcc_assert (args != NULL_TREE);
886
887 if (TREE_CODE (* node) != FUNCTION_DECL)
888 {
889 warning (OPT_Wattributes, "%qE attribute only applies to functions",
890 name);
891 * no_add_attrs = true;
892 }
893
894 return NULL_TREE;
895 }
896
897 #undef TARGET_ATTRIBUTE_TABLE
898 #define TARGET_ATTRIBUTE_TABLE rl78_attribute_table
899
900 /* Table of RL78-specific attributes. */
901 const struct attribute_spec rl78_attribute_table[] =
902 {
903 /* Name, min_len, max_len, decl_req, type_req, fn_type_req,
904 affects_type_identity, handler, exclude. */
905 { "interrupt", 0, -1, true, false, false, false,
906 rl78_handle_func_attribute, NULL },
907 { "brk_interrupt", 0, 0, true, false, false, false,
908 rl78_handle_func_attribute, NULL },
909 { "naked", 0, 0, true, false, false, false,
910 rl78_handle_naked_attribute, NULL },
911 { "saddr", 0, 0, true, false, false, false,
912 rl78_handle_saddr_attribute, NULL },
913 { "vector", 1, -1, true, false, false, false,
914 rl78_handle_vector_attribute, NULL },
915 { NULL, 0, 0, false, false, false, false, NULL, NULL }
916 };
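/* Typical source-level uses of these attributes (illustrative
   declarations only; all of the names are made up):

     void rx_isr (void) __attribute__ ((interrupt));
     void dbg_isr (void) __attribute__ ((brk_interrupt));
     void boot (void) __attribute__ ((naked));
     int counter __attribute__ ((saddr));
     void tick_isr (void) __attribute__ ((interrupt, vector (5)));  */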
917
918
919
920 /* Break down an address RTX into its component base/index/addend
921 portions and return TRUE if the address is of a valid form, else
922 FALSE. */
923 static bool
924 characterize_address (rtx x, rtx *base, rtx *index, rtx *addend)
925 {
926 *base = NULL_RTX;
927 *index = NULL_RTX;
928 *addend = NULL_RTX;
929
930 if (GET_CODE (x) == UNSPEC
931 && XINT (x, 1) == UNS_ES_ADDR)
932 x = XVECEXP (x, 0, 1);
933
934 if (GET_CODE (x) == REG)
935 {
936 *base = x;
937 return true;
938 }
939
940 /* We sometimes get these without the CONST wrapper */
941 if (GET_CODE (x) == PLUS
942 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
943 && GET_CODE (XEXP (x, 1)) == CONST_INT)
944 {
945 *addend = x;
946 return true;
947 }
948
949 if (GET_CODE (x) == PLUS)
950 {
951 *base = XEXP (x, 0);
952 x = XEXP (x, 1);
953
954 if (GET_CODE (*base) == SUBREG)
955 {
956 if (GET_MODE (*base) == HImode
957 && GET_MODE (XEXP (*base, 0)) == SImode
958 && GET_CODE (XEXP (*base, 0)) == REG)
959 {
960 /* This is a throw-away rtx just to tell everyone
961 else what effective register we're using. */
962 *base = gen_rtx_REG (HImode, REGNO (XEXP (*base, 0)));
963 }
964 }
965
966 if (GET_CODE (*base) != REG
967 && GET_CODE (x) == REG)
968 {
969 rtx tmp = *base;
970 *base = x;
971 x = tmp;
972 }
973
974 if (GET_CODE (*base) != REG)
975 return false;
976
977 if (GET_CODE (x) == ZERO_EXTEND
978 && GET_CODE (XEXP (x, 0)) == REG)
979 {
980 *index = XEXP (x, 0);
981 return false;
982 }
983 }
984
985 switch (GET_CODE (x))
986 {
987 case PLUS:
988 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
989 && GET_CODE (XEXP (x, 1)) == CONST_INT)
990 {
991 *addend = x;
992 return true;
993 }
994 /* fall through */
995 case MEM:
996 case REG:
997 return false;
998
999 case SUBREG:
1000 switch (GET_CODE (XEXP (x, 0)))
1001 {
1002 case CONST:
1003 case SYMBOL_REF:
1004 case CONST_INT:
1005 *addend = x;
1006 return true;
1007 default:
1008 return false;
1009 }
1010
1011 case CONST:
1012 case SYMBOL_REF:
1013 case CONST_INT:
1014 *addend = x;
1015 return true;
1016
1017 default:
1018 return false;
1019 }
1020
1021 return false;
1022 }
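/* Some address shapes and how characterize_address () classifies them
   (informal examples):

     (reg:HI hl)                             -> base = hl
     (symbol_ref "x")                        -> addend = the symbol
     (plus (symbol_ref "x") (const_int 4))   -> addend = the whole PLUS
     (plus (reg:HI hl) (const_int 4))        -> base = hl, addend = 4

   These are only a sample; the routine handles a few more variations
   and rejects the rest.  */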
1023
1024 /* Used by the Whb constraint. Match addresses that use HL+B or HL+C
1025 addressing. */
1026 bool
1027 rl78_hl_b_c_addr_p (rtx op)
1028 {
1029 rtx hl, bc;
1030
1031 if (GET_CODE (op) != PLUS)
1032 return false;
1033 hl = XEXP (op, 0);
1034 bc = XEXP (op, 1);
1035 if (GET_CODE (hl) == ZERO_EXTEND)
1036 {
1037 rtx tmp = hl;
1038 hl = bc;
1039 bc = tmp;
1040 }
1041 if (GET_CODE (hl) != REG)
1042 return false;
1043 if (GET_CODE (bc) != ZERO_EXTEND)
1044 return false;
1045 bc = XEXP (bc, 0);
1046 if (GET_CODE (bc) != REG)
1047 return false;
1048 if (REGNO (hl) != HL_REG)
1049 return false;
1050 if (REGNO (bc) != B_REG && REGNO (bc) != C_REG)
1051 return false;
1052
1053 return true;
1054 }
1055
1056 #define REG_IS(r, regno) (((r) == (regno)) || ((r) >= FIRST_PSEUDO_REGISTER && !(strict)))
1057
1058 /* Return the appropriate mode for a named address space address. */
1059
1060 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
1061 #define TARGET_ADDR_SPACE_ADDRESS_MODE rl78_addr_space_address_mode
1062
1063 static scalar_int_mode
1064 rl78_addr_space_address_mode (addr_space_t addrspace)
1065 {
1066 switch (addrspace)
1067 {
1068 case ADDR_SPACE_GENERIC:
1069 return HImode;
1070 case ADDR_SPACE_NEAR:
1071 return HImode;
1072 case ADDR_SPACE_FAR:
1073 return SImode;
1074 default:
1075 gcc_unreachable ();
1076 }
1077 }
1078
1079 /* Used in various constraints and predicates to match operands in the
1080 "far" address space. */
1081 int
1082 rl78_far_p (rtx x)
1083 {
1084 if (! MEM_P (x))
1085 return 0;
1086 #if DEBUG0
1087 fprintf (stderr, "\033[35mrl78_far_p: "); debug_rtx (x);
1088 fprintf (stderr, " = %d\033[0m\n", MEM_ADDR_SPACE (x) == ADDR_SPACE_FAR);
1089 #endif
1090
1091 /* Not all far addresses are legitimate, because the devirtualizer
1092 can't handle them. */
1093 if (! rl78_as_legitimate_address (GET_MODE (x), XEXP (x, 0), false, ADDR_SPACE_FAR))
1094 return 0;
1095
1096 return GET_MODE_BITSIZE (rl78_addr_space_address_mode (MEM_ADDR_SPACE (x))) == 32;
1097 }
1098
1099 /* Return the appropriate mode for a named address pointer. */
1100 #undef TARGET_ADDR_SPACE_POINTER_MODE
1101 #define TARGET_ADDR_SPACE_POINTER_MODE rl78_addr_space_pointer_mode
1102
1103 static scalar_int_mode
1104 rl78_addr_space_pointer_mode (addr_space_t addrspace)
1105 {
1106 switch (addrspace)
1107 {
1108 case ADDR_SPACE_GENERIC:
1109 return HImode;
1110 case ADDR_SPACE_NEAR:
1111 return HImode;
1112 case ADDR_SPACE_FAR:
1113 return SImode;
1114 default:
1115 gcc_unreachable ();
1116 }
1117 }
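/* In practice this means generic and __near pointers are 16 bits wide
   (HImode) while __far pointers are 32 bits wide (SImode).  Assuming
   the usual RL78 address-space keywords, an illustrative pair of
   declarations would be:

     char __far *src;    (4-byte pointer, SImode)
     char *dst;          (2-byte pointer, HImode)  */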
1118
1119 /* Returns TRUE for valid pointer modes. */
1120 #undef TARGET_VALID_POINTER_MODE
1121 #define TARGET_VALID_POINTER_MODE rl78_valid_pointer_mode
1122
1123 static bool
1124 rl78_valid_pointer_mode (scalar_int_mode m)
1125 {
1126 return (m == HImode || m == SImode);
1127 }
1128
1129 #undef TARGET_LEGITIMATE_CONSTANT_P
1130 #define TARGET_LEGITIMATE_CONSTANT_P rl78_is_legitimate_constant
1131
1132 static bool
1133 rl78_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED)
1134 {
1135 return true;
1136 }
1137
1138 #undef TARGET_LRA_P
1139 #define TARGET_LRA_P hook_bool_void_false
1140
1141 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
1142 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P rl78_as_legitimate_address
1143
1144 bool
1145 rl78_as_legitimate_address (machine_mode mode ATTRIBUTE_UNUSED, rtx x,
1146 bool strict ATTRIBUTE_UNUSED, addr_space_t as ATTRIBUTE_UNUSED)
1147 {
1148 rtx base, index, addend;
1149 bool is_far_addr = false;
1150 int as_bits;
1151
1152 as_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (as));
1153
1154 if (GET_CODE (x) == UNSPEC
1155 && XINT (x, 1) == UNS_ES_ADDR)
1156 {
1157 x = XVECEXP (x, 0, 1);
1158 is_far_addr = true;
1159 }
1160
1161 if (as_bits == 16 && is_far_addr)
1162 return false;
1163
1164 if (! characterize_address (x, &base, &index, &addend))
1165 return false;
1166
1167 /* We can't extract the high/low portions of a PLUS address
1168 involving a register during devirtualization, so make sure all
1169 such __far addresses do not have addends. This forces GCC to do
1170 the sum separately. */
1171 if (addend && base && as_bits == 32 && GET_MODE (base) == SImode)
1172 return false;
1173
1174 if (base && index)
1175 {
1176 int ir = REGNO (index);
1177 int br = REGNO (base);
1178
1179 #define OK(test, debug) if (test) { /*fprintf(stderr, "%d: OK %s\n", __LINE__, debug);*/ return true; }
1180 OK (REG_IS (br, HL_REG) && REG_IS (ir, B_REG), "[hl+b]");
1181 OK (REG_IS (br, HL_REG) && REG_IS (ir, C_REG), "[hl+c]");
1182 return false;
1183 }
1184
1185 if (strict && base && GET_CODE (base) == REG && REGNO (base) >= FIRST_PSEUDO_REGISTER)
1186 return false;
1187
1188 if (! cfun->machine->virt_insns_ok && base && GET_CODE (base) == REG
1189 && REGNO (base) >= 8 && REGNO (base) <= 31)
1190 return false;
1191
1192 return true;
1193 }
1194
1195 /* Determine if one named address space is a subset of another. */
1196 #undef TARGET_ADDR_SPACE_SUBSET_P
1197 #define TARGET_ADDR_SPACE_SUBSET_P rl78_addr_space_subset_p
1198
1199 static bool
1200 rl78_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
1201 {
1202 int subset_bits;
1203 int superset_bits;
1204
1205 subset_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (subset));
1206 superset_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (superset));
1207
1208 return (subset_bits <= superset_bits);
1209 }
1210
1211 #undef TARGET_ADDR_SPACE_CONVERT
1212 #define TARGET_ADDR_SPACE_CONVERT rl78_addr_space_convert
1213
1214 /* Convert from one address space to another. */
1215 static rtx
1216 rl78_addr_space_convert (rtx op, tree from_type, tree to_type)
1217 {
1218 addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (from_type));
1219 addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (to_type));
1220 rtx result;
1221 int to_bits;
1222 int from_bits;
1223
1224 to_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (to_as));
1225 from_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (from_as));
1226
1227 if (to_bits < from_bits)
1228 {
1229 rtx tmp;
1230 /* This is unpredictable, as we're truncating off usable address
1231 bits. */
1232
1233 warning (OPT_Waddress, "converting far pointer to near pointer");
1234 result = gen_reg_rtx (HImode);
1235 if (GET_CODE (op) == SYMBOL_REF
1236 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER))
1237 tmp = gen_rtx_raw_SUBREG (HImode, op, 0);
1238 else
1239 tmp = simplify_subreg (HImode, op, SImode, 0);
1240 gcc_assert (tmp != NULL_RTX);
1241 emit_move_insn (result, tmp);
1242 return result;
1243 }
1244 else if (to_bits > from_bits)
1245 {
1246 /* This always works. */
1247 result = gen_reg_rtx (SImode);
1248 emit_move_insn (rl78_subreg (HImode, result, SImode, 0), op);
1249 if (TREE_CODE (from_type) == POINTER_TYPE
1250 && TREE_CODE (TREE_TYPE (from_type)) == FUNCTION_TYPE)
1251 emit_move_insn (rl78_subreg (HImode, result, SImode, 2), const0_rtx);
1252 else
1253 emit_move_insn (rl78_subreg (HImode, result, SImode, 2), GEN_INT (0x0f));
1254 return result;
1255 }
1256 else
1257 return op;
1258 gcc_unreachable ();
1259 }
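/* Worked example: widening a near (HImode) data pointer to a far
   (SImode) pointer copies the 16-bit value into the low half of the
   result and stores 0x0f in the high half, so the 32-bit address
   lands in the 0xFxxxx region where near data lives; for function
   pointers the high half is set to 0 instead.  Narrowing a far
   pointer simply drops the upper 16 bits, hence the warning above.  */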
1260
1261 /* Implements REGNO_MODE_CODE_OK_FOR_BASE_P. */
1262 bool
1263 rl78_regno_mode_code_ok_for_base_p (int regno, machine_mode mode ATTRIBUTE_UNUSED,
1264 addr_space_t address_space ATTRIBUTE_UNUSED,
1265 int outer_code ATTRIBUTE_UNUSED, int index_code)
1266 {
1267 if (regno <= SP_REG && regno >= 16)
1268 return true;
1269 if (index_code == REG)
1270 return (regno == HL_REG);
1271 if (regno == C_REG || regno == B_REG || regno == E_REG || regno == L_REG)
1272 return true;
1273 return false;
1274 }
1275
1276 /* Implements MODE_CODE_BASE_REG_CLASS. */
1277 enum reg_class
1278 rl78_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
1279 addr_space_t address_space ATTRIBUTE_UNUSED,
1280 int outer_code ATTRIBUTE_UNUSED,
1281 int index_code ATTRIBUTE_UNUSED)
1282 {
1283 return V_REGS;
1284 }
1285
1286 /* Typical stack layout should look like this after the function's prologue:
1287
1288 | |
1289 -- ^
1290 | | \ |
1291 | | arguments saved | Increasing
1292 | | on the stack | addresses
1293 PARENT arg pointer -> | | /
1294 -------------------------- ---- -------------------
1295 CHILD |ret | return address
1296 --
1297 | | \
1298 | | call saved
1299 | | registers
1300 frame pointer -> | | /
1301 --
1302 | | \
1303 | | local
1304 | | variables
1305 | | /
1306 --
1307 | | \
1308 | | outgoing | Decreasing
1309 | | arguments | addresses
1310 current stack pointer -> | | / |
1311 -------------------------- ---- ------------------ V
1312 | | */
1313
1314 /* Implements INITIAL_ELIMINATION_OFFSET. The frame layout is
1315 described in the machine_function struct definition, above. */
1316 int
1317 rl78_initial_elimination_offset (int from, int to)
1318 {
1319 int rv = 0; /* as if arg to arg */
1320
1321 rl78_compute_frame_info ();
1322
1323 switch (to)
1324 {
1325 case STACK_POINTER_REGNUM:
1326 rv += cfun->machine->framesize_outgoing;
1327 rv += cfun->machine->framesize_locals;
1328 /* Fall through. */
1329 case FRAME_POINTER_REGNUM:
1330 rv += cfun->machine->framesize_regs;
1331 rv += 4;
1332 break;
1333 default:
1334 gcc_unreachable ();
1335 }
1336
1337 switch (from)
1338 {
1339 case FRAME_POINTER_REGNUM:
1340 rv -= 4;
1341 rv -= cfun->machine->framesize_regs;
1342 case ARG_POINTER_REGNUM:
1343 break;
1344 default:
1345 gcc_unreachable ();
1346 }
1347
1348 return rv;
1349 }
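/* Worked example, using the layout pictured above: with
   framesize_regs = 6, framesize_locals = 8 and framesize_outgoing = 2,
   eliminating the argument pointer to the stack pointer gives
   2 + 8 + 6 + 4 = 20, the argument pointer to the frame pointer gives
   6 + 4 = 10, and the frame pointer to the stack pointer gives
   2 + 8 = 10.  */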
1350
1351 static bool
1352 rl78_is_naked_func (void)
1353 {
1354 return (lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE);
1355 }
1356
1357 /* Check whether the current function uses mul/div insns for the G13 target. */
1358
1359 static bool
1360 check_mduc_usage (void)
1361 {
1362 rtx_insn * insn;
1363 basic_block bb;
1364
1365 FOR_EACH_BB_FN (bb, cfun)
1366 {
1367 FOR_BB_INSNS (bb, insn)
1368 {
1369 if (INSN_P (insn)
1370 && (get_attr_is_g13_muldiv_insn (insn) == IS_G13_MULDIV_INSN_YES))
1371 return true;
1372 }
1373 }
1374 return false;
1375 }
1376
1377 /* Expand the function prologue (from the prologue pattern). */
1378
1379 void
1380 rl78_expand_prologue (void)
1381 {
1382 int i, fs;
1383 rtx sp = gen_rtx_REG (HImode, STACK_POINTER_REGNUM);
1384 rtx ax = gen_rtx_REG (HImode, AX_REG);
1385 int rb = 0;
1386
1387 if (rl78_is_naked_func ())
1388 return;
1389
1390 /* Always re-compute the frame info - the register usage may have changed. */
1391 rl78_compute_frame_info ();
1392
1393 if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
1394 cfun->machine->framesize += ARRAY_SIZE (mduc_regs) * 2;
1395
1396 if (flag_stack_usage_info)
1397 current_function_static_stack_size = cfun->machine->framesize;
1398
1399 if (is_interrupt_func (cfun->decl) && !TARGET_G10)
1400 for (i = 0; i < 4; i++)
1401 if (cfun->machine->need_to_push [i])
1402 {
1403 /* Select Bank 0 if we are using any registers from Bank 0. */
1404 emit_insn (gen_sel_rb (GEN_INT (0)));
1405 break;
1406 }
1407
1408 for (i = 0; i < 16; i++)
1409 if (cfun->machine->need_to_push [i])
1410 {
1411 int reg = i * 2;
1412
1413 if (TARGET_G10)
1414 {
1415 if (reg >= 8)
1416 {
1417 emit_move_insn (ax, gen_rtx_REG (HImode, reg));
1418 reg = AX_REG;
1419 }
1420 }
1421 else
1422 {
1423 int need_bank = i/4;
1424
1425 if (need_bank != rb)
1426 {
1427 emit_insn (gen_sel_rb (GEN_INT (need_bank)));
1428 rb = need_bank;
1429 }
1430 }
1431
1432 F (emit_insn (gen_push (gen_rtx_REG (HImode, reg))));
1433 }
1434
1435 if (rb != 0)
1436 emit_insn (gen_sel_rb (GEN_INT (0)));
1437
1438 /* Save ES register inside interrupt functions if it is used. */
1439 if (is_interrupt_func (cfun->decl) && cfun->machine->uses_es)
1440 {
1441 emit_insn (gen_movqi_from_es (gen_rtx_REG (QImode, A_REG)));
1442 F (emit_insn (gen_push (ax)));
1443 }
1444
1445 /* Save MDUC registers inside interrupt routine. */
1446 if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
1447 {
1448 for (unsigned i = 0; i < ARRAY_SIZE (mduc_regs); i++)
1449 {
1450 mduc_reg_type *reg = mduc_regs + i;
1451 rtx mem_mduc = gen_rtx_MEM (reg->mode, GEN_INT (reg->address));
1452
1453 MEM_VOLATILE_P (mem_mduc) = 1;
1454 if (reg->mode == QImode)
1455 emit_insn (gen_movqi (gen_rtx_REG (QImode, A_REG), mem_mduc));
1456 else
1457 emit_insn (gen_movhi (gen_rtx_REG (HImode, AX_REG), mem_mduc));
1458
1459 emit_insn (gen_push (gen_rtx_REG (HImode, AX_REG)));
1460 }
1461 }
1462
1463 if (frame_pointer_needed)
1464 {
1465 F (emit_move_insn (ax, sp));
1466 F (emit_move_insn (gen_rtx_REG (HImode, FRAME_POINTER_REGNUM), ax));
1467 }
1468
1469 fs = cfun->machine->framesize_locals + cfun->machine->framesize_outgoing;
1470 if (fs > 0)
1471 {
1472 /* If we need to subtract more than 254*3 then it is faster and
1473 smaller to move SP into AX and perform the subtraction there. */
1474 if (fs > 254 * 3)
1475 {
1476 rtx insn;
1477
1478 emit_move_insn (ax, sp);
1479 emit_insn (gen_subhi3 (ax, ax, GEN_INT (fs)));
1480 insn = F (emit_move_insn (sp, ax));
1481 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1482 gen_rtx_SET (sp, gen_rtx_PLUS (HImode, sp,
1483 GEN_INT (-fs))));
1484 }
1485 else
1486 {
1487 while (fs > 0)
1488 {
1489 int fs_byte = (fs > 254) ? 254 : fs;
1490
1491 F (emit_insn (gen_subhi3 (sp, sp, GEN_INT (fs_byte))));
1492 fs -= fs_byte;
1493 }
1494 }
1495 }
1496 }
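/* Example of the frame-allocation strategy above: for fs = 600 the
   254-byte limit of the SP-relative subtract is honoured by emitting
   three subtracts of 254, 254 and 92 bytes, whereas a frame larger
   than 254 * 3 = 762 bytes is allocated with a single subtract
   performed in AX and copied back into SP.  */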
1497
1498 /* Expand the function epilogue (from the epilogue pattern). */
1499 void
1500 rl78_expand_epilogue (void)
1501 {
1502 int i, fs;
1503 rtx sp = gen_rtx_REG (HImode, STACK_POINTER_REGNUM);
1504 rtx ax = gen_rtx_REG (HImode, AX_REG);
1505 int rb = 0;
1506
1507 if (rl78_is_naked_func ())
1508 return;
1509
1510 if (frame_pointer_needed)
1511 {
1512 emit_move_insn (ax, gen_rtx_REG (HImode, FRAME_POINTER_REGNUM));
1513 emit_move_insn (sp, ax);
1514 }
1515 else
1516 {
1517 fs = cfun->machine->framesize_locals + cfun->machine->framesize_outgoing;
1518 if (fs > 254 * 3)
1519 {
1520 emit_move_insn (ax, sp);
1521 emit_insn (gen_addhi3 (ax, ax, GEN_INT (fs)));
1522 emit_move_insn (sp, ax);
1523 }
1524 else
1525 {
1526 while (fs > 0)
1527 {
1528 int fs_byte = (fs > 254) ? 254 : fs;
1529
1530 emit_insn (gen_addhi3 (sp, sp, GEN_INT (fs_byte)));
1531 fs -= fs_byte;
1532 }
1533 }
1534 }
1535
1536 /* Restore MDUC registers from interrupt routine. */
1537 if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
1538 {
1539 for (int i = ARRAY_SIZE (mduc_regs) - 1; i >= 0; i--)
1540 {
1541 mduc_reg_type *reg = mduc_regs + i;
1542 rtx mem_mduc = gen_rtx_MEM (reg->mode, GEN_INT (reg->address));
1543
1544 emit_insn (gen_pop (gen_rtx_REG (HImode, AX_REG)));
1545 MEM_VOLATILE_P (mem_mduc) = 1;
1546 if (reg->mode == QImode)
1547 emit_insn (gen_movqi (mem_mduc, gen_rtx_REG (QImode, A_REG)));
1548 else
1549 emit_insn (gen_movhi (mem_mduc, gen_rtx_REG (HImode, AX_REG)));
1550 }
1551 }
1552
1553 if (is_interrupt_func (cfun->decl) && cfun->machine->uses_es)
1554 {
1555 emit_insn (gen_pop (gen_rtx_REG (HImode, AX_REG)));
1556 emit_insn (gen_movqi_to_es (gen_rtx_REG (QImode, A_REG)));
1557 }
1558
1559 for (i = 15; i >= 0; i--)
1560 if (cfun->machine->need_to_push [i])
1561 {
1562 rtx dest = gen_rtx_REG (HImode, i * 2);
1563
1564 if (TARGET_G10)
1565 {
1566 if (i < 8)
1567 emit_insn (gen_pop (dest));
1568 else
1569 {
1570 emit_insn (gen_pop (ax));
1571 emit_move_insn (dest, ax);
1572 /* Generate a USE of the pop'd register so that DCE will not eliminate the move. */
1573 emit_insn (gen_use (dest));
1574 }
1575 }
1576 else
1577 {
1578 int need_bank = i / 4;
1579
1580 if (need_bank != rb)
1581 {
1582 emit_insn (gen_sel_rb (GEN_INT (need_bank)));
1583 rb = need_bank;
1584 }
1585 emit_insn (gen_pop (dest));
1586 }
1587 }
1588
1589 if (rb != 0)
1590 emit_insn (gen_sel_rb (GEN_INT (0)));
1591
1592 if (cfun->machine->trampolines_used)
1593 emit_insn (gen_trampoline_uninit ());
1594
1595 if (is_brk_interrupt_func (cfun->decl))
1596 emit_jump_insn (gen_brk_interrupt_return ());
1597 else if (is_interrupt_func (cfun->decl))
1598 emit_jump_insn (gen_interrupt_return ());
1599 else
1600 emit_jump_insn (gen_rl78_return ());
1601 }
1602
1603 /* Likewise, for exception handlers. */
1604 void
1605 rl78_expand_eh_epilogue (rtx x ATTRIBUTE_UNUSED)
1606 {
1607 /* FIXME - replace this with an indirect jump with stack adjust. */
1608 emit_jump_insn (gen_rl78_return ());
1609 }
1610
1611 #undef TARGET_ASM_FUNCTION_PROLOGUE
1612 #define TARGET_ASM_FUNCTION_PROLOGUE rl78_start_function
1613
1614 static void
1615 add_vector_labels (FILE *file, const char *aname)
1616 {
1617 tree vec_attr;
1618 tree val_attr;
1619 const char *vname = "vect";
1620 const char *s;
1621 int vnum;
1622
1623 /* This node is for the vector/interrupt tag itself */
1624 vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
1625 if (!vec_attr)
1626 return;
1627
1628 /* Now point it at the first argument */
1629 vec_attr = TREE_VALUE (vec_attr);
1630
1631 /* Iterate through the arguments. */
1632 while (vec_attr)
1633 {
1634 val_attr = TREE_VALUE (vec_attr);
1635 switch (TREE_CODE (val_attr))
1636 {
1637 case STRING_CST:
1638 s = TREE_STRING_POINTER (val_attr);
1639 goto string_id_common;
1640
1641 case IDENTIFIER_NODE:
1642 s = IDENTIFIER_POINTER (val_attr);
1643
1644 string_id_common:
1645 if (strcmp (s, "$default") == 0)
1646 {
1647 fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
1648 fprintf (file, "$tableentry$default$%s:\n", vname);
1649 }
1650 else
1651 vname = s;
1652 break;
1653
1654 case INTEGER_CST:
1655 vnum = TREE_INT_CST_LOW (val_attr);
1656
1657 fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
1658 fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
1659 break;
1660
1661 default:
1662 ;
1663 }
1664
1665 vec_attr = TREE_CHAIN (vec_attr);
1666 }
1667
1668 }
1669
1670 /* We don't use this to actually emit the function prologue. We use
1671 this to insert a comment in the asm file describing the
1672 function. */
1673 static void
1674 rl78_start_function (FILE *file)
1675 {
1676 int i;
1677
1678 add_vector_labels (file, "interrupt");
1679 add_vector_labels (file, "vector");
1680
1681 if (cfun->machine->framesize == 0)
1682 return;
1683 fprintf (file, "\t; start of function\n");
1684
1685 if (cfun->machine->framesize_regs)
1686 {
1687 fprintf (file, "\t; push %d:", cfun->machine->framesize_regs);
1688 for (i = 0; i < 16; i ++)
1689 if (cfun->machine->need_to_push[i])
1690 fprintf (file, " %s", word_regnames[i*2]);
1691 fprintf (file, "\n");
1692 }
1693
1694 if (frame_pointer_needed)
1695 fprintf (file, "\t; $fp points here (r22)\n");
1696
1697 if (cfun->machine->framesize_locals)
1698 fprintf (file, "\t; locals: %d byte%s\n", cfun->machine->framesize_locals,
1699 cfun->machine->framesize_locals == 1 ? "" : "s");
1700
1701 if (cfun->machine->framesize_outgoing)
1702 fprintf (file, "\t; outgoing: %d byte%s\n", cfun->machine->framesize_outgoing,
1703 cfun->machine->framesize_outgoing == 1 ? "" : "s");
1704
1705 if (cfun->machine->uses_es)
1706 fprintf (file, "\t; uses ES register\n");
1707
1708 if (MUST_SAVE_MDUC_REGISTERS)
1709 fprintf (file, "\t; preserves MDUC registers\n");
1710 }
1711
1712 /* Return an RTL describing where a function return value of type RET_TYPE
1713 is held. */
1714
1715 #undef TARGET_FUNCTION_VALUE
1716 #define TARGET_FUNCTION_VALUE rl78_function_value
1717
1718 static rtx
1719 rl78_function_value (const_tree ret_type,
1720 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1721 bool outgoing ATTRIBUTE_UNUSED)
1722 {
1723 machine_mode mode = TYPE_MODE (ret_type);
1724
1725 return gen_rtx_REG (mode, 8);
1726 }
1727
1728 #undef TARGET_PROMOTE_FUNCTION_MODE
1729 #define TARGET_PROMOTE_FUNCTION_MODE rl78_promote_function_mode
1730
1731 static machine_mode
1732 rl78_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1733 machine_mode mode,
1734 int *punsignedp ATTRIBUTE_UNUSED,
1735 const_tree funtype ATTRIBUTE_UNUSED, int for_return ATTRIBUTE_UNUSED)
1736 {
1737 return mode;
1738 }
1739
1740 #undef TARGET_FUNCTION_ARG
1741 #define TARGET_FUNCTION_ARG rl78_function_arg
1742
1743 static rtx
1744 rl78_function_arg (cumulative_args_t, const function_arg_info &)
1745 {
1746 return NULL_RTX;
1747 }
1748
1749 #undef TARGET_FUNCTION_ARG_ADVANCE
1750 #define TARGET_FUNCTION_ARG_ADVANCE rl78_function_arg_advance
1751
1752 static void
1753 rl78_function_arg_advance (cumulative_args_t cum_v,
1754 const function_arg_info &arg)
1755 {
1756 int rounded_size;
1757 CUMULATIVE_ARGS * cum = get_cumulative_args (cum_v);
1758
1759 rounded_size = arg.promoted_size_in_bytes ();
1760 if (rounded_size & 1)
1761 rounded_size ++;
1762 (*cum) += rounded_size;
1763 }
1764
1765 #undef TARGET_FUNCTION_ARG_BOUNDARY
1766 #define TARGET_FUNCTION_ARG_BOUNDARY rl78_function_arg_boundary
1767
1768 static unsigned int
1769 rl78_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
1770 const_tree type ATTRIBUTE_UNUSED)
1771 {
1772 return 16;
1773 }
1774
1775 /* Supported modifier letters:
1776
1777 A - address of a MEM
1778 S - SADDR form of a real register
1779 v - real register corresponding to a virtual register
1780 m - minus - negative of CONST_INT value.
1781 C - inverse of a conditional (NE vs EQ for example)
1782 C - complement of an integer
1783 z - collapsed conditional
1784 s - shift count mod 8
1785 S - shift count mod 16
1786 r - reverse shift count (8-(count mod 8))
1787 B - bit position
1788
1789 h - bottom HI of an SI
1790 H - top HI of an SI
1791 q - bottom QI of an HI
1792 Q - top QI of an HI
1793 e - third QI of an SI (i.e. where the ES register gets values from)
1794 E - fourth QI of an SI (i.e. MSB)
1795
1796 p - Add +0 to a zero-indexed HL based address.
1797 */
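/* For instance, an output template might refer to the low and high
   halves of an SImode operand as "%h1" and "%H1", or pick apart an
   HImode register with "%q1" and "%Q1" (hypothetical template
   fragments, shown only to illustrate how the letters are used).  */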
1798
1799 /* Implements the bulk of rl78_print_operand, below. We do it this
1800 way because we need to test for a constant at the top level and
1801 insert the '#', but not test for it anywhere else as we recurse
1802 down into the operand. */
1803 static void
1804 rl78_print_operand_1 (FILE * file, rtx op, int letter)
1805 {
1806 int need_paren;
1807
1808 switch (GET_CODE (op))
1809 {
1810 case MEM:
1811 if (letter == 'A')
1812 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1813 else
1814 {
1815 if (rl78_far_p (op))
1816 {
1817 fprintf (file, "es:");
1818 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
1819 op = gen_rtx_MEM (GET_MODE (op), XVECEXP (XEXP (op, 0), 0, 1));
1820 }
1821 if (letter == 'H')
1822 {
1823 op = adjust_address (op, HImode, 2);
1824 letter = 0;
1825 }
1826 if (letter == 'h')
1827 {
1828 op = adjust_address (op, HImode, 0);
1829 letter = 0;
1830 }
1831 if (letter == 'Q')
1832 {
1833 op = adjust_address (op, QImode, 1);
1834 letter = 0;
1835 }
1836 if (letter == 'q')
1837 {
1838 op = adjust_address (op, QImode, 0);
1839 letter = 0;
1840 }
1841 if (letter == 'e')
1842 {
1843 op = adjust_address (op, QImode, 2);
1844 letter = 0;
1845 }
1846 if (letter == 'E')
1847 {
1848 op = adjust_address (op, QImode, 3);
1849 letter = 0;
1850 }
1851 if (CONSTANT_P (XEXP (op, 0)))
1852 {
1853 if (!rl78_saddr_p (op))
1854 fprintf (file, "!");
1855 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1856 }
1857 else if (GET_CODE (XEXP (op, 0)) == PLUS
1858 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF)
1859 {
1860 if (!rl78_saddr_p (op))
1861 fprintf (file, "!");
1862 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1863 }
1864 else if (GET_CODE (XEXP (op, 0)) == PLUS
1865 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1866 && REGNO (XEXP (XEXP (op, 0), 0)) == 2)
1867 {
1868 rl78_print_operand_1 (file, XEXP (XEXP (op, 0), 1), 'u');
1869 fprintf (file, "[");
1870 rl78_print_operand_1 (file, XEXP (XEXP (op, 0), 0), 0);
1871 if (letter == 'p' && GET_CODE (XEXP (op, 0)) == REG)
1872 fprintf (file, "+0");
1873 fprintf (file, "]");
1874 }
1875 else
1876 {
1877 op = XEXP (op, 0);
1878 fprintf (file, "[");
1879 rl78_print_operand_1 (file, op, letter);
1880 if (letter == 'p' && REG_P (op) && REGNO (op) == 6)
1881 fprintf (file, "+0");
1882 fprintf (file, "]");
1883 }
1884 }
1885 break;
1886
1887 case REG:
1888 if (letter == 'Q')
1889 fprintf (file, "%s", reg_names [REGNO (op) | 1]);
1890 else if (letter == 'H')
1891 fprintf (file, "%s", reg_names [REGNO (op) + 2]);
1892 else if (letter == 'q')
1893 fprintf (file, "%s", reg_names [REGNO (op) & ~1]);
1894 else if (letter == 'e')
1895 fprintf (file, "%s", reg_names [REGNO (op) + 2]);
1896 else if (letter == 'E')
1897 fprintf (file, "%s", reg_names [REGNO (op) + 3]);
1898 else if (letter == 'S')
1899 fprintf (file, "0x%x", 0xffef8 + REGNO (op));
1900 else if (GET_MODE (op) == HImode
1901 && ! (REGNO (op) & ~0xfe))
1902 {
1903 if (letter == 'v')
1904 fprintf (file, "%s", word_regnames [REGNO (op) % 8]);
1905 else
1906 fprintf (file, "%s", word_regnames [REGNO (op)]);
1907 }
1908 else
1909 fprintf (file, "%s", reg_names [REGNO (op)]);
1910 break;
1911
1912 case CONST_INT:
1913 if (letter == 'Q')
1914 fprintf (file, "%ld", INTVAL (op) >> 8);
1915 else if (letter == 'H')
1916 fprintf (file, "%ld", INTVAL (op) >> 16);
1917 else if (letter == 'q')
1918 fprintf (file, "%ld", INTVAL (op) & 0xff);
1919 else if (letter == 'h')
1920 fprintf (file, "%ld", INTVAL (op) & 0xffff);
1921 else if (letter == 'e')
1922 fprintf (file, "%ld", (INTVAL (op) >> 16) & 0xff);
1923 else if (letter == 'B')
1924 {
1925 int ival = INTVAL (op);
1926 if (ival == -128)
1927 ival = 0x80;
1928 if (exact_log2 (ival) >= 0)
1929 fprintf (file, "%d", exact_log2 (ival));
1930 else
1931 fprintf (file, "%d", exact_log2 (~ival & 0xff));
1932 }
1933 else if (letter == 'E')
1934 fprintf (file, "%ld", (INTVAL (op) >> 24) & 0xff);
1935 else if (letter == 'm')
1936 fprintf (file, "%ld", - INTVAL (op));
1937 else if (letter == 's')
1938 fprintf (file, "%ld", INTVAL (op) % 8);
1939 else if (letter == 'S')
1940 fprintf (file, "%ld", INTVAL (op) % 16);
1941 else if (letter == 'r')
1942 fprintf (file, "%ld", 8 - (INTVAL (op) % 8));
1943 else if (letter == 'C')
1944 fprintf (file, "%ld", (INTVAL (op) ^ 0x8000) & 0xffff);
1945 else
1946 fprintf (file, "%ld", INTVAL (op));
1947 break;
1948
1949 case CONST:
1950 rl78_print_operand_1 (file, XEXP (op, 0), letter);
1951 break;
1952
1953 case ZERO_EXTRACT:
1954 {
1955 int bits = INTVAL (XEXP (op, 1));
1956 int ofs = INTVAL (XEXP (op, 2));
1957 if (bits == 16 && ofs == 0)
1958 fprintf (file, "%%lo16(");
1959 else if (bits == 16 && ofs == 16)
1960 fprintf (file, "%%hi16(");
1961 else if (bits == 8 && ofs == 16)
1962 fprintf (file, "%%hi8(");
1963 else
1964 gcc_unreachable ();
1965 rl78_print_operand_1 (file, XEXP (op, 0), 0);
1966 fprintf (file, ")");
1967 }
1968 break;
1969
1970 case ZERO_EXTEND:
1971 if (GET_CODE (XEXP (op, 0)) == REG)
1972 fprintf (file, "%s", reg_names [REGNO (XEXP (op, 0))]);
1973 else
1974 print_rtl (file, op);
1975 break;
1976
1977 case PLUS:
1978 need_paren = 0;
1979 if (letter == 'H')
1980 {
1981 fprintf (file, "%%hi16(");
1982 need_paren = 1;
1983 letter = 0;
1984 }
1985 if (letter == 'h')
1986 {
1987 fprintf (file, "%%lo16(");
1988 need_paren = 1;
1989 letter = 0;
1990 }
1991 if (letter == 'e')
1992 {
1993 fprintf (file, "%%hi8(");
1994 need_paren = 1;
1995 letter = 0;
1996 }
1997 if (letter == 'q' || letter == 'Q')
1998 output_operand_lossage ("q/Q modifiers invalid for symbol references");
1999
2000 if (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND)
2001 {
2002 if (GET_CODE (XEXP (op, 1)) == SYMBOL_REF
2003 && SYMBOL_REF_DECL (XEXP (op, 1))
2004 && TREE_CODE (SYMBOL_REF_DECL (XEXP (op, 1))) == FUNCTION_DECL)
2005 {
2006 fprintf (file, "%%code(");
2007 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (XEXP (op, 1), 0)));
2008 fprintf (file, "+");
2009 rl78_print_operand_1 (file, XEXP (op, 0), letter);
2010 fprintf (file, ")");
2011 }
2012 else
2013 {
2014 rl78_print_operand_1 (file, XEXP (op, 1), letter);
2015 fprintf (file, "+");
2016 rl78_print_operand_1 (file, XEXP (op, 0), letter);
2017 }
2018 }
2019 else
2020 {
2021 if (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
2022 && SYMBOL_REF_DECL (XEXP (op, 0))
2023 && TREE_CODE (SYMBOL_REF_DECL (XEXP (op, 0))) == FUNCTION_DECL)
2024 {
2025 fprintf (file, "%%code(");
2026 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (XEXP (op, 0), 0)));
2027 fprintf (file, "+");
2028 rl78_print_operand_1 (file, XEXP (op, 1), letter);
2029 fprintf (file, ")");
2030 }
2031 else
2032 {
2033 rl78_print_operand_1 (file, XEXP (op, 0), letter);
2034 fprintf (file, "+");
2035 rl78_print_operand_1 (file, XEXP (op, 1), letter);
2036 }
2037 }
2038 if (need_paren)
2039 fprintf (file, ")");
2040 break;
2041
2042 case SUBREG:
2043 if (GET_MODE (op) == HImode
2044 && SUBREG_BYTE (op) == 0)
2045 {
2046 fprintf (file, "%%lo16(");
2047 rl78_print_operand_1 (file, SUBREG_REG (op), 0);
2048 fprintf (file, ")");
2049 }
2050 else if (GET_MODE (op) == HImode
2051 && SUBREG_BYTE (op) == 2)
2052 {
2053 fprintf (file, "%%hi16(");
2054 rl78_print_operand_1 (file, SUBREG_REG (op), 0);
2055 fprintf (file, ")");
2056 }
2057 else
2058 {
2059 fprintf (file, "(%s)", GET_RTX_NAME (GET_CODE (op)));
2060 }
2061 break;
2062
2063 case SYMBOL_REF:
2064 need_paren = 0;
2065 if (letter == 'H')
2066 {
2067 fprintf (file, "%%hi16(");
2068 need_paren = 1;
2069 letter = 0;
2070 }
2071 if (letter == 'h')
2072 {
2073 fprintf (file, "%%lo16(");
2074 need_paren = 1;
2075 letter = 0;
2076 }
2077 if (letter == 'e')
2078 {
2079 fprintf (file, "%%hi8(");
2080 need_paren = 1;
2081 letter = 0;
2082 }
2083 if (letter == 'q' || letter == 'Q')
2084 output_operand_lossage ("q/Q modifiers invalid for symbol references");
2085
2086 if (SYMBOL_REF_DECL (op) && TREE_CODE (SYMBOL_REF_DECL (op)) == FUNCTION_DECL)
2087 {
2088 fprintf (file, "%%code(");
2089 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (op, 0)));
2090 fprintf (file, ")");
2091 }
2092 else
2093 assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (op, 0)));
2094 if (need_paren)
2095 fprintf (file, ")");
2096 break;
2097
2098 case CODE_LABEL:
2099 case LABEL_REF:
2100 output_asm_label (op);
2101 break;
2102
2103 case LTU:
2104 if (letter == 'z')
2105 fprintf (file, "#comparison eliminated");
2106 else
2107 fprintf (file, letter == 'C' ? "nc" : "c");
2108 break;
2109 case LEU:
2110 if (letter == 'z')
2111 fprintf (file, "br");
2112 else
2113 fprintf (file, letter == 'C' ? "h" : "nh");
2114 break;
2115 case GEU:
2116 if (letter == 'z')
2117 fprintf (file, "br");
2118 else
2119 fprintf (file, letter == 'C' ? "c" : "nc");
2120 break;
2121 case GTU:
2122 if (letter == 'z')
2123 fprintf (file, "#comparison eliminated");
2124 else
2125 fprintf (file, letter == 'C' ? "nh" : "h");
2126 break;
2127 case EQ:
2128 if (letter == 'z')
2129 fprintf (file, "br");
2130 else
2131 fprintf (file, letter == 'C' ? "nz" : "z");
2132 break;
2133 case NE:
2134 if (letter == 'z')
2135 fprintf (file, "#comparison eliminated");
2136 else
2137 fprintf (file, letter == 'C' ? "z" : "nz");
2138 break;
2139
2140 /* Note: these assume appropriate adjustments were made so that
2141 unsigned comparisons, which is all this chip has, will
2142 work. */
2143 case LT:
2144 if (letter == 'z')
2145 fprintf (file, "#comparison eliminated");
2146 else
2147 fprintf (file, letter == 'C' ? "nc" : "c");
2148 break;
2149 case LE:
2150 if (letter == 'z')
2151 fprintf (file, "br");
2152 else
2153 fprintf (file, letter == 'C' ? "h" : "nh");
2154 break;
2155 case GE:
2156 if (letter == 'z')
2157 fprintf (file, "br");
2158 else
2159 fprintf (file, letter == 'C' ? "c" : "nc");
2160 break;
2161 case GT:
2162 if (letter == 'z')
2163 fprintf (file, "#comparison eliminated");
2164 else
2165 fprintf (file, letter == 'C' ? "nh" : "h");
2166 break;
2167
2168 default:
2169 fprintf (file, "(%s)", GET_RTX_NAME (GET_CODE (op)));
2170 break;
2171 }
2172 }
2173
2174 #undef TARGET_PRINT_OPERAND
2175 #define TARGET_PRINT_OPERAND rl78_print_operand
2176
2177 static void
2178 rl78_print_operand (FILE * file, rtx op, int letter)
2179 {
2180 if (CONSTANT_P (op) && letter != 'u' && letter != 's' && letter != 'r' && letter != 'S' && letter != 'B')
2181 fprintf (file, "#");
2182 rl78_print_operand_1 (file, op, letter);
2183 }
2184
2185 #undef TARGET_TRAMPOLINE_INIT
2186 #define TARGET_TRAMPOLINE_INIT rl78_trampoline_init
2187
2188 /* Note that the RL78's addressing makes it very difficult to do
2189 trampolines on the stack. So, libgcc has a small pool of
2190 trampolines from which one is allocated to this task. */
2191 static void
2192 rl78_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
2193 {
2194 rtx mov_addr, thunk_addr;
2195 rtx function = XEXP (DECL_RTL (fndecl), 0);
2196
2197 mov_addr = adjust_address (m_tramp, HImode, 0);
2198 thunk_addr = gen_reg_rtx (HImode);
2199
2200 function = force_reg (HImode, function);
2201 static_chain = force_reg (HImode, static_chain);
2202
2203 emit_insn (gen_trampoline_init (thunk_addr, function, static_chain));
2204 emit_move_insn (mov_addr, thunk_addr);
2205
2206 cfun->machine->trampolines_used = 1;
2207 }
2208
2209 #undef TARGET_TRAMPOLINE_ADJUST_ADDRESS
2210 #define TARGET_TRAMPOLINE_ADJUST_ADDRESS rl78_trampoline_adjust_address
2211
2212 static rtx
2213 rl78_trampoline_adjust_address (rtx m_tramp)
2214 {
2215 rtx x = gen_rtx_MEM (HImode, m_tramp);
2216 return x;
2217 }
2218
2219 /* Expander for cbranchqi4 and cbranchhi4. RL78 is missing some of
2220 the "normal" compares, specifically, it only has unsigned compares,
2221 so we must synthesize the missing ones. */
2222 void
2223 rl78_expand_compare (rtx *operands)
2224 {
2225 if (GET_CODE (operands[2]) == MEM)
2226 operands[2] = copy_to_mode_reg (GET_MODE (operands[2]), operands[2]);
2227 }
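
/* A minimal illustrative sketch (not actual compiler output) of how a
   signed compare can be synthesized when only unsigned compares are
   available: biasing both operands by 0x8000 turns the signed HImode
   comparison into an equivalent unsigned one, which is the kind of
   adjustment referred to in the note above the LT/LE/GE/GT cases of
   rl78_print_operand_1:

     signed:    a <  b
     unsigned:  (a ^ 0x8000) <u (b ^ 0x8000)

   For example, -1 (0xffff) < 1 (0x0001) becomes 0x7fff <u 0x8001,
   which is true, as required.  */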
2228
2229
2230
2231 /* Define this to 1 if you are debugging the peephole optimizers. */
2232 #define DEBUG_PEEP 0
2233
2234 /* Predicate used to enable the peephole2 patterns in rl78-virt.md.
2235 The default "word" size is a byte so we can effectively use all the
2236 registers, but we want to do 16-bit moves whenever possible. This
2237 function determines when such a move is an option. */
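/* As a rough sketch of the transformation being enabled (illustrative
   RTL, not taken from a real dump): two adjacent byte moves such as

     (set (reg:QI 8) (mem:QI (reg:HI sp)))
     (set (reg:QI 9) (mem:QI (plus:HI (reg:HI sp) (const_int 1))))

   can be replaced by a single word move

     (set (reg:HI 8) (mem:HI (reg:HI sp)))

   provided the destination registers form an even/odd pair and the
   memory operand is suitably aligned, which is what the checks below
   verify.  */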
2238 bool
2239 rl78_peep_movhi_p (rtx *operands)
2240 {
2241 int i;
2242 rtx m, a;
2243
2244 /* (set (op0) (op1))
2245 (set (op2) (op3)) */
2246
2247 if (! rl78_virt_insns_ok ())
2248 return false;
2249
2250 #if DEBUG_PEEP
2251 fprintf (stderr, "\033[33m");
2252 debug_rtx (operands[0]);
2253 debug_rtx (operands[1]);
2254 debug_rtx (operands[2]);
2255 debug_rtx (operands[3]);
2256 fprintf (stderr, "\033[0m");
2257 #endif
2258
2259 /* You can move a constant to memory as QImode, but not HImode. */
2260 if (GET_CODE (operands[0]) == MEM
2261 && GET_CODE (operands[1]) != REG)
2262 {
2263 #if DEBUG_PEEP
2264 fprintf (stderr, "no peep: move constant to memory\n");
2265 #endif
2266 return false;
2267 }
2268
2269 if (rtx_equal_p (operands[0], operands[3]))
2270 {
2271 #if DEBUG_PEEP
2272 fprintf (stderr, "no peep: overlapping\n");
2273 #endif
2274 return false;
2275 }
2276
2277 for (i = 0; i < 2; i ++)
2278 {
2279 if (GET_CODE (operands[i]) != GET_CODE (operands[i+2]))
2280 {
2281 #if DEBUG_PEEP
2282 fprintf (stderr, "no peep: different codes\n");
2283 #endif
2284 return false;
2285 }
2286 if (GET_MODE (operands[i]) != GET_MODE (operands[i+2]))
2287 {
2288 #if DEBUG_PEEP
2289 fprintf (stderr, "no peep: different modes\n");
2290 #endif
2291 return false;
2292 }
2293
2294 switch (GET_CODE (operands[i]))
2295 {
2296 case REG:
2297 /* LSB MSB */
2298 if (REGNO (operands[i]) + 1 != REGNO (operands[i+2])
2299 || GET_MODE (operands[i]) != QImode)
2300 {
2301 #if DEBUG_PEEP
2302 fprintf (stderr, "no peep: wrong regnos %d %d %d\n",
2303 REGNO (operands[i]), REGNO (operands[i+2]),
2304 i);
2305 #endif
2306 return false;
2307 }
2308 if (! rl78_hard_regno_mode_ok (REGNO (operands[i]), HImode))
2309 {
2310 #if DEBUG_PEEP
2311 fprintf (stderr, "no peep: reg %d not HI\n", REGNO (operands[i]));
2312 #endif
2313 return false;
2314 }
2315 break;
2316
2317 case CONST_INT:
2318 break;
2319
2320 case MEM:
2321 if (GET_MODE (operands[i]) != QImode)
2322 return false;
2323 if (MEM_ALIGN (operands[i]) < 16)
2324 return false;
2325 a = XEXP (operands[i], 0);
2326 if (GET_CODE (a) == CONST)
2327 a = XEXP (a, 0);
2328 if (GET_CODE (a) == PLUS)
2329 a = XEXP (a, 1);
2330 if (GET_CODE (a) == CONST_INT
2331 && INTVAL (a) & 1)
2332 {
2333 #if DEBUG_PEEP
2334 fprintf (stderr, "no peep: misaligned mem %d\n", i);
2335 debug_rtx (operands[i]);
2336 #endif
2337 return false;
2338 }
2339 m = adjust_address (operands[i], QImode, 1);
2340 if (! rtx_equal_p (m, operands[i+2]))
2341 {
2342 #if DEBUG_PEEP
2343 fprintf (stderr, "no peep: wrong mem %d\n", i);
2344 debug_rtx (m);
2345 debug_rtx (operands[i+2]);
2346 #endif
2347 return false;
2348 }
2349 break;
2350
2351 default:
2352 #if DEBUG_PEEP
2353 fprintf (stderr, "no peep: wrong rtx %d\n", i);
2354 #endif
2355 return false;
2356 }
2357 }
2358 #if DEBUG_PEEP
2359 fprintf (stderr, "\033[32mpeep!\033[0m\n");
2360 #endif
2361 return true;
2362 }
2363
2364 /* Likewise, when a peephole is activated, this function helps compute
2365 the new operands. */
2366 void
2367 rl78_setup_peep_movhi (rtx *operands)
2368 {
2369 int i;
2370
2371 for (i = 0; i < 2; i ++)
2372 {
2373 switch (GET_CODE (operands[i]))
2374 {
2375 case REG:
2376 operands[i+4] = gen_rtx_REG (HImode, REGNO (operands[i]));
2377 break;
2378
2379 case CONST_INT:
2380 operands[i+4] = GEN_INT ((INTVAL (operands[i]) & 0xff) + ((char) INTVAL (operands[i+2])) * 256);
2381 break;
2382
2383 case MEM:
2384 operands[i+4] = adjust_address (operands[i], HImode, 0);
2385 break;
2386
2387 default:
2388 break;
2389 }
2390 }
2391 }
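
/* For the CONST_INT case above, a hypothetical example: merging a store
   of 0x34 to [addr] with a store of 0x12 to [addr+1] produces the
   single HImode constant (0x34 & 0xff) + 0x12 * 256 = 0x1234, matching
   the little-endian byte order in memory.  The (char) cast sign-extends
   the high byte (on hosts where plain char is signed), so bytes
   0xff/0xff combine to the canonical -1 rather than 0xffff.  */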
2392
2393 /*
2394 How Devirtualization works in the RL78 GCC port
2395
2396 Background
2397
2398 The RL78 is an 8-bit port with some 16-bit operations. It has 32
2399 bytes of register space, in four banks, memory-mapped. One bank is
2400 the "selected" bank and holds the registers used for primary
2401 operations. Since the registers are memory mapped, often you can
2402 still refer to the unselected banks via memory accesses.
2403
2404 Virtual Registers
2405
2406 The GCC port uses bank 0 as the "selected" registers (A, X, BC, etc)
2407 and refers to the other banks via their memory addresses, although
2408 they're treated as regular registers internally. These "virtual"
2409 registers are R8 through R23 (bank3 is reserved for asm-based
2410 interrupt handlers).
2411
2412 There are four machine description files:
2413
2414 rl78.md - common register-independent patterns and definitions
2415 rl78-expand.md - expanders
2416 rl78-virt.md - patterns that match BEFORE devirtualization
2417 rl78-real.md - patterns that match AFTER devirtualization
2418
2419 At least through register allocation and reload, gcc is told that it
2420 can do pretty much anything - but may only use the virtual registers.
2421 GCC cannot properly create the varying addressing modes that the RL78
2422 supports in an efficient way.
2423
2424 Sometime after reload, the RL78 backend "devirtualizes" the RTL. It
2425 uses the "valloc" attribute in rl78-virt.md for determining the rules
2426 by which it will replace virtual registers with real registers (or
2427 not) and how to make up addressing modes. For example, insns tagged
2428 with "ro1" have a single read-only parameter, which may need to be
2429 moved from memory/constant/vreg to a suitable real register. As part
2430 of devirtualization, a flag is toggled, disabling the rl78-virt.md
2431 patterns and enabling the rl78-real.md patterns. The new patterns'
2432 constraints are used to determine the real registers used. NOTE:
2433 patterns in rl78-virt.md essentially ignore the constraints and rely on
2434 predicates, whereas the rl78-real.md ones essentially ignore the
2435 predicates and rely on the constraints.
2436
2437 The devirtualization pass is scheduled via the pass manager (despite
2438 being called "rl78_reorg") so it can be scheduled prior to var-track
2439 (the idea is to let gdb know about the new registers). Ideally, it
2440 would be scheduled right after pro/epilogue generation, so the
2441 post-reload optimizers could operate on the real registers, but when I
2442 tried that there were some issues building the target libraries.
2443
2444 During devirtualization, a simple register move optimizer is run. It
2445 would be better to run a full CSE/propagation pass on it, but
2446 that has not yet been attempted.
2447
2448 */
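
/* A rough before/after sketch of devirtualization (purely illustrative;
   the register numbers and output are made up, not taken from a real
   dump).  A virtual-mode addition kept entirely in the memory-mapped
   registers, e.g.

     (set (reg:HI 10) (plus:HI (reg:HI 10) (reg:HI 12)))

   cannot match any rl78-real.md pattern directly, so the pass below
   shuttles the operands through real registers, roughly:

     movw  ax, r10    ; move_to_acc
     addw  ax, r12    ; an rl78-real.md pattern can now match
     movw  r10, ax    ; move_from_acc

   The content-tracking code that follows exists to elide such moves
   when a real register is already known to hold the needed value.  */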
2449 #define DEBUG_ALLOC 0
2450
2451 #define OP(x) (*recog_data.operand_loc[x])
2452
2453 /* This array is used to hold knowledge about the contents of the
2454 real registers (A ... H), the memory-based registers (r8 ... r31)
2455 and the first NUM_STACK_LOCS words on the stack. We use this to
2456 avoid generating redundant move instructions.
2457
2458 A value in the range 0 .. 31 indicates register A .. r31.
2459 A value in the range 32 .. 63 indicates stack slot (value - 32).
2460 A value of NOT_KNOWN indicates that the contents of that location
2461 are not known. */
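
/* For instance (a sketch of the encoding, not real data): after a
   QImode copy of r8 into r9, update_content records the relationship
   in both directions, so content_memory[9] == 8 and
   content_memory[8] == 9, and a later copy of r8 into r9 can be
   omitted.  For an HImode copy the adjacent bytes are tracked as well,
   i.e. content_memory[10] == 8 implies content_memory[11] == 9.  */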
2462
2463 #define NUM_STACK_LOCS 32
2464 #define NOT_KNOWN 127
2465
2466 static unsigned char content_memory [32 + NUM_STACK_LOCS];
2467
2468 static unsigned char saved_update_index = NOT_KNOWN;
2469 static unsigned char saved_update_value;
2470 static machine_mode saved_update_mode;
2471
2472
2473 static inline void
2474 clear_content_memory (void)
2475 {
2476 memset (content_memory, NOT_KNOWN, sizeof content_memory);
2477 if (dump_file)
2478 fprintf (dump_file, " clear content memory\n");
2479 saved_update_index = NOT_KNOWN;
2480 }
2481
2482 /* Convert LOC into an index into the content_memory array.
2483 If LOC cannot be converted, return NOT_KNOWN. */
2484
2485 static unsigned char
2486 get_content_index (rtx loc)
2487 {
2488 machine_mode mode;
2489
2490 if (loc == NULL_RTX)
2491 return NOT_KNOWN;
2492
2493 if (REG_P (loc))
2494 {
2495 if (REGNO (loc) < 32)
2496 return REGNO (loc);
2497 return NOT_KNOWN;
2498 }
2499
2500 mode = GET_MODE (loc);
2501
2502 if (! rl78_stack_based_mem (loc, mode))
2503 return NOT_KNOWN;
2504
2505 loc = XEXP (loc, 0);
2506
2507 if (REG_P (loc))
2508 /* loc = MEM (SP) */
2509 return 32;
2510
2511 /* loc = MEM (PLUS (SP, INT)). */
2512 loc = XEXP (loc, 1);
2513
2514 if (INTVAL (loc) < NUM_STACK_LOCS)
2515 return 32 + INTVAL (loc);
2516
2517 return NOT_KNOWN;
2518 }
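
/* A few concrete index mappings as computed above (sketch):

     (reg:QI 5)                                   -> 5
     (mem:QI (reg:HI sp))                         -> 32  (stack slot 0)
     (mem:HI (plus:HI (reg:HI sp) (const_int 6))) -> 38  (stack slot 6)

   Anything else (a far memory reference, for example) yields
   NOT_KNOWN.  */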
2519
2520 /* Return a string describing content INDEX in mode MODE.
2521 WARNING: Can return a pointer to a static buffer. */
2522 static const char *
2523 get_content_name (unsigned char index, machine_mode mode)
2524 {
2525 static char buffer [128];
2526
2527 if (index == NOT_KNOWN)
2528 return "Unknown";
2529
2530 if (index > 31)
2531 sprintf (buffer, "stack slot %d", index - 32);
2532 else if (mode == HImode)
2533 sprintf (buffer, "%s%s",
2534 reg_names [index + 1], reg_names [index]);
2535 else
2536 return reg_names [index];
2537
2538 return buffer;
2539 }
2540
2541 #if DEBUG_ALLOC
2542
2543 static void
2544 display_content_memory (FILE * file)
2545 {
2546 unsigned int i;
2547
2548 fprintf (file, " Known memory contents:\n");
2549
2550 for (i = 0; i < sizeof content_memory; i++)
2551 if (content_memory[i] != NOT_KNOWN)
2552 {
2553 fprintf (file, " %s contains a copy of ", get_content_name (i, QImode));
2554 fprintf (file, "%s\n", get_content_name (content_memory [i], QImode));
2555 }
2556 }
2557 #endif
2558
2559 static void
2560 update_content (unsigned char index, unsigned char val, machine_mode mode)
2561 {
2562 unsigned int i;
2563
2564 gcc_assert (index < sizeof content_memory);
2565
2566 content_memory [index] = val;
2567 if (val != NOT_KNOWN)
2568 content_memory [val] = index;
2569
2570 /* Make the entry in dump_file *before* VAL is increased below. */
2571 if (dump_file)
2572 {
2573 fprintf (dump_file, " %s now contains ", get_content_name (index, mode));
2574 if (val == NOT_KNOWN)
2575 fprintf (dump_file, "Unknown\n");
2576 else
2577 fprintf (dump_file, "%s and vice versa\n", get_content_name (val, mode));
2578 }
2579
2580 if (mode == HImode)
2581 {
2582 val = val == NOT_KNOWN ? val : val + 1;
2583
2584 content_memory [index + 1] = val;
2585 if (val != NOT_KNOWN)
2586 {
2587 content_memory [val] = index + 1;
2588 -- val;
2589 }
2590 }
2591
2592 /* Any other places that had INDEX recorded as their contents are now invalid. */
2593 for (i = 0; i < sizeof content_memory; i++)
2594 {
2595 if (i == index
2596 || (val != NOT_KNOWN && i == val))
2597 {
2598 if (mode == HImode)
2599 ++ i;
2600 continue;
2601 }
2602
2603 if (content_memory[i] == index
2604 || (val != NOT_KNOWN && content_memory[i] == val))
2605 {
2606 content_memory[i] = NOT_KNOWN;
2607
2608 if (dump_file)
2609 fprintf (dump_file, " %s cleared\n", get_content_name (i, mode));
2610
2611 if (mode == HImode)
2612 content_memory[++ i] = NOT_KNOWN;
2613 }
2614 }
2615 }
2616
2617 /* Record that LOC contains VALUE.
2618 For HImode locations record that LOC+1 contains VALUE+1.
2619 If LOC is not a register or stack slot, do nothing.
2620 If VALUE is not a register or stack slot, clear the recorded content. */
2621
2622 static void
2623 record_content (rtx loc, rtx value)
2624 {
2625 machine_mode mode;
2626 unsigned char index;
2627 unsigned char val;
2628
2629 if ((index = get_content_index (loc)) == NOT_KNOWN)
2630 return;
2631
2632 val = get_content_index (value);
2633
2634 mode = GET_MODE (loc);
2635
2636 if (val == index)
2637 {
2638 if (! optimize)
2639 return;
2640
2641 /* This should not happen when optimizing. */
2642 #if 1
2643 fprintf (stderr, "ASSIGNMENT of location to itself detected! [%s]\n",
2644 get_content_name (val, mode));
2645 return;
2646 #else
2647 gcc_unreachable ();
2648 #endif
2649 }
2650
2651 update_content (index, val, mode);
2652 }
2653
2654 /* Returns TRUE if LOC already contains a copy of VALUE. */
2655
2656 static bool
2657 already_contains (rtx loc, rtx value)
2658 {
2659 unsigned char index;
2660 unsigned char val;
2661
2662 if ((index = get_content_index (loc)) == NOT_KNOWN)
2663 return false;
2664
2665 if ((val = get_content_index (value)) == NOT_KNOWN)
2666 return false;
2667
2668 if (content_memory [index] != val)
2669 return false;
2670
2671 if (GET_MODE (loc) == HImode)
2672 return content_memory [index + 1] == val + 1;
2673
2674 return true;
2675 }
2676
2677 bool
2678 rl78_es_addr (rtx addr)
2679 {
2680 if (GET_CODE (addr) == MEM)
2681 addr = XEXP (addr, 0);
2682 if (GET_CODE (addr) != UNSPEC)
2683 return false;
2684 if (XINT (addr, 1) != UNS_ES_ADDR)
2685 return false;
2686 return true;
2687 }
2688
2689 rtx
2690 rl78_es_base (rtx addr)
2691 {
2692 if (GET_CODE (addr) == MEM)
2693 addr = XEXP (addr, 0);
2694 addr = XVECEXP (addr, 0, 1);
2695 if (GET_CODE (addr) == CONST
2696 && GET_CODE (XEXP (addr, 0)) == ZERO_EXTRACT)
2697 addr = XEXP (XEXP (addr, 0), 0);
2698 /* Mode doesn't matter here. */
2699 return gen_rtx_MEM (HImode, addr);
2700 }
2701
2702 /* Rescans an insn to see if it's recognized again. This is done
2703 carefully to ensure that all the constraint information is accurate
2704 for the newly matched insn. */
2705 static bool
2706 insn_ok_now (rtx_insn * insn)
2707 {
2708 rtx pattern = PATTERN (insn);
2709 int i;
2710
2711 INSN_CODE (insn) = -1;
2712
2713 if (recog (pattern, insn, 0) > -1)
2714 {
2715 extract_insn (insn);
2716 if (constrain_operands (1, get_preferred_alternatives (insn)))
2717 {
2718 #if DEBUG_ALLOC
2719 fprintf (stderr, "\033[32m");
2720 debug_rtx (insn);
2721 fprintf (stderr, "\033[0m");
2722 #endif
2723 if (SET_P (pattern))
2724 record_content (SET_DEST (pattern), SET_SRC (pattern));
2725
2726 /* We need to detect far addresses that haven't been
2727 converted to es/lo16 format. */
2728 for (i=0; i<recog_data.n_operands; i++)
2729 if (GET_CODE (OP (i)) == MEM
2730 && GET_MODE (XEXP (OP (i), 0)) == SImode
2731 && GET_CODE (XEXP (OP (i), 0)) != UNSPEC)
2732 goto not_ok;
2733
2734 return true;
2735 }
2736 }
2737
2738 /* INSN is not OK as-is. It may not be recognized in real mode or
2739 it might not have satisfied its constraints in real mode. Either
2740 way it will require fixups.
2741
2742 It is vital we always re-recognize at this point as some insns
2743 have fewer operands in real mode than virtual mode. If we do
2744 not re-recognize, then the recog_data will refer to real mode
2745 operands and we may read invalid data. Usually this isn't a
2746 problem, but once in a while the data we read is bogus enough
2747 to cause a segfault or other undesirable behavior. */
2748 not_ok:
2749
2750 /* We need to re-recog the insn with virtual registers to get
2751 the operands. */
2752 INSN_CODE (insn) = -1;
2753 cfun->machine->virt_insns_ok = 1;
2754 if (recog (pattern, insn, 0) > -1)
2755 {
2756 extract_insn (insn);
2757 /* In theory this should always be true. */
2758 if (constrain_operands (0, get_preferred_alternatives (insn)))
2759 {
2760 cfun->machine->virt_insns_ok = 0;
2761 return false;
2762 }
2763 }
2764
2765 #if DEBUG_ALLOC
2766 fprintf (stderr, "\033[41;30m Unrecognized *virtual* insn \033[0m\n");
2767 debug_rtx (insn);
2768 #endif
2769 gcc_unreachable ();
2770 return false;
2771 }
2772
2773 #if DEBUG_ALLOC
2774 #define WORKED fprintf (stderr, "\033[48;5;22m Worked at line %d \033[0m\n", __LINE__)
2775 #define FAILEDSOFAR fprintf (stderr, "\033[48;5;52m FAILED at line %d \033[0m\n", __LINE__)
2776 #define FAILED fprintf (stderr, "\033[48;5;52m FAILED at line %d \033[0m\n", __LINE__), gcc_unreachable ()
2777 #define MAYBE_OK(insn) if (insn_ok_now (insn)) { WORKED; return; } else { FAILEDSOFAR; }
2778 #define MUST_BE_OK(insn) if (insn_ok_now (insn)) { WORKED; return; } FAILED
2779 #else
2780 #define FAILED gcc_unreachable ()
2781 #define MAYBE_OK(insn) if (insn_ok_now (insn)) return;
2782 #define MUST_BE_OK(insn) if (insn_ok_now (insn)) return; FAILED
2783 #endif
2784
2785 /* Registers into which we move the contents of virtual registers. */
2786 #define X gen_rtx_REG (QImode, X_REG)
2787 #define A gen_rtx_REG (QImode, A_REG)
2788 #define C gen_rtx_REG (QImode, C_REG)
2789 #define B gen_rtx_REG (QImode, B_REG)
2790 #define E gen_rtx_REG (QImode, E_REG)
2791 #define D gen_rtx_REG (QImode, D_REG)
2792 #define L gen_rtx_REG (QImode, L_REG)
2793 #define H gen_rtx_REG (QImode, H_REG)
2794
2795 #define AX gen_rtx_REG (HImode, AX_REG)
2796 #define BC gen_rtx_REG (HImode, BC_REG)
2797 #define DE gen_rtx_REG (HImode, DE_REG)
2798 #define HL gen_rtx_REG (HImode, HL_REG)
2799
2800 /* Returns TRUE if R is a virtual register. */
2801 static inline bool
2802 is_virtual_register (rtx r)
2803 {
2804 return (GET_CODE (r) == REG
2805 && REGNO (r) >= 8
2806 && REGNO (r) < 32);
2807 }
2808
2809 /* In all these alloc routines, we expect the following: the insn
2810 pattern is unshared, the insn was previously recognized and failed
2811 due to predicates or constraints, and the operand data is in
2812 recog_data. */
2813
2814 static int virt_insn_was_frame;
2815
2816 /* Hook for all insns we emit. Re-mark them as FRAME_RELATED if
2817 needed. */
2818 static rtx
2819 EM2 (int line ATTRIBUTE_UNUSED, rtx r)
2820 {
2821 #if DEBUG_ALLOC
2822 fprintf (stderr, "\033[36m%d: ", line);
2823 debug_rtx (r);
2824 fprintf (stderr, "\033[0m");
2825 #endif
2826 /*SCHED_GROUP_P (r) = 1;*/
2827 if (virt_insn_was_frame)
2828 RTX_FRAME_RELATED_P (r) = 1;
2829 return r;
2830 }
2831
2832 #define EM(x) EM2 (__LINE__, x)
2833
2834 /* Return a suitable RTX for the low half of a __far address. */
2835 static rtx
2836 rl78_lo16 (rtx addr)
2837 {
2838 rtx r;
2839
2840 if (GET_CODE (addr) == SYMBOL_REF
2841 || GET_CODE (addr) == CONST)
2842 {
2843 r = gen_rtx_ZERO_EXTRACT (HImode, addr, GEN_INT (16), GEN_INT (0));
2844 r = gen_rtx_CONST (HImode, r);
2845 }
2846 else
2847 r = rl78_subreg (HImode, addr, SImode, 0);
2848
2849 r = gen_es_addr (r);
2850 cfun->machine->uses_es = true;
2851
2852 return r;
2853 }
2854
2855 /* Return a suitable RTX for the high half's lower byte of a __far address. */
2856 static rtx
2857 rl78_hi8 (rtx addr)
2858 {
2859 if (GET_CODE (addr) == SYMBOL_REF
2860 || GET_CODE (addr) == CONST)
2861 {
2862 rtx r = gen_rtx_ZERO_EXTRACT (QImode, addr, GEN_INT (8), GEN_INT (16));
2863 r = gen_rtx_CONST (QImode, r);
2864 return r;
2865 }
2866 return rl78_subreg (QImode, addr, SImode, 2);
2867 }
2868
2869 static void
2870 add_postponed_content_update (rtx to, rtx value)
2871 {
2872 unsigned char index;
2873
2874 if ((index = get_content_index (to)) == NOT_KNOWN)
2875 return;
2876
2877 gcc_assert (saved_update_index == NOT_KNOWN);
2878 saved_update_index = index;
2879 saved_update_value = get_content_index (value);
2880 saved_update_mode = GET_MODE (to);
2881 }
2882
2883 static void
2884 process_postponed_content_update (void)
2885 {
2886 if (saved_update_index != NOT_KNOWN)
2887 {
2888 update_content (saved_update_index, saved_update_value, saved_update_mode);
2889 saved_update_index = NOT_KNOWN;
2890 }
2891 }
2892
2893 /* Generate and emit a move of (register) FROM into TO. If WHERE is not NULL,
2894 emit the insn before WHERE when BEFORE is true, otherwise emit it
2895 after WHERE. If TO already contains FROM then do nothing. Returns TO if
2896 BEFORE is true, FROM otherwise. */
2897 static rtx
2898 gen_and_emit_move (rtx to, rtx from, rtx_insn *where, bool before)
2899 {
2900 machine_mode mode = GET_MODE (to);
2901
2902 if (optimize && before && already_contains (to, from))
2903 {
2904 #if DEBUG_ALLOC
2905 display_content_memory (stderr);
2906 #endif
2907 if (dump_file)
2908 {
2909 fprintf (dump_file, " Omit move of %s into ",
2910 get_content_name (get_content_index (from), mode));
2911 fprintf (dump_file, "%s as it already contains this value\n",
2912 get_content_name (get_content_index (to), mode));
2913 }
2914 }
2915 else
2916 {
2917 rtx move = mode == QImode ? gen_movqi (to, from) : gen_movhi (to, from);
2918
2919 EM (move);
2920
2921 if (where == NULL_RTX)
2922 emit_insn (move);
2923 else if (before)
2924 emit_insn_before (move, where);
2925 else
2926 {
2927 rtx note = find_reg_note (where, REG_EH_REGION, NULL_RTX);
2928
2929 /* If necessary move REG_EH_REGION notes forward.
2930 cf. compiling gcc.dg/pr44545.c. */
2931 if (note != NULL_RTX)
2932 {
2933 add_reg_note (move, REG_EH_REGION, XEXP (note, 0));
2934 remove_note (where, note);
2935 }
2936
2937 emit_insn_after (move, where);
2938 }
2939
2940 if (before)
2941 record_content (to, from);
2942 else
2943 add_postponed_content_update (to, from);
2944 }
2945
2946 return before ? to : from;
2947 }
2948
2949 /* If M is MEM(REG) or MEM(PLUS(REG,INT)) and REG is virtual then
2950 copy it into NEWBASE and return the updated MEM. Otherwise just
2951 return M. Any needed insns are emitted before BEFORE. */
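
/* A sketch of the common case handled below (illustrative only): with
   NEWBASE = HL, an operand such as

     (mem:QI (plus:HI (reg:HI 8) (const_int 4)))

   is rewritten as (mem:QI (plus:HI (reg:HI hl) (const_int 4))) after a
   move of r8 into HL has been emitted before the insn.  When the
   addend is out of range, the full address is instead computed in AX
   and then copied into NEWBASE.  */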
2952 static rtx
2953 transcode_memory_rtx (rtx m, rtx newbase, rtx_insn *before)
2954 {
2955 rtx base, index, addendr;
2956 int addend = 0;
2957 int need_es = 0;
2958
2959 if (! MEM_P (m))
2960 return m;
2961
2962 if (GET_MODE (XEXP (m, 0)) == SImode)
2963 {
2964 rtx new_m;
2965 rtx seg = rl78_hi8 (XEXP (m, 0));
2966
2967 if (!TARGET_ES0)
2968 {
2969 emit_insn_before (EM (gen_movqi (A, seg)), before);
2970 emit_insn_before (EM (gen_movqi_to_es (A)), before);
2971 }
2972
2973 record_content (A, NULL_RTX);
2974
2975 new_m = gen_rtx_MEM (GET_MODE (m), rl78_lo16 (XEXP (m, 0)));
2976 MEM_COPY_ATTRIBUTES (new_m, m);
2977 m = new_m;
2978 need_es = 1;
2979 }
2980
2981 characterize_address (XEXP (m, 0), & base, & index, & addendr);
2982 gcc_assert (index == NULL_RTX);
2983
2984 if (base == NULL_RTX)
2985 return m;
2986
2987 if (addendr && GET_CODE (addendr) == CONST_INT)
2988 addend = INTVAL (addendr);
2989
2990 gcc_assert (REG_P (base));
2991 gcc_assert (REG_P (newbase));
2992
2993 int limit = 256 - GET_MODE_SIZE (GET_MODE (m));
2994
2995 if (REGNO (base) == SP_REG)
2996 {
2997 if (addend >= 0 && addend <= limit)
2998 return m;
2999 }
3000
3001 /* BASE should be a virtual register. We copy it to NEWBASE. If
3002 the addend is out of range for DE/HL, we use AX to compute the full
3003 address. */
3004
3005 if (addend < 0
3006 || (addend > limit && REGNO (newbase) != BC_REG)
3007 || (addendr
3008 && (GET_CODE (addendr) != CONST_INT)
3009 && ((REGNO (newbase) != BC_REG))
3010 ))
3011 {
3012 /* mov ax, vreg
3013 add ax, #imm
3014 mov hl, ax */
3015 EM (emit_insn_before (gen_movhi (AX, base), before));
3016 EM (emit_insn_before (gen_addhi3 (AX, AX, addendr), before));
3017 EM (emit_insn_before (gen_movhi (newbase, AX), before));
3018 record_content (AX, NULL_RTX);
3019 record_content (newbase, NULL_RTX);
3020
3021 base = newbase;
3022 addend = 0;
3023 addendr = 0;
3024 }
3025 else
3026 {
3027 base = gen_and_emit_move (newbase, base, before, true);
3028 }
3029
3030 if (addend)
3031 {
3032 record_content (base, NULL_RTX);
3033 base = gen_rtx_PLUS (HImode, base, GEN_INT (addend));
3034 }
3035 else if (addendr)
3036 {
3037 record_content (base, NULL_RTX);
3038 base = gen_rtx_PLUS (HImode, base, addendr);
3039 }
3040
3041 if (need_es)
3042 {
3043 m = change_address (m, GET_MODE (m), gen_es_addr (base));
3044 cfun->machine->uses_es = true;
3045 }
3046 else
3047 m = change_address (m, GET_MODE (m), base);
3048 return m;
3049 }
3050
3051 /* Copy SRC to accumulator (A or AX), placing any generated insns
3052 before BEFORE. Returns accumulator RTX. */
3053 static rtx
3054 move_to_acc (int opno, rtx_insn *before)
3055 {
3056 rtx src = OP (opno);
3057 machine_mode mode = GET_MODE (src);
3058
3059 if (REG_P (src) && REGNO (src) < 2)
3060 return src;
3061
3062 if (mode == VOIDmode)
3063 mode = recog_data.operand_mode[opno];
3064
3065 return gen_and_emit_move (mode == QImode ? A : AX, src, before, true);
3066 }
3067
3068 static void
3069 force_into_acc (rtx src, rtx_insn *before)
3070 {
3071 machine_mode mode = GET_MODE (src);
3072 rtx move;
3073
3074 if (REG_P (src) && REGNO (src) < 2)
3075 return;
3076
3077 move = mode == QImode ? gen_movqi (A, src) : gen_movhi (AX, src);
3078
3079 EM (move);
3080
3081 emit_insn_before (move, before);
3082 record_content (AX, NULL_RTX);
3083 }
3084
3085 /* Copy accumulator (A or AX) to DEST, placing any generated insns
3086 after AFTER. Returns accumulator RTX. */
3087 static rtx
3088 move_from_acc (unsigned int opno, rtx_insn *after)
3089 {
3090 rtx dest = OP (opno);
3091 machine_mode mode = GET_MODE (dest);
3092
3093 if (REG_P (dest) && REGNO (dest) < 2)
3094 return dest;
3095
3096 return gen_and_emit_move (dest, mode == QImode ? A : AX, after, false);
3097 }
3098
3099 /* Copy accumulator (A or AX) to REGNO, placing any generated insns
3100 before BEFORE. Returns reg RTX. */
3101 static rtx
3102 move_acc_to_reg (rtx acc, int regno, rtx_insn *before)
3103 {
3104 machine_mode mode = GET_MODE (acc);
3105 rtx reg;
3106
3107 reg = gen_rtx_REG (mode, regno);
3108
3109 return gen_and_emit_move (reg, acc, before, true);
3110 }
3111
3112 /* Copy SRC to X, placing any generated insns before BEFORE.
3113 Returns X RTX. */
3114 static rtx
3115 move_to_x (int opno, rtx_insn *before)
3116 {
3117 rtx src = OP (opno);
3118 machine_mode mode = GET_MODE (src);
3119 rtx reg;
3120
3121 if (mode == VOIDmode)
3122 mode = recog_data.operand_mode[opno];
3123 reg = (mode == QImode) ? X : AX;
3124
3125 if (mode == QImode || ! is_virtual_register (OP (opno)))
3126 {
3127 OP (opno) = move_to_acc (opno, before);
3128 OP (opno) = move_acc_to_reg (OP (opno), X_REG, before);
3129 return reg;
3130 }
3131
3132 return gen_and_emit_move (reg, src, before, true);
3133 }
3134
3135 /* Copy OP (opno) to H or HL, placing any generated insns before BEFORE.
3136 Returns H/HL RTX. */
3137 static rtx
3138 move_to_hl (int opno, rtx_insn *before)
3139 {
3140 rtx src = OP (opno);
3141 machine_mode mode = GET_MODE (src);
3142 rtx reg;
3143
3144 if (mode == VOIDmode)
3145 mode = recog_data.operand_mode[opno];
3146 reg = (mode == QImode) ? L : HL;
3147
3148 if (mode == QImode || ! is_virtual_register (OP (opno)))
3149 {
3150 OP (opno) = move_to_acc (opno, before);
3151 OP (opno) = move_acc_to_reg (OP (opno), L_REG, before);
3152 return reg;
3153 }
3154
3155 return gen_and_emit_move (reg, src, before, true);
3156 }
3157
3158 /* Copy OP (opno) to E or DE, placing any generated insns before BEFORE.
3159 Returns E/DE RTX. */
3160 static rtx
3161 move_to_de (int opno, rtx_insn *before)
3162 {
3163 rtx src = OP (opno);
3164 machine_mode mode = GET_MODE (src);
3165 rtx reg;
3166
3167 if (mode == VOIDmode)
3168 mode = recog_data.operand_mode[opno];
3169
3170 reg = (mode == QImode) ? E : DE;
3171
3172 if (mode == QImode || ! is_virtual_register (OP (opno)))
3173 {
3174 OP (opno) = move_to_acc (opno, before);
3175 OP (opno) = move_acc_to_reg (OP (opno), E_REG, before);
3176 }
3177 else
3178 {
3179 gen_and_emit_move (reg, src, before, true);
3180 }
3181
3182 return reg;
3183 }
3184
3185 /* Devirtualize an insn of the form (SET (op) (unop (op))). */
3186 static void
3187 rl78_alloc_physical_registers_op1 (rtx_insn * insn)
3188 {
3189 /* op[0] = func op[1] */
3190
3191 /* We first try using A as the destination, then copying it
3192 back. */
3193 if (rtx_equal_p (OP (0), OP (1)))
3194 {
3195 OP (0) =
3196 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3197 }
3198 else
3199 {
3200 /* If necessary, load the operands into BC and HL.
3201 Check to see if we already have OP (0) in HL
3202 and if so, swap the order.
3203
3204 It is tempting to perform this optimization when OP(0) does
3205 not hold a MEM, but this leads to bigger code in general.
3206 The problem is that if OP(1) holds a MEM then swapping it
3207 into BC means a BC-relative load is used and these are 3
3208 bytes long vs 1 byte for an HL load. */
3209 if (MEM_P (OP (0))
3210 && already_contains (HL, XEXP (OP (0), 0)))
3211 {
3212 OP (0) = transcode_memory_rtx (OP (0), HL, insn);
3213 OP (1) = transcode_memory_rtx (OP (1), BC, insn);
3214 }
3215 else
3216 {
3217 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3218 OP (1) = transcode_memory_rtx (OP (1), HL, insn);
3219 }
3220 }
3221
3222 MAYBE_OK (insn);
3223
3224 OP (0) = move_from_acc (0, insn);
3225
3226 MAYBE_OK (insn);
3227
3228 /* Failing that, try copying the src into the accumulator first; this
3229 handles, for example, ZERO_EXTEND or NOT. */
3230 OP (1) = move_to_acc (1, insn);
3231
3232 MUST_BE_OK (insn);
3233 }
3234
3235 /* Returns true if operand OPNUM contains a constraint of type CONSTRAINT.
3236 Assumes that the current insn has already been recognised and hence the
3237 constraint data has been filled in. */
3238 static bool
3239 has_constraint (unsigned int opnum, enum constraint_num constraint)
3240 {
3241 const char * p = recog_data.constraints[opnum];
3242
3243 /* No constraints means anything is accepted. */
3244 if (p == NULL || *p == 0 || *p == ',')
3245 return true;
3246
3247 do
3248 {
3249 char c;
3250 unsigned int len;
3251
3252 c = *p;
3253 len = CONSTRAINT_LEN (c, p);
3254 gcc_assert (len > 0);
3255
3256 switch (c)
3257 {
3258 case 0:
3259 case ',':
3260 return false;
3261 default:
3262 if (lookup_constraint (p) == constraint)
3263 return true;
3264 }
3265 p += len;
3266 }
3267 while (1);
3268 }
3269
3270 /* Devirtualize an insn of the form (SET (op) (binop (op) (op))). */
3271 static void
3272 rl78_alloc_physical_registers_op2 (rtx_insn * insn)
3273 {
3274 rtx_insn *prev;
3275 rtx_insn *first;
3276 bool hl_used;
3277 int tmp_id;
3278 rtx saved_op1;
3279
3280 if (rtx_equal_p (OP (0), OP (1)))
3281 {
3282 if (MEM_P (OP (2)))
3283 {
3284 OP (0) =
3285 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3286 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3287 }
3288 else
3289 {
3290 OP (0) =
3291 OP (1) = transcode_memory_rtx (OP (1), HL, insn);
3292 OP (2) = transcode_memory_rtx (OP (2), DE, insn);
3293 }
3294 }
3295 else if (rtx_equal_p (OP (0), OP (2)))
3296 {
3297 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3298 OP (0) =
3299 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3300 }
3301 else
3302 {
3303 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3304 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3305 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3306 }
3307
3308 MAYBE_OK (insn);
3309
3310 prev = prev_nonnote_nondebug_insn (insn);
3311 if (recog_data.constraints[1][0] == '%'
3312 && is_virtual_register (OP (1))
3313 && ! is_virtual_register (OP (2))
3314 && ! CONSTANT_P (OP (2)))
3315 {
3316 rtx tmp = OP (1);
3317 OP (1) = OP (2);
3318 OP (2) = tmp;
3319 }
3320
3321 /* Make a note of whether (H)L is being used. It matters
3322 because if OP (2) also needs reloading, then we must take
3323 care not to corrupt HL. */
3324 hl_used = reg_mentioned_p (L, OP (0)) || reg_mentioned_p (L, OP (1));
3325
3326 /* If HL is not currently being used and dest == op1 then there are
3327 some possible optimizations available by reloading one of the
3328 operands into HL, before trying to use the accumulator. */
3329 if (optimize
3330 && ! hl_used
3331 && rtx_equal_p (OP (0), OP (1)))
3332 {
3333 /* If op0 is a Ws1 type memory address then switching the base
3334 address register to HL might allow us to perform an in-memory
3335 operation. (eg for the INCW instruction).
3336
3337 FIXME: Adding the move into HL is costly if this optimization is not
3338 going to work, so for now, make sure that we know that the new insn will
3339 match the requirements of the addhi3_real pattern. Really we ought to
3340 generate a candidate sequence, test that, and then install it if the
3341 results are good. */
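/* A hypothetical illustration of the win being sought here: an
   in-place increment of a stack slot, e.g. [sp+4] = [sp+4] + 1, can,
   after copying SP into HL, be emitted as an in-memory INCW [HL+4]
   instead of loading the slot into AX, adding and storing it back.  */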
3342 if (satisfies_constraint_Ws1 (OP (0))
3343 && has_constraint (0, CONSTRAINT_Wh1)
3344 && (satisfies_constraint_K (OP (2)) || satisfies_constraint_L (OP (2))))
3345 {
3346 rtx base, index, addend, newbase;
3347
3348 characterize_address (XEXP (OP (0), 0), & base, & index, & addend);
3349 gcc_assert (index == NULL_RTX);
3350 gcc_assert (REG_P (base) && REGNO (base) == SP_REG);
3351
3352 /* Ws1 addressing allows an offset of 0, Wh1 addressing requires a non-zero offset. */
3353 if (addend != NULL_RTX)
3354 {
3355 newbase = gen_and_emit_move (HL, base, insn, true);
3356 record_content (newbase, NULL_RTX);
3357 newbase = gen_rtx_PLUS (HImode, newbase, addend);
3358
3359 OP (0) = OP (1) = change_address (OP (0), VOIDmode, newbase);
3360
3361 /* We do not want to fail here as this means that
3362 we have inserted useless insns into the stream. */
3363 MUST_BE_OK (insn);
3364 }
3365 }
3366 else if (REG_P (OP (0))
3367 && satisfies_constraint_Ws1 (OP (2))
3368 && has_constraint (2, CONSTRAINT_Wh1))
3369 {
3370 rtx base, index, addend, newbase;
3371
3372 characterize_address (XEXP (OP (2), 0), & base, & index, & addend);
3373 gcc_assert (index == NULL_RTX);
3374 gcc_assert (REG_P (base) && REGNO (base) == SP_REG);
3375
3376 /* Ws1 addressing allows an offset of 0, Wh1 addressing requires a non-zero offset. */
3377 if (addend != NULL_RTX)
3378 {
3379 gen_and_emit_move (HL, base, insn, true);
3380
3381 if (REGNO (OP (0)) != X_REG)
3382 {
3383 OP (1) = move_to_acc (1, insn);
3384 OP (0) = move_from_acc (0, insn);
3385 }
3386
3387 record_content (HL, NULL_RTX);
3388 newbase = gen_rtx_PLUS (HImode, HL, addend);
3389
3390 OP (2) = change_address (OP (2), VOIDmode, newbase);
3391
3392 /* We do not want to fail here as this means that
3393 we have inserted useless insns into the stream. */
3394 MUST_BE_OK (insn);
3395 }
3396 }
3397 }
3398
3399 OP (0) = move_from_acc (0, insn);
3400
3401 tmp_id = get_max_insn_count ();
3402 saved_op1 = OP (1);
3403
3404 if (rtx_equal_p (OP (1), OP (2)))
3405 OP (2) = OP (1) = move_to_acc (1, insn);
3406 else
3407 OP (1) = move_to_acc (1, insn);
3408
3409 MAYBE_OK (insn);
3410
3411 /* If we omitted the move of OP1 into the accumulator (because
3412 it was already there from a previous insn), then force the
3413 generation of the move instruction now. We know that we
3414 are about to emit a move into HL (or DE) via AX, and hence
3415 our optimization to remove the load of OP1 is no longer valid. */
3416 if (tmp_id == get_max_insn_count ())
3417 force_into_acc (saved_op1, insn);
3418
3419 /* We have to copy op2 to HL (or DE), but that involves AX, which
3420 already has a live value. Emit it before those insns. */
3421
3422 if (prev)
3423 first = next_nonnote_nondebug_insn (prev);
3424 else
3425 for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
3426 ;
3427
3428 OP (2) = hl_used ? move_to_de (2, first) : move_to_hl (2, first);
3429
3430 MUST_BE_OK (insn);
3431 }
3432
3433 /* Devirtualize an insn of the form SET (PC) (MEM/REG). */
3434 static void
3435 rl78_alloc_physical_registers_ro1 (rtx_insn * insn)
3436 {
3437 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3438
3439 MAYBE_OK (insn);
3440
3441 OP (0) = move_to_acc (0, insn);
3442
3443 MUST_BE_OK (insn);
3444 }
3445
3446 /* Devirtualize a compare insn. */
3447 static void
3448 rl78_alloc_physical_registers_cmp (rtx_insn * insn)
3449 {
3450 int tmp_id;
3451 rtx saved_op1;
3452 rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
3453 rtx_insn *first;
3454
3455 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3456 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3457
3458 /* HI compares have to have OP (1) in AX, but QI
3459 compares do not, so it is worth checking here. */
3460 MAYBE_OK (insn);
3461
3462 /* For an HImode compare, OP (1) must always be in AX.
3463 But if OP (1) is a REG (and not AX), then we can avoid
3464 a reload of OP (1) if we reload OP (2) into AX and invert
3465 the comparison. */
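/* Sketch of that inversion (illustrative): for "bc <u [mem]" the
   operands are swapped and the condition inverted, giving "ax >u bc"
   once the memory operand has been loaded into AX, so BC need not be
   reloaded.  EQ and NE are symmetric, so they only need the operand
   swap, not an inverted condition.  */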
3466 if (REG_P (OP (1))
3467 && REGNO (OP (1)) != AX_REG
3468 && GET_MODE (OP (1)) == HImode
3469 && MEM_P (OP (2)))
3470 {
3471 rtx cmp = XEXP (SET_SRC (PATTERN (insn)), 0);
3472
3473 OP (2) = move_to_acc (2, insn);
3474
3475 switch (GET_CODE (cmp))
3476 {
3477 case EQ:
3478 case NE:
3479 break;
3480 case LTU: cmp = gen_rtx_GTU (HImode, OP (2), OP (1)); break;
3481 case GTU: cmp = gen_rtx_LTU (HImode, OP (2), OP (1)); break;
3482 case LEU: cmp = gen_rtx_GEU (HImode, OP (2), OP (1)); break;
3483 case GEU: cmp = gen_rtx_LEU (HImode, OP (2), OP (1)); break;
3484
3485 case LT:
3486 case GT:
3487 case LE:
3488 case GE:
3489 #if DEBUG_ALLOC
3490 debug_rtx (insn);
3491 #endif
3492 default:
3493 gcc_unreachable ();
3494 }
3495
3496 if (GET_CODE (cmp) == EQ || GET_CODE (cmp) == NE)
3497 PATTERN (insn) = gen_cbranchhi4_real (cmp, OP (2), OP (1), OP (3));
3498 else
3499 PATTERN (insn) = gen_cbranchhi4_real_inverted (cmp, OP (2), OP (1), OP (3));
3500
3501 MUST_BE_OK (insn);
3502 }
3503
3504 /* Surprisingly, gcc can generate a comparison of a register with itself, but this
3505 should be handled by the second alternative of the cbranchhi_real pattern. */
3506 if (rtx_equal_p (OP (1), OP (2)))
3507 {
3508 OP (1) = OP (2) = BC;
3509 MUST_BE_OK (insn);
3510 }
3511
3512 tmp_id = get_max_insn_count ();
3513 saved_op1 = OP (1);
3514
3515 OP (1) = move_to_acc (1, insn);
3516
3517 MAYBE_OK (insn);
3518
3519 /* If we omitted the move of OP1 into the accumulator (because
3520 it was already there from a previous insn), then force the
3521 generation of the move instruction now. We know that we
3522 are about to emit a move into HL via AX, and hence our
3523 optimization to remove the load of OP1 is no longer valid. */
3524 if (tmp_id == get_max_insn_count ())
3525 force_into_acc (saved_op1, insn);
3526
3527 /* We have to copy op2 to HL, but that involves the acc, which
3528 already has a live value. Emit it before those insns. */
3529 if (prev)
3530 first = next_nonnote_nondebug_insn (prev);
3531 else
3532 for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
3533 ;
3534 OP (2) = move_to_hl (2, first);
3535
3536 MUST_BE_OK (insn);
3537 }
3538
3539 /* Like op2, but AX = A * X. */
3540 static void
3541 rl78_alloc_physical_registers_umul (rtx_insn * insn)
3542 {
3543 rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
3544 rtx_insn *first;
3545 int tmp_id;
3546 rtx saved_op1;
3547
3548 OP (0) = transcode_memory_rtx (OP (0), BC, insn);
3549 OP (1) = transcode_memory_rtx (OP (1), DE, insn);
3550 OP (2) = transcode_memory_rtx (OP (2), HL, insn);
3551
3552 MAYBE_OK (insn);
3553
3554 if (recog_data.constraints[1][0] == '%'
3555 && is_virtual_register (OP (1))
3556 && !is_virtual_register (OP (2))
3557 && !CONSTANT_P (OP (2)))
3558 {
3559 rtx tmp = OP (1);
3560 OP (1) = OP (2);
3561 OP (2) = tmp;
3562 }
3563
3564 OP (0) = move_from_acc (0, insn);
3565
3566 tmp_id = get_max_insn_count ();
3567 saved_op1 = OP (1);
3568
3569 if (rtx_equal_p (OP (1), OP (2)))
3570 {
3571 gcc_assert (GET_MODE (OP (2)) == QImode);
3572 /* The MULU instruction does not support duplicate arguments
3573 but we know that if we copy OP (2) to X it will do so via
3574 A and thus OP (1) will already be loaded into A. */
3575 OP (2) = move_to_x (2, insn);
3576 OP (1) = A;
3577 }
3578 else
3579 OP (1) = move_to_acc (1, insn);
3580
3581 MAYBE_OK (insn);
3582
3583 /* If we omitted the move of OP1 into the accumulator (because
3584 it was already there from a previous insn), then force the
3585 generation of the move instruction now. We know that we
3586 are about to emit a move into HL (or DE) via AX, and hence
3587 our optimization to remove the load of OP1 is no longer valid. */
3588 if (tmp_id == get_max_insn_count ())
3589 force_into_acc (saved_op1, insn);
3590
3591 /* We have to copy op2 to X, but that involves the acc, which
3592 already has a live value. Emit it before those insns. */
3593
3594 if (prev)
3595 first = next_nonnote_nondebug_insn (prev);
3596 else
3597 for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
3598 ;
3599 OP (2) = move_to_x (2, first);
3600
3601 MUST_BE_OK (insn);
3602 }
3603
3604 static void
3605 rl78_alloc_address_registers_macax (rtx_insn * insn)
3606 {
3607 int which, op;
3608 bool replace_in_op0 = false;
3609 bool replace_in_op1 = false;
3610
3611 MAYBE_OK (insn);
3612
3613 /* Two different MEMs are not allowed. */
3614 which = 0;
3615 for (op = 2; op >= 0; op --)
3616 {
3617 if (MEM_P (OP (op)))
3618 {
3619 if (op == 0 && replace_in_op0)
3620 continue;
3621 if (op == 1 && replace_in_op1)
3622 continue;
3623
3624 switch (which)
3625 {
3626 case 0:
3627 /* If we replace a MEM, make sure that we replace it for all
3628 occurrences of the same MEM in the insn. */
3629 replace_in_op0 = (op > 0 && rtx_equal_p (OP (op), OP (0)));
3630 replace_in_op1 = (op > 1 && rtx_equal_p (OP (op), OP (1)));
3631
3632 OP (op) = transcode_memory_rtx (OP (op), HL, insn);
3633 if (op == 2
3634 && MEM_P (OP (op))
3635 && ((GET_CODE (XEXP (OP (op), 0)) == REG
3636 && REGNO (XEXP (OP (op), 0)) == SP_REG)
3637 || (GET_CODE (XEXP (OP (op), 0)) == PLUS
3638 && REGNO (XEXP (XEXP (OP (op), 0), 0)) == SP_REG)))
3639 {
3640 emit_insn_before (gen_movhi (HL, gen_rtx_REG (HImode, SP_REG)), insn);
3641 OP (op) = replace_rtx (OP (op), gen_rtx_REG (HImode, SP_REG), HL);
3642 }
3643 if (replace_in_op0)
3644 OP (0) = OP (op);
3645 if (replace_in_op1)
3646 OP (1) = OP (op);
3647 break;
3648 case 1:
3649 OP (op) = transcode_memory_rtx (OP (op), DE, insn);
3650 break;
3651 case 2:
3652 OP (op) = transcode_memory_rtx (OP (op), BC, insn);
3653 break;
3654 }
3655 which ++;
3656 }
3657 }
3658
3659 MUST_BE_OK (insn);
3660 }
3661
3662 static void
3663 rl78_alloc_address_registers_div (rtx_insn * insn)
3664 {
3665 MUST_BE_OK (insn);
3666 }
3667
3668 /* Scan all insns and devirtualize them. */
3669 static void
3670 rl78_alloc_physical_registers (void)
3671 {
3672 /* During most of the compile, gcc is dealing with virtual
3673 registers. At this point, we need to assign physical registers
3674 to the virtual ones, and copy in/out as needed. */
3675
3676 rtx_insn *insn, *curr;
3677 enum attr_valloc valloc_method;
3678
3679 for (insn = get_insns (); insn; insn = curr)
3680 {
3681 int i;
3682
3683 curr = next_nonnote_nondebug_insn (insn);
3684
3685 if (INSN_P (insn)
3686 && (GET_CODE (PATTERN (insn)) == SET
3687 || GET_CODE (PATTERN (insn)) == CALL)
3688 && INSN_CODE (insn) == -1)
3689 {
3690 if (GET_CODE (SET_SRC (PATTERN (insn))) == ASM_OPERANDS)
3691 continue;
3692 i = recog (PATTERN (insn), insn, 0);
3693 if (i == -1)
3694 {
3695 debug_rtx (insn);
3696 gcc_unreachable ();
3697 }
3698 INSN_CODE (insn) = i;
3699 }
3700 }
3701
3702 cfun->machine->virt_insns_ok = 0;
3703 cfun->machine->real_insns_ok = 1;
3704
3705 clear_content_memory ();
3706
3707 for (insn = get_insns (); insn; insn = curr)
3708 {
3709 rtx pattern;
3710
3711 curr = insn ? next_nonnote_nondebug_insn (insn) : NULL;
3712
3713 if (!INSN_P (insn))
3714 {
3715 if (LABEL_P (insn))
3716 clear_content_memory ();
3717
3718 continue;
3719 }
3720
3721 if (dump_file)
3722 fprintf (dump_file, "Converting insn %d\n", INSN_UID (insn));
3723
3724 pattern = PATTERN (insn);
3725 if (GET_CODE (pattern) == PARALLEL)
3726 pattern = XVECEXP (pattern, 0, 0);
3727 if (JUMP_P (insn) || CALL_P (insn) || GET_CODE (pattern) == CALL)
3728 clear_content_memory ();
3729 if (GET_CODE (pattern) != SET
3730 && GET_CODE (pattern) != CALL)
3731 continue;
3732 if (GET_CODE (pattern) == SET
3733 && GET_CODE (SET_SRC (pattern)) == ASM_OPERANDS)
3734 continue;
3735
3736 valloc_method = get_attr_valloc (insn);
3737
3738 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3739
3740 if (valloc_method == VALLOC_MACAX)
3741 {
3742 record_content (AX, NULL_RTX);
3743 record_content (BC, NULL_RTX);
3744 record_content (DE, NULL_RTX);
3745 }
3746 else if (valloc_method == VALLOC_DIVHI)
3747 {
3748 record_content (AX, NULL_RTX);
3749 record_content (BC, NULL_RTX);
3750 }
3751 else if (valloc_method == VALLOC_DIVSI)
3752 {
3753 record_content (AX, NULL_RTX);
3754 record_content (BC, NULL_RTX);
3755 record_content (DE, NULL_RTX);
3756 record_content (HL, NULL_RTX);
3757 }
3758
3759 if (insn_ok_now (insn))
3760 continue;
3761
3762 INSN_CODE (insn) = -1;
3763
3764 if (RTX_FRAME_RELATED_P (insn))
3765 virt_insn_was_frame = 1;
3766 else
3767 virt_insn_was_frame = 0;
3768
3769 switch (valloc_method)
3770 {
3771 case VALLOC_OP1:
3772 rl78_alloc_physical_registers_op1 (insn);
3773 break;
3774 case VALLOC_OP2:
3775 rl78_alloc_physical_registers_op2 (insn);
3776 break;
3777 case VALLOC_RO1:
3778 rl78_alloc_physical_registers_ro1 (insn);
3779 break;
3780 case VALLOC_CMP:
3781 rl78_alloc_physical_registers_cmp (insn);
3782 break;
3783 case VALLOC_UMUL:
3784 rl78_alloc_physical_registers_umul (insn);
3785 record_content (AX, NULL_RTX);
3786 break;
3787 case VALLOC_MACAX:
3788 /* Macro that clobbers AX. */
3789 rl78_alloc_address_registers_macax (insn);
3790 record_content (AX, NULL_RTX);
3791 record_content (BC, NULL_RTX);
3792 record_content (DE, NULL_RTX);
3793 break;
3794 case VALLOC_DIVSI:
3795 rl78_alloc_address_registers_div (insn);
3796 record_content (AX, NULL_RTX);
3797 record_content (BC, NULL_RTX);
3798 record_content (DE, NULL_RTX);
3799 record_content (HL, NULL_RTX);
3800 break;
3801 case VALLOC_DIVHI:
3802 rl78_alloc_address_registers_div (insn);
3803 record_content (AX, NULL_RTX);
3804 record_content (BC, NULL_RTX);
3805 break;
3806 default:
3807 gcc_unreachable ();
3808 }
3809
3810 if (JUMP_P (insn) || CALL_P (insn) || GET_CODE (pattern) == CALL)
3811 clear_content_memory ();
3812 else
3813 process_postponed_content_update ();
3814 }
3815
3816 #if DEBUG_ALLOC
3817 fprintf (stderr, "\033[0m");
3818 #endif
3819 }
3820
3821 /* Add REG_DEAD notes using DEAD[reg] for rtx S which is part of INSN.
3822 This function scans for uses of registers; the last use (i.e. first
3823 encounter when scanning backwards) triggers a REG_DEAD note if the
3824 reg was previously in DEAD[]. */
3825 static void
3826 rl78_note_reg_uses (char *dead, rtx s, rtx insn)
3827 {
3828 const char *fmt;
3829 int i, r;
3830 enum rtx_code code;
3831
3832 if (!s)
3833 return;
3834
3835 code = GET_CODE (s);
3836
3837 switch (code)
3838 {
3839 /* Compare registers by number. */
3840 case REG:
3841 r = REGNO (s);
3842 if (dump_file)
3843 {
3844 fprintf (dump_file, "note use reg %d size %d on insn %d\n",
3845 r, GET_MODE_SIZE (GET_MODE (s)), INSN_UID (insn));
3846 print_rtl_single (dump_file, s);
3847 }
3848 if (dead [r])
3849 add_reg_note (insn, REG_DEAD, gen_rtx_REG (GET_MODE (s), r));
3850 for (i = 0; i < GET_MODE_SIZE (GET_MODE (s)); i ++)
3851 dead [r + i] = 0;
3852 return;
3853
3854 /* These codes have no constituent expressions
3855 and are unique. */
3856 case SCRATCH:
3857 case CC0:
3858 case PC:
3859 return;
3860
3861 case CONST_INT:
3862 case CONST_VECTOR:
3863 case CONST_DOUBLE:
3864 case CONST_FIXED:
3865 /* These are kept unique for a given value. */
3866 return;
3867
3868 default:
3869 break;
3870 }
3871
3872 fmt = GET_RTX_FORMAT (code);
3873
3874 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3875 {
3876 if (fmt[i] == 'E')
3877 {
3878 int j;
3879 for (j = XVECLEN (s, i) - 1; j >= 0; j--)
3880 rl78_note_reg_uses (dead, XVECEXP (s, i, j), insn);
3881 }
3882 else if (fmt[i] == 'e')
3883 rl78_note_reg_uses (dead, XEXP (s, i), insn);
3884 }
3885 }
3886
3887 /* Like the previous function, but scan for SETs instead. */
3888 static void
3889 rl78_note_reg_set (char *dead, rtx d, rtx insn)
3890 {
3891 int r, i;
3892 bool is_dead;
3893 if (GET_CODE (d) == MEM)
3894 rl78_note_reg_uses (dead, XEXP (d, 0), insn);
3895
3896 if (GET_CODE (d) != REG)
3897 return;
3898
3899 /* Do not mark the reg unused unless all QImode parts of it are dead. */
3900 r = REGNO (d);
3901 is_dead = true;
3902 for (i = 0; i < GET_MODE_SIZE (GET_MODE (d)); i ++)
3903 if (!dead [r + i])
3904 is_dead = false;
3905 if (is_dead)
3906 add_reg_note (insn, REG_UNUSED, gen_rtx_REG (GET_MODE (d), r));
3907 if (dump_file)
3908 fprintf (dump_file, "note set reg %d size %d\n", r, GET_MODE_SIZE (GET_MODE (d)));
3909 for (i = 0; i < GET_MODE_SIZE (GET_MODE (d)); i ++)
3910 dead [r + i] = 1;
3911 }
3912
3913 /* This is a rather crude register death pass. Death status is reset
3914 at every jump or call insn. */
3915 static void
3916 rl78_calculate_death_notes (void)
3917 {
3918 char dead[FIRST_PSEUDO_REGISTER];
3919 rtx p, s, d;
3920 rtx_insn *insn;
3921 int i;
3922
3923 memset (dead, 0, sizeof (dead));
3924
3925 for (insn = get_last_insn ();
3926 insn;
3927 insn = prev_nonnote_nondebug_insn (insn))
3928 {
3929 if (dump_file)
3930 {
3931 fprintf (dump_file, "\n--------------------------------------------------");
3932 fprintf (dump_file, "\nDead:");
3933 for (i = 0; i < FIRST_PSEUDO_REGISTER; i ++)
3934 if (dead[i])
3935 fprintf (dump_file, " %s", reg_names[i]);
3936 fprintf (dump_file, "\n");
3937 print_rtl_single (dump_file, insn);
3938 }
3939
3940 switch (GET_CODE (insn))
3941 {
3942 case INSN:
3943 p = PATTERN (insn);
3944 if (GET_CODE (p) == PARALLEL)
3945 {
3946 rtx q = XVECEXP (p, 0, 1);
3947
3948 /* This happens with the DIV patterns. */
3949 if (GET_CODE (q) == SET)
3950 {
3951 s = SET_SRC (q);
3952 d = SET_DEST (q);
3953 rl78_note_reg_set (dead, d, insn);
3954 rl78_note_reg_uses (dead, s, insn);
3955
3956 }
3957 p = XVECEXP (p, 0, 0);
3958 }
3959
3960 switch (GET_CODE (p))
3961 {
3962 case SET:
3963 s = SET_SRC (p);
3964 d = SET_DEST (p);
3965 rl78_note_reg_set (dead, d, insn);
3966 rl78_note_reg_uses (dead, s, insn);
3967 break;
3968
3969 case USE:
3970 rl78_note_reg_uses (dead, p, insn);
3971 break;
3972
3973 default:
3974 break;
3975 }
3976 break;
3977
3978 case JUMP_INSN:
3979 if (INSN_CODE (insn) == CODE_FOR_rl78_return)
3980 {
3981 memset (dead, 1, sizeof (dead));
3982 /* We expect a USE just prior to this, which will mark
3983 the actual return registers. The USE will have a
3984 death note, but we aren't going to be modifying it
3985 after this pass. */
3986 break;
3987 }
3988 /* FALLTHRU */
3989 case CALL_INSN:
3990 memset (dead, 0, sizeof (dead));
3991 break;
3992
3993 default:
3994 break;
3995 }
3996 if (dump_file)
3997 print_rtl_single (dump_file, insn);
3998 }
3999 }
4000
4001 /* Helper function to reset the origins in RP and the age in AGE for
4002 all registers. */
4003 static void
4004 reset_origins (int *rp, int *age)
4005 {
4006 int i;
4007 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4008 {
4009 rp[i] = i;
4010 age[i] = 0;
4011 }
4012 }
4013
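/* Subroutine of rl78_propogate_register_origins.  Track the effect of
   the SET in PAT (part of INSN) on ORIGINS[] and AGE[]: delete plain
   register copies whose destination already holds the right value,
   record new origins for copied registers, and reset the origins of
   anything clobbered by MUL, DIV or variable shift patterns.  */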
4014 static void
4015 set_origin (rtx pat, rtx_insn * insn, int * origins, int * age)
4016 {
4017 rtx src = SET_SRC (pat);
4018 rtx dest = SET_DEST (pat);
4019 int mb = GET_MODE_SIZE (GET_MODE (dest));
4020 int i;
4021
4022 if (GET_CODE (dest) == REG)
4023 {
4024 int dr = REGNO (dest);
4025
4026 if (GET_CODE (src) == REG)
4027 {
4028 int sr = REGNO (src);
4029 bool same = true;
4030 int best_age, best_reg;
4031
4032 /* See if the copy is not needed. */
4033 for (i = 0; i < mb; i ++)
4034 if (origins[dr + i] != origins[sr + i])
4035 same = false;
4036
4037 if (same)
4038 {
4039 if (dump_file)
4040 fprintf (dump_file, "deleting because dest already has correct value\n");
4041 delete_insn (insn);
4042 return;
4043 }
4044
4045 if (dr < 8 || sr >= 8)
4046 {
4047 int ar;
4048
4049 best_age = -1;
4050 best_reg = -1;
4051
4052 /* See if the copy can be made from another
4053 bank 0 register instead of the
4054 virtual src register. */
4055 for (ar = 0; ar < 8; ar += mb)
4056 {
4057 same = true;
4058
4059 for (i = 0; i < mb; i ++)
4060 if (origins[ar + i] != origins[sr + i])
4061 same = false;
4062
4063 /* The chip has some reg-reg move limitations. */
4064 if (mb == 1 && dr > 3)
4065 same = false;
4066
4067 if (same)
4068 {
4069 if (best_age == -1 || best_age > age[sr + i])
4070 {
4071 best_age = age[sr + i];
4072 best_reg = sr;
4073 }
4074 }
4075 }
4076
4077 if (best_reg != -1)
4078 {
4079 /* FIXME: copy debug info too. */
4080 SET_SRC (pat) = gen_rtx_REG (GET_MODE (src), best_reg);
4081 sr = best_reg;
4082 }
4083 }
4084
4085 for (i = 0; i < mb; i++)
4086 {
4087 origins[dr + i] = origins[sr + i];
4088 age[dr + i] = age[sr + i] + 1;
4089 }
4090 }
4091 else
4092 {
4093 /* The destination is computed, its origin is itself. */
4094 if (dump_file)
4095 fprintf (dump_file, "resetting origin of r%d for %d byte%s\n",
4096 dr, mb, mb == 1 ? "" : "s");
4097
4098 for (i = 0; i < mb; i ++)
4099 {
4100 origins[dr + i] = dr + i;
4101 age[dr + i] = 0;
4102 }
4103 }
4104
4105 /* Any registers marked with that reg as an origin are reset. */
4106 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4107 if (origins[i] >= dr && origins[i] < dr + mb)
4108 {
4109 origins[i] = i;
4110 age[i] = 0;
4111 }
4112 }
4113
4114 /* Special case - our MUL patterns use AX and sometimes BC. */
4115 if (get_attr_valloc (insn) == VALLOC_MACAX)
4116 {
4117 if (dump_file)
4118 fprintf (dump_file, "Resetting origin of AX/BC for MUL pattern.\n");
4119
4120 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4121 if (i <= 3 || origins[i] <= 3)
4122 {
4123 origins[i] = i;
4124 age[i] = 0;
4125 }
4126 }
4127 else if (get_attr_valloc (insn) == VALLOC_DIVHI)
4128 {
4129 if (dump_file)
4130 fprintf (dump_file, "Resetting origin of AX/DE for DIVHI pattern.\n");
4131
4132 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4133 if (i == A_REG
4134 || i == X_REG
4135 || i == D_REG
4136 || i == E_REG
4137 || origins[i] == A_REG
4138 || origins[i] == X_REG
4139 || origins[i] == D_REG
4140 || origins[i] == E_REG)
4141 {
4142 origins[i] = i;
4143 age[i] = 0;
4144 }
4145 }
4146 else if (get_attr_valloc (insn) == VALLOC_DIVSI)
4147 {
4148 if (dump_file)
4149 fprintf (dump_file, "Resetting origin of AX/BC/DE/HL for DIVSI pattern.\n");
4150
4151 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4152 if (i <= 7 || origins[i] <= 7)
4153 {
4154 origins[i] = i;
4155 age[i] = 0;
4156 }
4157 }
4158
4159 if (GET_CODE (src) == ASHIFT
4160 || GET_CODE (src) == ASHIFTRT
4161 || GET_CODE (src) == LSHIFTRT)
4162 {
4163 rtx count = XEXP (src, 1);
4164
4165 if (GET_CODE (count) == REG)
4166 {
4167 /* Special case - our pattern clobbers the count register. */
4168 int r = REGNO (count);
4169
4170 if (dump_file)
4171 fprintf (dump_file, "Resetting origin of r%d for shift.\n", r);
4172
4173 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4174 if (i == r || origins[i] == r)
4175 {
4176 origins[i] = i;
4177 age[i] = 0;
4178 }
4179 }
4180 }
4181 }
4182
4183 /* The idea behind this optimization is to look for cases where we
4184 move data from A to B to C, and instead move from A to B, and A to
4185 C. If B is a virtual register or memory, this is a big win on its
4186 own. If B turns out to be unneeded after this, it's a bigger win.
4187 For each register, we try to determine where its value originally
4188 came from, if it's propagated purely through moves (and not
4189 computes). The ORIGINS[] array has the regno for the "origin" of
4190 the value in the [regno] it's indexed by. */
4191 static void
4192 rl78_propogate_register_origins (void)
4193 {
4194 int origins[FIRST_PSEUDO_REGISTER];
4195 int age[FIRST_PSEUDO_REGISTER];
4196 int i;
4197 rtx_insn *insn, *ninsn = NULL;
4198 rtx pat;
4199
4200 reset_origins (origins, age);
4201
4202 for (insn = get_insns (); insn; insn = ninsn)
4203 {
4204 ninsn = next_nonnote_nondebug_insn (insn);
4205
4206 if (dump_file)
4207 {
4208 fprintf (dump_file, "\n");
4209 fprintf (dump_file, "Origins:");
4210 for (i = 0; i < FIRST_PSEUDO_REGISTER; i ++)
4211 if (origins[i] != i)
4212 fprintf (dump_file, " r%d=r%d", i, origins[i]);
4213 fprintf (dump_file, "\n");
4214 print_rtl_single (dump_file, insn);
4215 }
4216
4217 switch (GET_CODE (insn))
4218 {
4219 case CODE_LABEL:
4220 case BARRIER:
4221 case CALL_INSN:
4222 case JUMP_INSN:
4223 reset_origins (origins, age);
4224 break;
4225
4226 default:
4227 break;
4228
4229 case INSN:
4230 pat = PATTERN (insn);
4231
4232 if (GET_CODE (pat) == PARALLEL)
4233 {
4234 rtx clobber = XVECEXP (pat, 0, 1);
4235 pat = XVECEXP (pat, 0, 0);
4236 if (GET_CODE (clobber) == CLOBBER
4237 && GET_CODE (XEXP (clobber, 0)) == REG)
4238 {
4239 int cr = REGNO (XEXP (clobber, 0));
4240 int mb = GET_MODE_SIZE (GET_MODE (XEXP (clobber, 0)));
4241 if (dump_file)
4242 fprintf (dump_file, "reset origins of %d regs at %d\n", mb, cr);
4243 for (i = 0; i < mb; i++)
4244 {
4245 origins[cr + i] = cr + i;
4246 age[cr + i] = 0;
4247 }
4248 }
4249 /* This happens with the DIV patterns. */
4250 else if (GET_CODE (clobber) == SET)
4251 {
4252 set_origin (clobber, insn, origins, age);
4253 }
4254 else
4255 break;
4256 }
4257
4258 if (GET_CODE (pat) == SET)
4259 {
4260 set_origin (pat, insn, origins, age);
4261 }
4262 else if (GET_CODE (pat) == CLOBBER
4263 && GET_CODE (XEXP (pat, 0)) == REG)
4264 {
4265 if (REG_P (XEXP (pat, 0)))
4266 {
4267 unsigned int reg = REGNO (XEXP (pat, 0));
4268
4269 origins[reg] = reg;
4270 age[reg] = 0;
4271 }
4272 }
4273 }
4274 }
4275 }
4276
4277 /* Remove any SETs where the destination is unneeded. */
4278 static void
4279 rl78_remove_unused_sets (void)
4280 {
4281 rtx_insn *insn, *ninsn = NULL;
4282 rtx dest;
4283
4284 for (insn = get_insns (); insn; insn = ninsn)
4285 {
4286 ninsn = next_nonnote_nondebug_insn (insn);
4287
4288 rtx set = single_set (insn);
4289 if (set == NULL)
4290 continue;
4291
4292 dest = SET_DEST (set);
4293
4294 if (GET_CODE (dest) != REG || REGNO (dest) > 23)
4295 continue;
4296
4297 if (find_regno_note (insn, REG_UNUSED, REGNO (dest)))
4298 {
4299 if (dump_file)
4300 fprintf (dump_file, "deleting because the set register is never used.\n");
4301 delete_insn (insn);
4302 }
4303 }
4304 }
4305
4306 /* This is the top of the devirtualization pass. */
4307 static void
4308 rl78_reorg (void)
4309 {
4310 /* split2 only happens when optimizing, but we need all movSIs to be
4311 split now. */
4312 if (optimize <= 0)
4313 split_all_insns ();
4314
4315 rl78_alloc_physical_registers ();
4316
4317 if (dump_file)
4318 {
4319 fprintf (dump_file, "\n================DEVIRT:=AFTER=ALLOC=PHYSICAL=REGISTERS================\n");
4320 print_rtl_with_bb (dump_file, get_insns (), TDF_NONE);
4321 }
4322
4323 rl78_propogate_register_origins ();
4324 rl78_calculate_death_notes ();
4325
4326 if (dump_file)
4327 {
4328 fprintf (dump_file, "\n================DEVIRT:=AFTER=PROPOGATION=============================\n");
4329 print_rtl_with_bb (dump_file, get_insns (), TDF_NONE);
4330 fprintf (dump_file, "\n======================================================================\n");
4331 }
4332
4333 rl78_remove_unused_sets ();
4334
4335 /* The code after devirtualizing has changed so much that at this point
4336 we might as well just rescan everything. Note that
4337 df_rescan_all_insns is not going to help here because it does not
4338 touch the artificial uses and defs. */
4339 df_finish_pass (true);
4340 if (optimize > 1)
4341 df_live_add_problem ();
4342 df_scan_alloc (NULL);
4343 df_scan_blocks ();
4344
4345 if (optimize)
4346 df_analyze ();
4347 }
4348
4349 #undef TARGET_RETURN_IN_MEMORY
4350 #define TARGET_RETURN_IN_MEMORY rl78_return_in_memory
4351
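/* Aggregates larger than 8 bytes, and values of variable size, are
   returned in memory.  */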
4352 static bool
4353 rl78_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
4354 {
4355 const HOST_WIDE_INT size = int_size_in_bytes (type);
4356 return (size == -1 || size > 8);
4357 }
4358
4359
4360 #undef TARGET_RTX_COSTS
4361 #define TARGET_RTX_COSTS rl78_rtx_costs
4362
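/* Provide rough rtx costs.  The values are instruction-count style
   estimates; SImode multiplies are costed according to the available
   hardware multiply/divide support (G13/G14).  */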
4363 static bool
4364 rl78_rtx_costs (rtx x,
4365 machine_mode mode,
4366 int outer_code ATTRIBUTE_UNUSED,
4367 int opno ATTRIBUTE_UNUSED,
4368 int * total,
4369 bool speed ATTRIBUTE_UNUSED)
4370 {
4371 int code = GET_CODE (x);
4372
4373 if (code == IF_THEN_ELSE)
4374 {
4375 *total = COSTS_N_INSNS (10);
4376 return true;
4377 }
4378
4379 if (mode == HImode)
4380 {
4381 if (code == MULT && ! speed)
4382 {
4383 * total = COSTS_N_INSNS (8);
4384 return true;
4385 }
4386 return false;
4387 }
4388
4389 if (mode == SImode)
4390 {
4391 switch (code)
4392 {
4393 case MULT:
4394 if (! speed)
4395 /* If we are compiling for space then we do not want to use the
4396 inline SImode multiplication patterns or shift sequences.
4397 The cost is not set to 1 or 5 however as we have to allow for
4398 the possibility that we might be converting a leaf function
4399 into a non-leaf function. (There is no way to tell here).
4400 A value of 13 seems to be a reasonable compromise for the
4401 moment. */
4402 * total = COSTS_N_INSNS (13);
4403 else if (RL78_MUL_G14)
4404 *total = COSTS_N_INSNS (14);
4405 else if (RL78_MUL_G13)
4406 *total = COSTS_N_INSNS (29);
4407 else
4408 *total = COSTS_N_INSNS (500);
4409 return true;
4410
4411 case PLUS:
4412 *total = COSTS_N_INSNS (8);
4413 return true;
4414
4415 case ASHIFT:
4416 case ASHIFTRT:
4417 case LSHIFTRT:
4418 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4419 {
4420 switch (INTVAL (XEXP (x, 1)))
4421 {
4422 case 0: *total = COSTS_N_INSNS (0); break;
4423 case 1: *total = COSTS_N_INSNS (6); break;
4424 case 2: case 3: case 4: case 5: case 6: case 7:
4425 *total = COSTS_N_INSNS (10); break;
4426 case 8: *total = COSTS_N_INSNS (6); break;
4427 case 9: case 10: case 11: case 12: case 13: case 14: case 15:
4428 *total = COSTS_N_INSNS (10); break;
4429 case 16: *total = COSTS_N_INSNS (3); break;
4430 case 17: case 18: case 19: case 20: case 21: case 22: case 23:
4431 *total = COSTS_N_INSNS (4); break;
4432 case 24: *total = COSTS_N_INSNS (4); break;
4433 case 25: case 26: case 27: case 28: case 29: case 30: case 31:
4434 *total = COSTS_N_INSNS (5); break;
4435 }
4436 }
4437 else
4438 *total = COSTS_N_INSNS (10 + 4 * 16);
4439 return true;
4440
4441 default:
4442 break;
4443 }
4444 }
4445 return false;
4446 }
4447
4448
4449 static GTY(()) section * saddr_section;
4450 static GTY(()) section * frodata_section;
4451
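/* Return non-zero if X is, or addresses, a symbol that has been given
   the "@s." saddr name encoding.  */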
4452 int
4453 rl78_saddr_p (rtx x)
4454 {
4455 const char * c;
4456
4457 if (MEM_P (x))
4458 x = XEXP (x, 0);
4459 if (GET_CODE (x) == PLUS)
4460 x = XEXP (x, 0);
4461 if (GET_CODE (x) != SYMBOL_REF)
4462 return 0;
4463
4464 c = XSTR (x, 0);
4465 if (memcmp (c, "@s.", 3) == 0)
4466 return 1;
4467
4468 return 0;
4469 }
4470
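/* Return non-zero if X is, or addresses, a constant that looks like an
   SFR address, i.e. one of the form 0xFFxx.  */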
4471 int
4472 rl78_sfr_p (rtx x)
4473 {
4474 if (MEM_P (x))
4475 x = XEXP (x, 0);
4476 if (GET_CODE (x) != CONST_INT)
4477 return 0;
4478
4479 if ((INTVAL (x) & 0xFF00) != 0xFF00)
4480 return 0;
4481
4482 return 1;
4483 }
4484
4485 #undef TARGET_STRIP_NAME_ENCODING
4486 #define TARGET_STRIP_NAME_ENCODING rl78_strip_name_encoding
4487
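/* Strip any leading asterisks and any "@x." encoding prefixes added by
   rl78_encode_section_info from SYM.  */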
4488 static const char *
4489 rl78_strip_name_encoding (const char * sym)
4490 {
4491 while (1)
4492 {
4493 if (*sym == '*')
4494 sym++;
4495 else if (*sym == '@' && sym[2] == '.')
4496 sym += 3;
4497 else
4498 return sym;
4499 }
4500 }
4501
4502 /* Like rl78_strip_name_encoding, but does not strip leading asterisks. This
4503 is important if the stripped name is going to be passed to assemble_name()
4504 as that handles asterisk prefixed names in a special manner. */
4505
4506 static const char *
4507 rl78_strip_nonasm_name_encoding (const char * sym)
4508 {
4509 while (1)
4510 {
4511 if (*sym == '@' && sym[2] == '.')
4512 sym += 3;
4513 else
4514 return sym;
4515 }
4516 }
4517
4518
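/* Scan the attribute LIST and return the name-encoding character that
   applies ('s' for "saddr"), or 0 if none does.  */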
4519 static int
4520 rl78_attrlist_to_encoding (tree list, tree decl ATTRIBUTE_UNUSED)
4521 {
4522 while (list)
4523 {
4524 if (is_attribute_p ("saddr", TREE_PURPOSE (list)))
4525 return 's';
4526 list = TREE_CHAIN (list);
4527 }
4528
4529 return 0;
4530 }
4531
4532 #define RL78_ATTRIBUTES(decl) \
4533 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
4534 : DECL_ATTRIBUTES (decl) \
4535 ? (DECL_ATTRIBUTES (decl)) \
4536 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
4537
4538 #undef TARGET_ENCODE_SECTION_INFO
4539 #define TARGET_ENCODE_SECTION_INFO rl78_encode_section_info
4540
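/* If DECL carries an attribute that needs name encoding, rewrite its
   SYMBOL_REF to use an "@s."-style prefixed name so that later passes
   and the section selection hooks can recognize it.  */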
4541 static void
4542 rl78_encode_section_info (tree decl, rtx rtl, int first)
4543 {
4544 rtx rtlname;
4545 const char * oldname;
4546 char encoding;
4547 char * newname;
4548 tree idp;
4549 tree type;
4550 tree rl78_attributes;
4551
4552 if (!first)
4553 return;
4554
4555 rtlname = XEXP (rtl, 0);
4556
4557 if (GET_CODE (rtlname) == SYMBOL_REF)
4558 oldname = XSTR (rtlname, 0);
4559 else if (GET_CODE (rtlname) == MEM
4560 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4561 oldname = XSTR (XEXP (rtlname, 0), 0);
4562 else
4563 gcc_unreachable ();
4564
4565 type = TREE_TYPE (decl);
4566 if (type == error_mark_node)
4567 return;
4568 if (! DECL_P (decl))
4569 return;
4570 rl78_attributes = RL78_ATTRIBUTES (decl);
4571
4572 encoding = rl78_attrlist_to_encoding (rl78_attributes, decl);
4573
4574 if (encoding)
4575 {
4576 newname = (char *) alloca (strlen (oldname) + 4);
4577 sprintf (newname, "@%c.%s", encoding, oldname);
4578 idp = get_identifier (newname);
4579 XEXP (rtl, 0) =
4580 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4581 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4582 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
4583 }
4584 }
4585
4586 #undef TARGET_ASM_INIT_SECTIONS
4587 #define TARGET_ASM_INIT_SECTIONS rl78_asm_init_sections
4588
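/* Create the RL78-specific .saddr and .frodata sections.  */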
4589 static void
4590 rl78_asm_init_sections (void)
4591 {
4592 saddr_section
4593 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
4594 "\t.section .saddr,\"aw\",@progbits");
4595 frodata_section
4596 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
4597 "\t.section .frodata,\"aw\",@progbits");
4598 }
4599
4600 #undef TARGET_ASM_SELECT_SECTION
4601 #define TARGET_ASM_SELECT_SECTION rl78_select_section
4602
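/* Select the output section for DECL: saddr-encoded variables go into
   .saddr, far/ES0 read-only data goes into .frodata, and everything
   else is categorized in the usual way.  */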
4603 static section *
4604 rl78_select_section (tree decl,
4605 int reloc,
4606 unsigned HOST_WIDE_INT align)
4607 {
4608 int readonly = 1;
4609
4610 switch (TREE_CODE (decl))
4611 {
4612 case VAR_DECL:
4613 if (!TREE_READONLY (decl)
4614 || TREE_SIDE_EFFECTS (decl)
4615 || !DECL_INITIAL (decl)
4616 || (DECL_INITIAL (decl) != error_mark_node
4617 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4618 readonly = 0;
4619 break;
4620 case CONSTRUCTOR:
4621 if (! TREE_CONSTANT (decl))
4622 readonly = 0;
4623 break;
4624
4625 default:
4626 break;
4627 }
4628
4629 if (TREE_CODE (decl) == VAR_DECL)
4630 {
4631 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4632
4633 if (name[0] == '@' && name[2] == '.')
4634 switch (name[1])
4635 {
4636 case 's':
4637 return saddr_section;
4638 }
4639
4640 if (TYPE_ADDR_SPACE (TREE_TYPE (decl)) == ADDR_SPACE_FAR
4641 && readonly)
4642 {
4643 return frodata_section;
4644 }
4645 }
4646
4647 if (readonly)
4648 return TARGET_ES0 ? frodata_section : readonly_data_section;
4649
4650 switch (categorize_decl_for_section (decl, reloc))
4651 {
4652 case SECCAT_TEXT: return text_section;
4653 case SECCAT_DATA: return data_section;
4654 case SECCAT_BSS: return bss_section;
4655 case SECCAT_RODATA: return TARGET_ES0 ? frodata_section : readonly_data_section;
4656 default:
4657 return default_select_section (decl, reloc, align);
4658 }
4659 }
4660
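/* Output the label reference STR to FILE, stripping the RL78 name
   encoding and adding the user label prefix where appropriate.  */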
4661 void
4662 rl78_output_labelref (FILE *file, const char *str)
4663 {
4664 const char *str2;
4665
4666 str2 = targetm.strip_name_encoding (str);
4667 if (str2[0] != '.')
4668 fputs (user_label_prefix, file);
4669 fputs (str2, file);
4670 }
4671
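/* Output a common (or local common) block.  Objects with the "@s."
   saddr encoding are emitted directly into the .saddr section; all
   other objects fall back to a .comm directive.  */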
4672 void
4673 rl78_output_aligned_common (FILE *stream,
4674 tree decl ATTRIBUTE_UNUSED,
4675 const char *name,
4676 int size, int align, int global)
4677 {
4678 /* We intentionally don't use rl78_section_tag() here. */
4679 if (name[0] == '@' && name[2] == '.')
4680 {
4681 const char *sec = 0;
4682 switch (name[1])
4683 {
4684 case 's':
4685 switch_to_section (saddr_section);
4686 sec = ".saddr";
4687 break;
4688 }
4689 if (sec)
4690 {
4691 const char *name2;
4692 int p2align = 0;
4693
4694 while (align > BITS_PER_UNIT)
4695 {
4696 align /= 2;
4697 p2align ++;
4698 }
4699 name2 = targetm.strip_name_encoding (name);
4700 if (global)
4701 fprintf (stream, "\t.global\t_%s\n", name2);
4702 fprintf (stream, "\t.p2align %d\n", p2align);
4703 fprintf (stream, "\t.type\t_%s,@object\n", name2);
4704 fprintf (stream, "\t.size\t_%s,%d\n", name2, size);
4705 fprintf (stream, "_%s:\n\t.zero\t%d\n", name2, size);
4706 return;
4707 }
4708 }
4709
4710 if (!global)
4711 {
4712 fprintf (stream, "\t.local\t");
4713 assemble_name (stream, name);
4714 fprintf (stream, "\n");
4715 }
4716 fprintf (stream, "\t.comm\t");
4717 assemble_name (stream, name);
4718 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4719 }
4720
4721 #undef TARGET_INSERT_ATTRIBUTES
4722 #define TARGET_INSERT_ATTRIBUTES rl78_insert_attributes
4723
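/* With -mes0, read-only variables whose address is taken are moved
   into the __far address space so that rl78_select_section can place
   them in .frodata.  */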
4724 static void
4725 rl78_insert_attributes (tree decl, tree *attributes ATTRIBUTE_UNUSED)
4726 {
4727 if (TARGET_ES0
4728 && TREE_CODE (decl) == VAR_DECL
4729 && TREE_READONLY (decl)
4730 && TREE_ADDRESSABLE (decl)
4731 && TYPE_ADDR_SPACE (TREE_TYPE (decl)) == ADDR_SPACE_GENERIC)
4732 {
4733 tree type = TREE_TYPE (decl);
4734 tree attr = TYPE_ATTRIBUTES (type);
4735 int q = TYPE_QUALS_NO_ADDR_SPACE (type) | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_FAR);
4736
4737 TREE_TYPE (decl) = build_type_attribute_qual_variant (type, attr, q);
4738 }
4739 }
4740
4741 #undef TARGET_ASM_INTEGER
4742 #define TARGET_ASM_INTEGER rl78_asm_out_integer
4743
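/* Output integer constants; 4-byte values that the default handler
   cannot emit are output with an explicit .long directive.  */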
4744 static bool
4745 rl78_asm_out_integer (rtx x, unsigned int size, int aligned_p)
4746 {
4747 if (default_assemble_integer (x, size, aligned_p))
4748 return true;
4749
4750 if (size == 4)
4751 {
4752 assemble_integer_with_op (".long\t", x);
4753 return true;
4754 }
4755
4756 return false;
4757 }
4758
4759 #undef TARGET_UNWIND_WORD_MODE
4760 #define TARGET_UNWIND_WORD_MODE rl78_unwind_word_mode
4761
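/* Unwind information uses 16-bit words.  */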
4762 static scalar_int_mode
4763 rl78_unwind_word_mode (void)
4764 {
4765 return HImode;
4766 }
4767
4768 #ifndef USE_COLLECT2
4769 #undef TARGET_ASM_CONSTRUCTOR
4770 #define TARGET_ASM_CONSTRUCTOR rl78_asm_constructor
4771 #undef TARGET_ASM_DESTRUCTOR
4772 #define TARGET_ASM_DESTRUCTOR rl78_asm_destructor
4773
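/* Emit SYMBOL into the constructor or destructor section, using a
   priority-specific section name when PRIORITY is not the default.  */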
4774 static void
4775 rl78_asm_ctor_dtor (rtx symbol, int priority, bool is_ctor)
4776 {
4777 section *sec;
4778
4779 if (priority != DEFAULT_INIT_PRIORITY)
4780 {
4781 /* This section of the function is based upon code copied
4782 from: gcc/varasm.c:get_cdtor_priority_section(). */
4783 char buf[18];
4784
4785 sprintf (buf, "%s.%.5u", is_ctor ? ".ctors" : ".dtors",
4786 MAX_INIT_PRIORITY - priority);
4787 sec = get_section (buf, 0, NULL);
4788 }
4789 else
4790 sec = is_ctor ? ctors_section : dtors_section;
4791
4792 assemble_addr_to_section (symbol, sec);
4793 }
4794
4795 static void
4796 rl78_asm_constructor (rtx symbol, int priority)
4797 {
4798 rl78_asm_ctor_dtor (symbol, priority, true);
4799 }
4800
4801 static void
4802 rl78_asm_destructor (rtx symbol, int priority)
4803 {
4804 rl78_asm_ctor_dtor (symbol, priority, false);
4805 }
4806 #endif /* ! USE_COLLECT2 */
4807
4808 /* Scan backwards through the insn chain looking to see if the flags
4809 have been set for a comparison of OP against OPERAND. Start with
4810 the insn *before* the current insn. */
4811
4812 bool
4813 rl78_flags_already_set (rtx op, rtx operand)
4814 {
4815 /* We only track the Z flag. */
4816 if (GET_CODE (op) != EQ && GET_CODE (op) != NE)
4817 return false;
4818
4819 /* This should not happen, but let's be paranoid. */
4820 if (current_output_insn == NULL_RTX)
4821 return false;
4822
4823 rtx_insn *insn;
4824 bool res = false;
4825
4826 for (insn = prev_nonnote_nondebug_insn (current_output_insn);
4827 insn != NULL_RTX;
4828 insn = prev_nonnote_nondebug_insn (insn))
4829 {
4830 if (LABEL_P (insn))
4831 break;
4832
4833 if (! INSN_P (insn))
4834 continue;
4835
4836 /* Make sure that the insn can be recognized. */
4837 if (recog_memoized (insn) == -1)
4838 continue;
4839
4840 enum attr_update_Z updated = get_attr_update_Z (insn);
4841
4842 rtx set = single_set (insn);
4843 bool must_break = (set != NULL_RTX && rtx_equal_p (operand, SET_DEST (set)));
4844
4845 switch (updated)
4846 {
4847 case UPDATE_Z_NO:
4848 break;
4849 case UPDATE_Z_CLOBBER:
4850 must_break = true;
4851 break;
4852 case UPDATE_Z_UPDATE_Z:
4853 res = must_break;
4854 must_break = true;
4855 break;
4856 default:
4857 gcc_unreachable ();
4858 }
4859
4860 if (must_break)
4861 break;
4862 }
4863
4864 /* We have to re-recognize the current insn as the call(s) to
4865 get_attr_update_Z() above will have overwritten the recog_data cache. */
4866 recog_memoized (current_output_insn);
4867 cleanup_subreg_operands (current_output_insn);
4868 constrain_operands_cached (current_output_insn, 1);
4869
4870 return res;
4871 }
4872
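/* Return the assembler template for a 32-bit add.  The low word is
   added with ADDW; the carry is folded into the high word with
   SKNC/INCW and, depending on the constant in operands[2], a further
   ADDW, INCW or DECW (or nothing).  For alternatives 0 and 1 the
   emitted sequence is roughly:

       movw ax, %h1 ; addw ax, %h2 ; movw %h0, ax
       movw ax, %H1 ; sknc ; incw ax ; addw ax, %H2 ; movw %H0, ax  */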
4873 const char *
4874 rl78_addsi3_internal (rtx * operands, unsigned int alternative)
4875 {
4876 const char *addH2 = "addw ax, %H2\n\t";
4877
4878 /* If we are adding in a constant symbolic address when -mes0
4879 is active then we know that the address must be <64K and
4880 that it is invalid to access anything above 64K relative to
4881 this address. So we can skip adding in the high bytes. */
4882 if (TARGET_ES0
4883 && GET_CODE (operands[2]) == SYMBOL_REF
4884 && TREE_CODE (SYMBOL_REF_DECL (operands[2])) == VAR_DECL
4885 && TREE_READONLY (SYMBOL_REF_DECL (operands[2]))
4886 && ! TREE_SIDE_EFFECTS (SYMBOL_REF_DECL (operands[2])))
4887 return "movw ax, %h1\n\taddw ax, %h2\n\tmovw %h0, ax";
4888
4889 if (CONST_INT_P (operands[2]))
4890 {
4891 if ((INTVAL (operands[2]) & 0xFFFF0000) == 0)
4892 {
4893 addH2 = "";
4894 }
4895 else if ((INTVAL (operands[2]) & 0xFFFF0000) == 0x00010000)
4896 {
4897 addH2 = "incw ax\n\t";
4898 }
4899 else if ((INTVAL (operands[2]) & 0xFFFF0000) == 0xFFFF0000)
4900 {
4901 addH2 = "decw ax\n\t";
4902 }
4903 }
4904
4905 switch (alternative)
4906 {
4907 case 0:
4908 case 1:
4909 snprintf (fmt_buffer, sizeof (fmt_buffer),
4910 "movw ax, %%h1\n\taddw ax, %%h2\n\tmovw %%h0, ax\n\tmovw ax, %%H1\n\tsknc\n\tincw ax\n\t%smovw %%H0,ax", addH2);
4911 break;
4912 case 2:
4913 snprintf (fmt_buffer, sizeof (fmt_buffer),
4914 "movw ax, %%h1\n\taddw ax, %%h2\n\tmovw bc, ax\n\tmovw ax, %%H1\n\tsknc\n\tincw ax\n\t%smovw %%H0, ax\n\tmovw ax, bc\n\tmovw %%h0, ax", addH2);
4915 break;
4916 default:
4917 gcc_unreachable ();
4918 }
4919
4920 return fmt_buffer;
4921 }
4922
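/* Emit a call to the library function NAME implementing CODE, passing
   NOPERANDS operands of mode SMODE and returning a DMODE value, and
   wrap the emitted sequence in a libcall block with an equivalent rtx
   for later optimization.  */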
4923 rtx
4924 rl78_emit_libcall (const char *name, enum rtx_code code,
4925 enum machine_mode dmode, enum machine_mode smode,
4926 int noperands, rtx *operands)
4927 {
4928 rtx ret;
4929 rtx_insn *insns;
4930 rtx libcall;
4931 rtx equiv;
4932
4933 start_sequence ();
4934 libcall = gen_rtx_SYMBOL_REF (Pmode, name);
4935
4936 switch (noperands)
4937 {
4938 case 2:
4939 ret = emit_library_call_value (libcall, NULL_RTX, LCT_CONST,
4940 dmode, operands[1], smode);
4941 equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
4942 break;
4943
4944 case 3:
4945 ret = emit_library_call_value (libcall, NULL_RTX,
4946 LCT_CONST, dmode,
4947 operands[1], smode, operands[2],
4948 smode);
4949 equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
4950 break;
4951
4952 default:
4953 gcc_unreachable ();
4954 }
4955
4956 insns = get_insns ();
4957 end_sequence ();
4958 emit_libcall_block (insns, operands[0], ret, equiv);
4959 return ret;
4960 }
4961
4962
4963 #undef TARGET_PREFERRED_RELOAD_CLASS
4964 #define TARGET_PREFERRED_RELOAD_CLASS rl78_preferred_reload_class
4965
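/* Reloads that would otherwise have no preferred class default to the
   virtual registers (V_REGS).  */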
4966 static reg_class_t
4967 rl78_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, reg_class_t rclass)
4968 {
4969 if (rclass == NO_REGS)
4970 rclass = V_REGS;
4971
4972 return rclass;
4973 }
4974
4975
4976 struct gcc_target targetm = TARGET_INITIALIZER;
4977
4978 #include "gt-rl78.h"
4979