1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2018 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #define IN_TARGET_CODE 1
22
23 #include "config.h"
24 #include "system.h"
25 #include "intl.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "stringpool.h"
32 #include "attribs.h"
33 #include "cgraph.h"
34 #include "c-family/c-common.h"
35 #include "cfghooks.h"
36 #include "df.h"
37 #include "memmodel.h"
38 #include "tm_p.h"
39 #include "optabs.h"
40 #include "regs.h"
41 #include "emit-rtl.h"
42 #include "recog.h"
43 #include "conditions.h"
44 #include "insn-attr.h"
45 #include "reload.h"
46 #include "varasm.h"
47 #include "calls.h"
48 #include "stor-layout.h"
49 #include "output.h"
50 #include "explow.h"
51 #include "expr.h"
52 #include "langhooks.h"
53 #include "cfgrtl.h"
54 #include "params.h"
55 #include "builtins.h"
56 #include "context.h"
57 #include "tree-pass.h"
58 #include "print-rtl.h"
59 #include "rtl-iter.h"
60
61 /* This file should be included last. */
62 #include "target-def.h"
63
64 /* Maximal allowed offset for an address in the LD command */
65 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
66
67 /* Return true if STR starts with PREFIX and false, otherwise. */
68 #define STR_PREFIX_P(STR,PREFIX) (strncmp (STR, PREFIX, strlen (PREFIX)) == 0)
69
70 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
71 address space where data is to be located.
72 As the only non-generic address spaces are all located in flash,
73 this can be used to test if data shall go into some .progmem* section.
74 This must be the rightmost field of machine dependent section flags. */
75 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
76
77 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
78 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
79
80 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
81 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
/* Store address-space AS into the reserved 4-bit field of
   SYMBOL_REF_FLAGS of SYM.  Note: earlier versions of these macros
   referred to a lowercase `sym' in their bodies, silently capturing a
   caller-scope variable of that name instead of using the macro
   argument; they now use the parameter (parenthesized) as intended.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
92
93 /* (AVR_TINY only): Symbol has attribute progmem */
94 #define AVR_SYMBOL_FLAG_TINY_PM \
95 (SYMBOL_FLAG_MACH_DEP << 7)
96
97 /* (AVR_TINY only): Symbol has attribute absdata */
98 #define AVR_SYMBOL_FLAG_TINY_ABSDATA \
99 (SYMBOL_FLAG_MACH_DEP << 8)
100
101 #define TINY_ADIW(REG1, REG2, I) \
102 "subi " #REG1 ",lo8(-(" #I "))" CR_TAB \
103 "sbci " #REG2 ",hi8(-(" #I "))"
104
105 #define TINY_SBIW(REG1, REG2, I) \
106 "subi " #REG1 ",lo8((" #I "))" CR_TAB \
107 "sbci " #REG2 ",hi8((" #I "))"
108
109 #define AVR_TMP_REGNO (AVR_TINY ? TMP_REGNO_TINY : TMP_REGNO)
110 #define AVR_ZERO_REGNO (AVR_TINY ? ZERO_REGNO_TINY : ZERO_REGNO)
111
/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initializers must be used).
   Per row the fields appear to be: address-space id, located-in-flash
   flag, pointer size in bytes, keyword, flash segment number, and the
   name of the section data is placed in (field names are declared by
   avr_addrspace_t in avr.h -- TODO confirm against that header).  */
const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
{
  { ADDR_SPACE_RAM,    0, 2, "", 0, NULL },
  { ADDR_SPACE_FLASH,  1, 2, "__flash",  0, ".progmem.data" },
  { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
  { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
  { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
  { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
  { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
  { ADDR_SPACE_MEMX,   1, 3, "__memx",   0, ".progmemx.data" },
};
125
126
127 /* Holding RAM addresses of some SFRs used by the compiler and that
128 are unique over all devices in an architecture like 'avr4'. */
129
/* Holds the RAM addresses of special function registers used by the
   compiler; the concrete values are filled in by avr_option_override
   from the selected architecture's SFR offset.  */

typedef struct
{
  /* SREG: The processor status */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM */
  int rampz;

  /* SP: The stack pointer and its low and high byte */
  int sp_l;
  int sp_h;
} avr_addr_t;

/* The one instance holding the SFR addresses for the current device.  */
static avr_addr_t avr_addr;
150
151
152 /* Prototypes for local helper functions. */
153
154 static const char* out_movqi_r_mr (rtx_insn *, rtx[], int*);
155 static const char* out_movhi_r_mr (rtx_insn *, rtx[], int*);
156 static const char* out_movsi_r_mr (rtx_insn *, rtx[], int*);
157 static const char* out_movqi_mr_r (rtx_insn *, rtx[], int*);
158 static const char* out_movhi_mr_r (rtx_insn *, rtx[], int*);
159 static const char* out_movsi_mr_r (rtx_insn *, rtx[], int*);
160
161 static int get_sequence_length (rtx_insn *insns);
162 static int sequent_regs_live (void);
163 static const char *ptrreg_to_str (int);
164 static const char *cond_string (enum rtx_code);
165 static int avr_num_arg_regs (machine_mode, const_tree);
166 static int avr_operand_rtx_cost (rtx, machine_mode, enum rtx_code,
167 int, bool);
168 static void output_reload_in_const (rtx*, rtx, int*, bool);
169 static struct machine_function * avr_init_machine_status (void);
170
171
172 /* Prototypes for hook implementors if needed before their implementation. */
173
174 static bool avr_rtx_costs (rtx, machine_mode, int, int, int*, bool);
175
176
177 /* Allocate registers from r25 to r8 for parameters for function calls. */
178 #define FIRST_CUM_REG 26
179
180 /* Last call saved register */
181 #define LAST_CALLEE_SAVED_REG (AVR_TINY ? 19 : 17)
182
183 /* Implicit target register of LPM instruction (R0) */
184 extern GTY(()) rtx lpm_reg_rtx;
185 rtx lpm_reg_rtx;
186
187 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
188 extern GTY(()) rtx lpm_addr_reg_rtx;
189 rtx lpm_addr_reg_rtx;
190
191 /* Temporary register RTX (reg:QI TMP_REGNO) */
192 extern GTY(()) rtx tmp_reg_rtx;
193 rtx tmp_reg_rtx;
194
195 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
196 extern GTY(()) rtx zero_reg_rtx;
197 rtx zero_reg_rtx;
198
199 /* RTXs for all general purpose registers as QImode */
200 extern GTY(()) rtx all_regs_rtx[32];
201 rtx all_regs_rtx[32];
202
203 /* SREG, the processor status */
204 extern GTY(()) rtx sreg_rtx;
205 rtx sreg_rtx;
206
207 /* RAMP* special function registers */
208 extern GTY(()) rtx rampd_rtx;
209 extern GTY(()) rtx rampx_rtx;
210 extern GTY(()) rtx rampy_rtx;
211 extern GTY(()) rtx rampz_rtx;
212 rtx rampd_rtx;
213 rtx rampx_rtx;
214 rtx rampy_rtx;
215 rtx rampz_rtx;
216
217 /* RTX containing the strings "" and "e", respectively */
218 static GTY(()) rtx xstring_empty;
219 static GTY(()) rtx xstring_e;
220
221 /* Current architecture. */
222 const avr_arch_t *avr_arch;
223
224 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
225 or to address space __flash* or __memx. Only used as singletons inside
226 avr_asm_select_section, but it must not be local there because of GTY. */
227 static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];
228
229 /* Condition for insns/expanders from avr-dimode.md. */
230 bool avr_have_dimode = true;
231
232 /* To track if code will use .bss and/or .data. */
233 bool avr_need_clear_bss_p = false;
234 bool avr_need_copy_data_p = false;
235
236
237 /* Transform UP into lowercase and write the result to LO.
238 You must provide enough space for LO. Return LO. */
239
240 static char*
avr_tolower(char * lo,const char * up)241 avr_tolower (char *lo, const char *up)
242 {
243 char *lo0 = lo;
244
245 for (; *up; up++, lo++)
246 *lo = TOLOWER (*up);
247
248 *lo = '\0';
249
250 return lo0;
251 }
252
253
254 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
255 Return true if the least significant N_BYTES bytes of XVAL all have a
256 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
257 of integers which contains an integer N iff bit N of POP_MASK is set. */
258
259 bool
avr_popcount_each_byte(rtx xval,int n_bytes,int pop_mask)260 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
261 {
262 machine_mode mode = GET_MODE (xval);
263
264 if (VOIDmode == mode)
265 mode = SImode;
266
267 for (int i = 0; i < n_bytes; i++)
268 {
269 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
270 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
271
272 if ((pop_mask & (1 << popcount_hwi (val8))) == 0)
273 return false;
274 }
275
276 return true;
277 }
278
279
280 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
281 the bit representation of X by "casting" it to CONST_INT. */
282
283 rtx
avr_to_int_mode(rtx x)284 avr_to_int_mode (rtx x)
285 {
286 machine_mode mode = GET_MODE (x);
287
288 return VOIDmode == mode
289 ? x
290 : simplify_gen_subreg (int_mode_for_mode (mode).require (), x, mode, 0);
291 }
292
namespace {

/* Static metadata for the note-recomputation pass below.  The empty
   name field is patched in the pass constructor.  */

static const pass_data avr_pass_data_recompute_notes =
{
  RTL_PASS,      // type
  "",            // name (will be patched)
  OPTGROUP_NONE, // optinfo_flags
  TV_DF_SCAN,    // tv_id
  0,             // properties_required
  0,             // properties_provided
  0,             // properties_destroyed
  0,             // todo_flags_start
  TODO_df_finish | TODO_df_verify // todo_flags_finish
};


/* A pass whose only job is to re-run the df note problem
   (df_note_add_problem + df_analyze); the actual note update and
   verification happen via the TODO_df_* finish flags above.  */

class avr_pass_recompute_notes : public rtl_opt_pass
{
public:
  avr_pass_recompute_notes (gcc::context *ctxt, const char *name)
    : rtl_opt_pass (avr_pass_data_recompute_notes, ctxt)
  {
    this->name = name;
  }

  virtual unsigned int execute (function*)
  {
    df_note_add_problem ();
    df_analyze ();

    return 0;
  }
}; // avr_pass_recompute_notes

/* Static metadata for the casesi optimization pass below.  */

static const pass_data avr_pass_data_casesi =
{
  RTL_PASS,      // type
  "",            // name (will be patched)
  OPTGROUP_NONE, // optinfo_flags
  TV_DF_SCAN,    // tv_id
  0,             // properties_required
  0,             // properties_provided
  0,             // properties_destroyed
  0,             // todo_flags_start
  0              // todo_flags_finish
};


/* A pass that narrows the SImode switch value of casesi insn sequences
   to the original QImode / HImode computation; the real work is done in
   avr_rest_of_handle_casesi.  Gated on optimization being enabled.  */

class avr_pass_casesi : public rtl_opt_pass
{
public:
  avr_pass_casesi (gcc::context *ctxt, const char *name)
    : rtl_opt_pass (avr_pass_data_casesi, ctxt)
  {
    this->name = name;
  }

  void avr_rest_of_handle_casesi (function*);

  virtual bool gate (function*) { return optimize > 0; }

  virtual unsigned int execute (function *func)
  {
    avr_rest_of_handle_casesi (func);

    return 0;
  }
}; // avr_pass_casesi

} // anon namespace
363
/* Factory returning a new instance of the note-recomputation pass,
   registered under the dump name "avr-notes-free-cfg".  The caller
   owns the returned object.  */

rtl_opt_pass*
make_avr_pass_recompute_notes (gcc::context *ctxt)
{
  return new avr_pass_recompute_notes (ctxt, "avr-notes-free-cfg");
}

/* Factory returning a new instance of the casesi optimization pass,
   registered under the dump name "avr-casesi".  The caller owns the
   returned object.  */

rtl_opt_pass*
make_avr_pass_casesi (gcc::context *ctxt)
{
  return new avr_pass_casesi (ctxt, "avr-casesi");
}
375
376
377 /* Make one parallel insn with all the patterns from insns i[0]..i[5]. */
378
379 static rtx_insn*
avr_parallel_insn_from_insns(rtx_insn * i[6])380 avr_parallel_insn_from_insns (rtx_insn *i[6])
381 {
382 rtvec vec = gen_rtvec (6, PATTERN (i[0]), PATTERN (i[1]), PATTERN (i[2]),
383 PATTERN (i[3]), PATTERN (i[4]), PATTERN (i[5]));
384 start_sequence();
385 emit (gen_rtx_PARALLEL (VOIDmode, vec));
386 rtx_insn *insn = get_insns();
387 end_sequence();
388
389 return insn;
390 }
391
392
393 /* Return true if we see an insn stream generated by casesi expander together
394 with an extension to SImode of the switch value.
395
396 If this is the case, fill in the insns from casesi to INSNS[1..5] and
397 the SImode extension to INSNS[0]. Moreover, extract the operands of
398 pattern casesi_<mode>_sequence forged from the sequence to recog_data. */
399
static bool
avr_is_casesi_sequence (basic_block bb, rtx_insn *insn, rtx_insn *insns[6])
{
  rtx set_5, set_0;

  /* A first and quick test for a casesi sequence.  As a side effect of
     the test, harvest respective insns to INSNS[0..5].  Note the
     assignments inside the condition: each sub-test both fills an
     INSNS slot and guards the following tests against NULL.  */

  if (!(JUMP_P (insns[5] = insn)
        // casesi is the only insn that comes up with UNSPEC_INDEX_JMP,
        // hence the following test ensures that we are actually dealing
        // with code from casesi.
        && (set_5 = single_set (insns[5]))
        && UNSPEC == GET_CODE (SET_SRC (set_5))
        && UNSPEC_INDEX_JMP == XINT (SET_SRC (set_5), 1)

        && (insns[4] = prev_real_insn (insns[5]))
        && (insns[3] = prev_real_insn (insns[4]))
        && (insns[2] = prev_real_insn (insns[3]))
        && (insns[1] = prev_real_insn (insns[2]))

        // Insn prior to casesi.
        && (insns[0] = prev_real_insn (insns[1]))
        && (set_0 = single_set (insns[0]))
        && extend_operator (SET_SRC (set_0), SImode)))
    {
      return false;
    }

  if (dump_file)
    {
      fprintf (dump_file, ";; Sequence from casesi in "
               "[bb %d]:\n\n", bb->index);
      for (int i = 0; i < 6; i++)
        print_rtl_single (dump_file, insns[i]);
    }

  /* We have to deal with quite some operands.  Extracting them by hand
     would be tedious, therefore wrap the insn patterns into a parallel,
     run recog against it and then use insn extract to get the operands. */

  rtx_insn *xinsn = avr_parallel_insn_from_insns (insns);

  INSN_CODE (xinsn) = recog (PATTERN (xinsn), xinsn, NULL /* num_clobbers */);

  /* Failing to recognize means that someone changed the casesi expander or
     that some passes prior to this one performed some unexpected changes.
     Gracefully drop such situations instead of aborting.  */

  if (INSN_CODE (xinsn) < 0)
    {
      if (dump_file)
        fprintf (dump_file, ";; Sequence not recognized, giving up.\n\n");

      return false;
    }

  /* Recognition succeeded, so it must be one of the two casesi
     sequence patterns (QImode or HImode extension).  */

  gcc_assert (CODE_FOR_casesi_qi_sequence == INSN_CODE (xinsn)
              || CODE_FOR_casesi_hi_sequence == INSN_CODE (xinsn));

  extract_insn (xinsn);

  // Assert on the anatomy of xinsn's operands we are going to work with.

  gcc_assert (recog_data.n_operands == 11);
  gcc_assert (recog_data.n_dups == 4);

  if (dump_file)
    {
      fprintf (dump_file, ";; Operands extracted:\n");
      for (int i = 0; i < recog_data.n_operands; i++)
        avr_fdump (dump_file, ";; $%d = %r\n", i, recog_data.operand[i]);
      fprintf (dump_file, "\n");
    }

  return true;
}
477
478
479 /* Perform some extra checks on operands of casesi_<mode>_sequence.
480 Not all operand dependencies can be described by means of predicates.
481 This function performs left over checks and should always return true.
482 Returning false means that someone changed the casesi expander but did
483 not adjust casesi_<mode>_sequence. */
484
bool
avr_casei_sequence_check_operands (rtx *xop)
{
  /* SUB_5 is set to the (subreg:SI ($5) 0) expression found in $6
     for whichever of the two tablejump shapes applies; it stays
     NULL_RTX when neither shape matches.  */
  rtx sub_5 = NULL_RTX;

  if (AVR_HAVE_EIJMP_EICALL
      // The last clobber op of the tablejump.
      && xop[8] == all_regs_rtx[24])
    {
      // $6 is: (subreg:SI ($5) 0)
      sub_5 = xop[6];
    }

  if (!AVR_HAVE_EIJMP_EICALL
      // $6 is: (plus:HI (subreg:SI ($5) 0)
      //                 (label_ref ($3)))
      && PLUS == GET_CODE (xop[6])
      && LABEL_REF == GET_CODE (XEXP (xop[6], 1))
      && rtx_equal_p (xop[3], XEXP (XEXP (xop[6], 1), 0))
      // The last clobber op of the tablejump.
      && xop[8] == const0_rtx)
    {
      sub_5 = XEXP (xop[6], 0);
    }

  /* In either shape, the subreg must be a lowpart of $5.  */

  if (sub_5
      && SUBREG_P (sub_5)
      && SUBREG_BYTE (sub_5) == 0
      && rtx_equal_p (xop[5], SUBREG_REG (sub_5)))
    return true;

  if (dump_file)
    fprintf (dump_file, "\n;; Failed condition for casesi_<mode>_sequence\n\n");

  return false;
}
521
522
523 /* INSNS[1..5] is a sequence as generated by casesi and INSNS[0] is an
524 extension of an 8-bit or 16-bit integer to SImode. XOP contains the
525 operands of INSNS as extracted by insn_extract from pattern
526 casesi_<mode>_sequence:
527
528 $0: SImode reg switch value as result of $9.
529 $1: Negative of smallest index in switch.
530 $2: Number of entries in switch.
531 $3: Label to table.
532 $4: Label if out-of-bounds.
533 $5: $0 + $1.
534 $6: 3-byte PC: subreg:HI ($5) + label_ref ($3)
535 2-byte PC: subreg:HI ($5)
536 $7: HI reg index into table (Z or pseudo)
537 $8: R24 or const0_rtx (to be clobbered)
538 $9: Extension to SImode of an 8-bit or 16-bit integer register $10.
539 $10: QImode or HImode register input of $9.
540
541 Try to optimize this sequence, i.e. use the original HImode / QImode
542 switch value instead of SImode. */
543
static void
avr_optimize_casesi (rtx_insn *insns[6], rtx *xop)
{
  // Original mode of the switch value; this is QImode or HImode.
  machine_mode mode = GET_MODE (xop[10]);

  // How the original switch value was extended to SImode; this is
  // SIGN_EXTEND or ZERO_EXTEND.
  enum rtx_code code = GET_CODE (xop[9]);

  // Lower index, upper index (plus one) and range of case values.
  HOST_WIDE_INT low_idx = -INTVAL (xop[1]);
  HOST_WIDE_INT num_idx = INTVAL (xop[2]);
  HOST_WIDE_INT hig_idx = low_idx + num_idx;

  // Maximum ranges of (un)signed QImode resp. HImode.
  unsigned umax = QImode == mode ? 0xff : 0xffff;
  int imax = QImode == mode ? 0x7f : 0x7fff;
  int imin = -imax - 1;

  // Testing the case range and whether it fits into the range of the
  // (un)signed mode.  This test should actually always pass because it
  // makes no sense to have case values outside the mode range.  Notice
  // that case labels which are unreachable because they are outside the
  // mode of the switch value (e.g. "case -1" for uint8_t) have already
  // been thrown away by the middle-end.

  if (SIGN_EXTEND == code
      && low_idx >= imin
      && hig_idx <= imax)
    {
      // ok
    }
  else if (ZERO_EXTEND == code
           && low_idx >= 0
           && (unsigned) hig_idx <= umax)
    {
      // ok
    }
  else
    {
      if (dump_file)
        fprintf (dump_file, ";; Case ranges too big, giving up.\n\n");
      return;
    }

  // Do normalization of switch value $10 and out-of-bound check in its
  // original mode instead of in SImode.  Use a newly created pseudo.
  // This will replace insns[1..2].

  start_sequence();

  rtx_insn *seq1, *seq2, *last1, *last2;

  rtx reg = copy_to_mode_reg (mode, xop[10]);

  // Pick the mode-matching add / compare generators.
  rtx (*gen_add)(rtx,rtx,rtx) = QImode == mode ? gen_addqi3 : gen_addhi3;
  rtx (*gen_cmp)(rtx,rtx) = QImode == mode ? gen_cmpqi3 : gen_cmphi3;

  emit_insn (gen_add (reg, reg, gen_int_mode (-low_idx, mode)));
  emit_insn (gen_cmp (reg, gen_int_mode (num_idx, mode)));

  seq1 = get_insns();
  last1 = get_last_insn();
  end_sequence();

  emit_insn_before (seq1, insns[1]);

  // After the out-of-bounds test and corresponding branch, use a
  // 16-bit index.  If QImode is used, extend it to HImode first.
  // This will replace insns[4].

  start_sequence();

  if (QImode == mode)
    reg = force_reg (HImode, gen_rtx_fmt_e (code, HImode, reg));

  // 3-byte PC devices load the table index directly; 2-byte PC devices
  // add the table label to the index, mirroring the casesi expander.
  rtx pat_4 = AVR_3_BYTE_PC
    ? gen_movhi (xop[7], reg)
    : gen_addhi3 (xop[7], reg, gen_rtx_LABEL_REF (VOIDmode, xop[3]));

  emit_insn (pat_4);

  seq2 = get_insns();
  last2 = get_last_insn();
  end_sequence();

  emit_insn_after (seq2, insns[4]);

  if (dump_file)
    {
      fprintf (dump_file, ";; New insns: ");

      for (rtx_insn *insn = seq1; ; insn = NEXT_INSN (insn))
        {
          fprintf (dump_file, "%d, ", INSN_UID (insn));
          if (insn == last1)
            break;
        }
      for (rtx_insn *insn = seq2; ; insn = NEXT_INSN (insn))
        {
          fprintf (dump_file, "%d%s", INSN_UID (insn),
                   insn == last2 ? ".\n\n" : ", ");
          if (insn == last2)
            break;
        }

      fprintf (dump_file, ";; Deleting insns: %d, %d, %d.\n\n",
               INSN_UID (insns[1]), INSN_UID (insns[2]), INSN_UID (insns[4]));
    }

  // Pseudodelete the SImode and subreg of SImode insns.  We don't care
  // about the extension insns[0]: Its result is now unused and other
  // passes will clean it up.

  SET_INSN_DELETED (insns[1]);
  SET_INSN_DELETED (insns[2]);
  SET_INSN_DELETED (insns[4]);
}
663
664
/* Worker for the avr-casesi pass: scan every insn of every basic block
   of FUNC for a casesi sequence and try to narrow its switch value.  */

void
avr_pass_casesi::avr_rest_of_handle_casesi (function *func)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, func)
    {
      rtx_insn *insn, *insns[6];

      FOR_BB_INSNS (bb, insn)
        {
          /* avr_is_casesi_sequence fills INSNS and, via recog /
             extract_insn, leaves the operands in recog_data.  */
          if (avr_is_casesi_sequence (bb, insn, insns))
            {
              avr_optimize_casesi (insns, recog_data.operand);
            }
        }
    }
}
683
684
685 /* Set `avr_arch' as specified by `-mmcu='.
686 Return true on success. */
687
688 static bool
avr_set_core_architecture(void)689 avr_set_core_architecture (void)
690 {
691 /* Search for mcu core architecture. */
692
693 if (!avr_mmcu)
694 avr_mmcu = AVR_MMCU_DEFAULT;
695
696 avr_arch = &avr_arch_types[0];
697
698 for (const avr_mcu_t *mcu = avr_mcu_types; ; mcu++)
699 {
700 if (mcu->name == NULL)
701 {
702 /* Reached the end of `avr_mcu_types'. This should actually never
703 happen as options are provided by device-specs. It could be a
704 typo in a device-specs or calling the compiler proper directly
705 with -mmcu=<device>. */
706
707 error ("unknown core architecture %qs specified with %qs",
708 avr_mmcu, "-mmcu=");
709 avr_inform_core_architectures ();
710 break;
711 }
712 else if (strcmp (mcu->name, avr_mmcu) == 0
713 // Is this a proper architecture ?
714 && mcu->macro == NULL)
715 {
716 avr_arch = &avr_arch_types[mcu->arch_id];
717 if (avr_n_flash < 0)
718 avr_n_flash = 1 + (mcu->flash_size - 1) / 0x10000;
719
720 return true;
721 }
722 }
723
724 return false;
725 }
726
727
728 /* Implement `TARGET_OPTION_OVERRIDE'. */
729
static void
avr_option_override (void)
{
  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries to save-restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no more able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Allow optimizer to introduce store data races. This used to be the
     default - it was changed because bigger targets did not see any
     performance decrease. For the AVR though, disallowing data races
     introduces additional code in LIM and increases reg pressure.  */

  maybe_set_param_value (PARAM_ALLOW_STORE_DATA_RACES, 1,
                         global_options.x_param_values,
                         global_options_set.x_param_values);

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  /* PIC / PIE are not supported on AVR; warn (but do not error) so that
     builds merely passing these flags still proceed.  */

  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (flag_pie == 1)
    warning (OPT_fpie, "-fpie is not supported");
  if (flag_pie == 2)
    warning (OPT_fPIE, "-fPIE is not supported");

#if !defined (HAVE_AS_AVR_MGCCISR_OPTION)
  avr_gasisr_prologues = 0;
#endif

  if (!avr_set_core_architecture())
    return;

  /* RAM addresses of some SFRs common to all devices in respective arch. */

  /* SREG: Status Register containing flags like I (global IRQ) */
  avr_addr.sreg = 0x3F + avr_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM */
  avr_addr.rampz = 0x3B + avr_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_arch->sfr_offset;
  avr_addr.ccp = (AVR_TINY ? 0x3C : 0x34) + avr_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L) */
  avr_addr.sp_l = 0x3D + avr_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
}
801
802 /* Function to set up the backend function structure. */
803
static struct machine_function *
avr_init_machine_status (void)
{
  /* ggc_cleared_alloc zero-initializes, so all per-function flags
     (is_naked, is_interrupt, ...) start out as 0/false.  */
  return ggc_cleared_alloc<machine_function> ();
}
809
810
811 /* Implement `INIT_EXPANDERS'. */
812 /* The function works like a singleton. */
813
void
avr_init_expanders (void)
{
  /* One QImode REG rtx per hard register r0 ... r31.  */
  for (int regno = 0; regno < 32; regno ++)
    all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);

  lpm_reg_rtx  = all_regs_rtx[LPM_REGNO];
  tmp_reg_rtx  = all_regs_rtx[AVR_TMP_REGNO];
  zero_reg_rtx = all_regs_rtx[AVR_ZERO_REGNO];

  lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);

  /* MEM rtxes for the SFRs at the addresses computed by
     avr_option_override.  */
  sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
  rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
  rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
  rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
  rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));

  xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
  xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");

  /* TINY core does not have regs r10-r16, but avr-dimode.md expects them
     to be present */
  if (AVR_TINY)
    avr_have_dimode = false;
}
840
841
842 /* Implement `REGNO_REG_CLASS'. */
843 /* Return register class for register R. */
844
enum reg_class
avr_regno_reg_class (int r)
{
  /* One entry per hard register r0 ... r31 plus the two stack pointer
     bytes SPL / SPH at register numbers 32 and 33 -- 34 entries total,
     hence the `r <= 33' bound below.  */
  static const enum reg_class reg_class_tab[] =
    {
      R0_REG,
      /* r1 - r15 */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23 */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25 */
      ADDW_REGS, ADDW_REGS,
      /* X: r26, 27 */
      POINTER_X_REGS, POINTER_X_REGS,
      /* Y: r28, r29 */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* Z: r30, r31 */
      POINTER_Z_REGS, POINTER_Z_REGS,
      /* SP: SPL, SPH */
      STACK_REG, STACK_REG
    };

  if (r <= 33)
    return reg_class_tab[r];

  /* Anything beyond the physical registers.  */
  return ALL_REGS;
}
876
877
878 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
879
880 static bool
avr_scalar_mode_supported_p(scalar_mode mode)881 avr_scalar_mode_supported_p (scalar_mode mode)
882 {
883 if (ALL_FIXED_POINT_MODE_P (mode))
884 return true;
885
886 if (PSImode == mode)
887 return true;
888
889 return default_scalar_mode_supported_p (mode);
890 }
891
892
893 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
894
895 static bool
avr_decl_flash_p(tree decl)896 avr_decl_flash_p (tree decl)
897 {
898 if (TREE_CODE (decl) != VAR_DECL
899 || TREE_TYPE (decl) == error_mark_node)
900 {
901 return false;
902 }
903
904 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
905 }
906
907
908 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
909 address space and FALSE, otherwise. */
910
911 static bool
avr_decl_memx_p(tree decl)912 avr_decl_memx_p (tree decl)
913 {
914 if (TREE_CODE (decl) != VAR_DECL
915 || TREE_TYPE (decl) == error_mark_node)
916 {
917 return false;
918 }
919
920 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
921 }
922
923
924 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
925
926 bool
avr_mem_flash_p(rtx x)927 avr_mem_flash_p (rtx x)
928 {
929 return (MEM_P (x)
930 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
931 }
932
933
934 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
935 address space and FALSE, otherwise. */
936
937 bool
avr_mem_memx_p(rtx x)938 avr_mem_memx_p (rtx x)
939 {
940 return (MEM_P (x)
941 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
942 }
943
944
945 /* A helper for the subsequent function attribute used to dig for
946 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
947
948 static inline int
avr_lookup_function_attribute1(const_tree func,const char * name)949 avr_lookup_function_attribute1 (const_tree func, const char *name)
950 {
951 if (FUNCTION_DECL == TREE_CODE (func))
952 {
953 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
954 {
955 return true;
956 }
957
958 func = TREE_TYPE (func);
959 }
960
961 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
962 || TREE_CODE (func) == METHOD_TYPE);
963
964 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
965 }
966
/* Return nonzero if FUNC carries the "naked" attribute
   (on the decl or its type).  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
avr_interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
avr_signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}

/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}

/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}


/* Return nonzero if FUNC is a no_gccisr function as specified
   by the "no_gccisr" attribute.  */

static int
avr_no_gccisr_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "no_gccisr");
}
1018
/* Implement `TARGET_SET_CURRENT_FUNCTION'.  */
/* Sanity checking for the above function attributes.  */
1021
static void
avr_set_current_function (tree decl)
{
  /* Bail out early if there is nothing to check: no decl, no function
     context yet, or this function's attributes were already diagnosed
     (attributes_checked_p guards against duplicate diagnostics).  */
  if (decl == NULL_TREE
      || current_function_decl == NULL_TREE
      || current_function_decl == error_mark_node
      || ! cfun->machine
      || cfun->machine->attributes_checked_p)
    return;

  location_t loc = DECL_SOURCE_LOCATION (decl);

  /* Cache the AVR-specific function attributes in *cfun->machine.  */
  cfun->machine->is_naked = avr_naked_function_p (decl);
  cfun->machine->is_signal = avr_signal_function_p (decl);
  cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (decl);
  cfun->machine->is_no_gccisr = avr_no_gccisr_function_p (decl);

  const char *isr = cfun->machine->is_interrupt ? "interrupt" : "signal";

  /* Too many attributes make no sense as they request conflicting
     features.  */

  if (cfun->machine->is_OS_task
      && (cfun->machine->is_signal || cfun->machine->is_interrupt))
    error_at (loc, "function attributes %qs and %qs are mutually exclusive",
              "OS_task", isr);

  if (cfun->machine->is_OS_main
      && (cfun->machine->is_signal || cfun->machine->is_interrupt))
    error_at (loc, "function attributes %qs and %qs are mutually exclusive",
              "OS_main", isr);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
      tree ret = TREE_TYPE (TREE_TYPE (decl));
      const char *name;

      name = DECL_ASSEMBLER_NAME_SET_P (decl)
        ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
        : IDENTIFIER_POINTER (DECL_NAME (decl));

      /* Skip a leading '*' that might still prefix the assembler name,
         e.g. in non-LTO runs.  */

      name = default_strip_name_encoding (name);

      /* Interrupt handlers must be void __vector (void) functions.  */

      if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
        error_at (loc, "%qs function cannot have arguments", isr);

      if (TREE_CODE (ret) != VOID_TYPE)
        error_at (loc, "%qs function cannot return a value", isr);

#if defined WITH_AVRLIBC
      /* Silently ignore 'signal' if 'interrupt' is present.  AVR-LibC
         started using this when it switched from SIGNAL and INTERRUPT
         to ISR.  */

      if (cfun->machine->is_interrupt)
        cfun->machine->is_signal = 0;

      /* If the function has the 'signal' or 'interrupt' attribute, ensure
         that the name of the function is "__vector_NN" so as to catch
         when the user misspells the vector name.  */

      if (!STR_PREFIX_P (name, "__vector"))
        warning_at (loc, OPT_Wmisspelled_isr, "%qs appears to be a misspelled "
                    "%qs handler, missing %<__vector%> prefix", name, isr);
#endif // AVR-LibC naming conventions
    }

#if defined WITH_AVRLIBC
  // Common problem is using "ISR" without first including avr/interrupt.h.
  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
  name = default_strip_name_encoding (name);
  if (strcmp ("ISR", name) == 0
      || strcmp ("INTERRUPT", name) == 0
      || strcmp ("SIGNAL", name) == 0)
    {
      warning_at (loc, OPT_Wmisspelled_isr, "%qs is a reserved identifier"
                  " in AVR-LibC.  Consider %<#include <avr/interrupt.h>%>"
                  " before using the %qs macro", name, name);
    }
#endif // AVR-LibC naming conventions

  /* Don't print the above diagnostics more than once.  */

  cfun->machine->attributes_checked_p = 1;
}
1113
1114
1115 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
1116
1117 int
avr_accumulate_outgoing_args(void)1118 avr_accumulate_outgoing_args (void)
1119 {
1120 if (!cfun)
1121 return TARGET_ACCUMULATE_OUTGOING_ARGS;
1122
1123 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
1124 what offset is correct. In some cases it is relative to
1125 virtual_outgoing_args_rtx and in others it is relative to
1126 virtual_stack_vars_rtx. For example code see
1127 gcc.c-torture/execute/built-in-setjmp.c
1128 gcc.c-torture/execute/builtins/sprintf-chk.c */
1129
1130 return (TARGET_ACCUMULATE_OUTGOING_ARGS
1131 && !(cfun->calls_setjmp
1132 || cfun->has_nonlocal_label));
1133 }
1134
1135
1136 /* Report contribution of accumulated outgoing arguments to stack size. */
1137
1138 static inline int
avr_outgoing_args_size(void)1139 avr_outgoing_args_size (void)
1140 {
1141 return (ACCUMULATE_OUTGOING_ARGS
1142 ? (HOST_WIDE_INT) crtl->outgoing_args_size : 0);
1143 }
1144
1145
1146 /* Implement TARGET_STARTING_FRAME_OFFSET. */
1147 /* This is the offset from the frame pointer register to the first stack slot
1148 that contains a variable living in the frame. */
1149
1150 static HOST_WIDE_INT
avr_starting_frame_offset(void)1151 avr_starting_frame_offset (void)
1152 {
1153 return 1 + avr_outgoing_args_size ();
1154 }
1155
1156
1157 /* Return the number of hard registers to push/pop in the prologue/epilogue
1158 of the current function, and optionally store these registers in SET. */
1159
static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int count;
  int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */

  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (int reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */

      if (fixed_regs[reg])
        continue;

      /* A register must be saved if: it is a call-used register in a
         non-leaf ISR (a callee may clobber it while interrupts are off),
         or it is live across the function and either callee-saved or
         we are in an ISR (which must preserve everything).  */
      if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == REG_Y + 1))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
1201
1202
1203 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
1204
1205 static bool
avr_allocate_stack_slots_for_args(void)1206 avr_allocate_stack_slots_for_args (void)
1207 {
1208 return !cfun->machine->is_naked;
1209 }
1210
1211
1212 /* Return true if register FROM can be eliminated via register TO. */
1213
1214 static bool
avr_can_eliminate(const int from ATTRIBUTE_UNUSED,const int to)1215 avr_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1216 {
1217 return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
1218 || !frame_pointer_needed);
1219 }
1220
1221
1222 /* Implement `TARGET_WARN_FUNC_RETURN'. */
1223
1224 static bool
avr_warn_func_return(tree decl)1225 avr_warn_func_return (tree decl)
1226 {
1227 /* Naked functions are implemented entirely in assembly, including the
1228 return sequence, so suppress warnings about this. */
1229
1230 return !avr_naked_function_p (decl);
1231 }
1232
1233 /* Compute offset between arg_pointer and frame_pointer. */
1234
1235 int
avr_initial_elimination_offset(int from,int to)1236 avr_initial_elimination_offset (int from, int to)
1237 {
1238 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1239 return 0;
1240 else
1241 {
1242 int offset = frame_pointer_needed ? 2 : 0;
1243 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
1244
1245 // If FROM is ARG_POINTER_REGNUM, we are not in an ISR as ISRs
1246 // might not have arguments. Hence the following is not affected
1247 // by gasisr prologues.
1248 offset += avr_regs_to_save (NULL);
1249 return (get_frame_size () + avr_outgoing_args_size()
1250 + avr_pc_size + 1 + offset);
1251 }
1252 }
1253
1254
1255 /* Helper for the function below. */
1256
static void
avr_adjust_type_node (tree *node, machine_mode mode, int sat_p)
{
  /* Build a fresh fixed-point type node for MODE, with saturation per
     SAT_P, and lay it out.  *NODE is overwritten with the new node.  */
  *node = make_node (FIXED_POINT_TYPE);
  TYPE_SATURATING (*node) = sat_p;
  TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
  TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
  TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
  TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
  /* AVR is an 8-bit machine: everything is byte-aligned.  */
  SET_TYPE_ALIGN (*node, 8);
  SET_TYPE_MODE (*node, mode);

  layout_type (*node);
}
1271
1272
1273 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
1274
static tree
avr_build_builtin_va_list (void)
{
  /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
     This is more appropriate for the 8-bit machine AVR than 128-bit modes.
     The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
     which is auto-generated by genmodes, but the compiler assigns [U]DAmode
     to the long long accum modes instead of the desired [U]TAmode.

     Fix this now, right after node setup in tree.c:build_common_tree_nodes().
     This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
     which built-in defines macros like __ULLACCUM_FBIT__ that are used by
     libgcc to detect IBIT and FBIT.  */

  /* Rebuild the four long long accum type nodes ([un]signed, with and
     without saturation) around [U]TAmode.  */
  avr_adjust_type_node (&ta_type_node, TAmode, 0);
  avr_adjust_type_node (&uta_type_node, UTAmode, 0);
  avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
  avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);

  unsigned_long_long_accum_type_node = uta_type_node;
  long_long_accum_type_node = ta_type_node;
  sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
  sat_long_long_accum_type_node = sat_ta_type_node;

  /* Dispatch to the default handler.  */

  return std_build_builtin_va_list ();
}
1303
1304
1305 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
/* The actual start of the frame is virtual_stack_vars_rtx, which is offset
   from the frame pointer by +TARGET_STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - TARGET_STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */
1310
static rtx
avr_builtin_setjmp_frame_value (void)
{
  /* Emit "xval = virtual_stack_vars_rtx - starting_frame_offset" into a
     fresh pseudo and return it; see the rationale in the comment above
     this function.  */
  rtx xval = gen_reg_rtx (Pmode);
  emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
                         gen_int_mode (avr_starting_frame_offset (), Pmode)));
  return xval;
}
1319
1320
1321 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
1322 This is return address of function. */
1323
rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address.  Others not supported.  */
  if (count)
    return NULL;

  /* .L__stack_usage is emitted per function by
     avr_asm_function_end_prologue; SP + .L__stack_usage points at the
     return address on the stack.  With a 3-byte PC only the low two
     bytes of the address can be delivered in HImode.  */
  if (AVR_3_BYTE_PC)
    {
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "%<builtin_return_address%> contains only 2 bytes"
               " of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  /* Record that this function references .L__stack_usage so the symbol
     is guaranteed to be emitted.  */
  cfun->machine->use_L__stack_usage = 1;

  /* Load the big-endian return address from TEM + offset and byte-swap
     it via ROTATE by 8.  */
  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}
1349
1350 /* Return 1 if the function epilogue is just a single "ret". */
1351
1352 int
avr_simple_epilogue(void)1353 avr_simple_epilogue (void)
1354 {
1355 return (! frame_pointer_needed
1356 && get_frame_size () == 0
1357 && avr_outgoing_args_size() == 0
1358 && avr_regs_to_save (NULL) == 0
1359 && ! cfun->machine->is_interrupt
1360 && ! cfun->machine->is_signal
1361 && ! cfun->machine->is_naked
1362 && ! TREE_THIS_VOLATILE (current_function_decl));
1363 }
1364
1365 /* This function checks sequence of live registers. */
1366
static int
sequent_regs_live (void)
{
  /* LIVE_SEQ counts all live callee-saved registers seen so far;
     CUR_SEQ counts only the current contiguous run of them.  The two
     are equal exactly when all live registers form one sequence ending
     at the top (which is what __prologue_saves__ can handle).  */
  int live_seq = 0;
  int cur_seq = 0;

  for (int reg = 0; reg <= LAST_CALLEE_SAVED_REG; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  /* R28/R29 (REG_Y) terminate the sequence: they are always part of it
     when the frame pointer is needed, otherwise only if live.  */
  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y + 1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      cur_seq += 2;
      live_seq += 2;
    }

  /* Valid only if the live registers are one unbroken run.  */
  return (cur_seq == live_seq) ? live_seq : 0;
}
1423
namespace {
/* Metadata for the AVR pre-prologue/epilogue RTL pass; the pass name
   is patched in by the constructor below.  */
static const pass_data avr_pass_data_pre_proep =
{
  RTL_PASS,      // type
  "",            // name (will be patched)
  OPTGROUP_NONE, // optinfo_flags
  TV_DF_SCAN,    // tv_id
  0,             // properties_required
  0,             // properties_provided
  0,             // properties_destroyed
  0,             // todo_flags_start
  0              // todo_flags_finish
};


/* RTL pass that decides, before prologue/epilogue generation, whether
   GAS may be asked to emit parts of an ISR's prologue / epilogue.  */

class avr_pass_pre_proep : public rtl_opt_pass
{
public:
  avr_pass_pre_proep (gcc::context *ctxt, const char *name)
    : rtl_opt_pass (avr_pass_data_pre_proep, ctxt)
  {
    this->name = name;
  }

  /* Scan FUN and set fun->machine->gasisr.maybe if nothing prohibits
     GAS-generated ISR pro/epilogues.  */
  void compute_maybe_gasisr (function*);

  virtual unsigned int execute (function *fun)
  {
    if (avr_gasisr_prologues
        // Whether this function is an ISR worth scanning at all.
        && !fun->machine->is_no_gccisr
        && (fun->machine->is_interrupt
            || fun->machine->is_signal)
        && !cfun->machine->is_naked
        // Paranoia: Non-local gotos and labels that might escape.
        && !cfun->calls_setjmp
        && !cfun->has_nonlocal_label
        && !cfun->has_forced_label_in_static)
      {
        compute_maybe_gasisr (fun);
      }

    return 0;
  }

}; // avr_pass_pre_proep

} // anon namespace
1472
1473 rtl_opt_pass*
make_avr_pass_pre_proep(gcc::context * ctxt)1474 make_avr_pass_pre_proep (gcc::context *ctxt)
1475 {
1476 return new avr_pass_pre_proep (ctxt, "avr-pre-proep");
1477 }
1478
1479
1480 /* Set fun->machine->gasisr.maybe provided we don't find anything that
1481 prohibits GAS generating parts of ISR prologues / epilogues for us. */
1482
1483 void
compute_maybe_gasisr(function * fun)1484 avr_pass_pre_proep::compute_maybe_gasisr (function *fun)
1485 {
1486 // Don't use BB iterators so that we see JUMP_TABLE_DATA.
1487
1488 for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
1489 {
1490 // Transparent calls always use [R]CALL and are filtered out by GAS.
1491 // ISRs don't use -mcall-prologues, hence what remains to be filtered
1492 // out are open coded (tail) calls.
1493
1494 if (CALL_P (insn))
1495 return;
1496
1497 // __tablejump2__ clobbers something and is targeted by JMP so
1498 // that GAS won't see its usage.
1499
1500 if (AVR_HAVE_JMP_CALL
1501 && JUMP_TABLE_DATA_P (insn))
1502 return;
1503
1504 // Non-local gotos not seen in *FUN.
1505
1506 if (JUMP_P (insn)
1507 && find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1508 return;
1509 }
1510
1511 fun->machine->gasisr.maybe = 1;
1512 }
1513
1514
1515 /* Obtain the length sequence of insns. */
1516
1517 int
get_sequence_length(rtx_insn * insns)1518 get_sequence_length (rtx_insn *insns)
1519 {
1520 int length = 0;
1521
1522 for (rtx_insn *insn = insns; insn; insn = NEXT_INSN (insn))
1523 length += get_attr_length (insn);
1524
1525 return length;
1526 }
1527
1528
1529 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
1530
1531 rtx
avr_incoming_return_addr_rtx(void)1532 avr_incoming_return_addr_rtx (void)
1533 {
1534 /* The return address is at the top of the stack. Note that the push
1535 was via post-decrement, which means the actual address is off by one. */
1536 return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
1537 }
1538
1539
1540 /* Unset a bit in *SET. If successful, return the respective bit number.
1541 Otherwise, return -1 and *SET is unaltered. */
1542
static int
avr_hregs_split_reg (HARD_REG_SET *set)
{
  for (int regno = 0; regno < 32; regno++)
    if (TEST_HARD_REG_BIT (*set, regno))
      {
        // Don't remove a register from *SET which might indicate that
        // some RAMP* register might need ISR prologue / epilogue treatment:
        // a complete X/Y/Z pair in *SET is what later triggers the push
        // of the matching RAMPX/Y/Z SFR, so the pair must stay intact.

        if (AVR_HAVE_RAMPX
            && (REG_X == regno || REG_X + 1 == regno)
            && TEST_HARD_REG_BIT (*set, REG_X)
            && TEST_HARD_REG_BIT (*set, REG_X + 1))
          continue;

        // With a frame pointer, RAMPY handling is keyed off
        // frame_pointer_needed rather than the Y pair in *SET.
        if (AVR_HAVE_RAMPY
            && !frame_pointer_needed
            && (REG_Y == regno || REG_Y + 1 == regno)
            && TEST_HARD_REG_BIT (*set, REG_Y)
            && TEST_HARD_REG_BIT (*set, REG_Y + 1))
          continue;

        if (AVR_HAVE_RAMPZ
            && (REG_Z == regno || REG_Z + 1 == regno)
            && TEST_HARD_REG_BIT (*set, REG_Z)
            && TEST_HARD_REG_BIT (*set, REG_Z + 1))
          continue;

        CLEAR_HARD_REG_BIT (*set, regno);
        return regno;
      }

  return -1;
}
1577
1578
1579 /* Helper for expand_prologue. Emit a push of a byte register. */
1580
1581 static void
emit_push_byte(unsigned regno,bool frame_related_p)1582 emit_push_byte (unsigned regno, bool frame_related_p)
1583 {
1584 rtx mem, reg;
1585 rtx_insn *insn;
1586
1587 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
1588 mem = gen_frame_mem (QImode, mem);
1589 reg = gen_rtx_REG (QImode, regno);
1590
1591 insn = emit_insn (gen_rtx_SET (mem, reg));
1592 if (frame_related_p)
1593 RTX_FRAME_RELATED_P (insn) = 1;
1594
1595 cfun->machine->stack_usage++;
1596 }
1597
1598
1599 /* Helper for expand_prologue. Emit a push of a SFR via register TREG.
1600 SFR is a MEM representing the memory location of the SFR.
1601 If CLR_P then clear the SFR after the push using zero_reg. */
1602
static void
emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p, int treg)
{
  rtx_insn *insn;

  gcc_assert (MEM_P (sfr));

  /* IN treg, IO(SFR) : read the SFR into the scratch register.  */
  insn = emit_move_insn (all_regs_rtx[treg], sfr);
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* PUSH treg : save the value on the stack.  */
  emit_push_byte (treg, frame_related_p);

  if (clr_p)
    {
      /* OUT IO(SFR), __zero_reg__ : clear the SFR after saving it.  */
      insn = emit_move_insn (sfr, const0_rtx);
      if (frame_related_p)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}
1626
/* Set up the frame for the current function: save registers per SET and
   allocate SIZE bytes of frame, either via the __prologue_saves__
   library sequence (-mcall-prologues) or with open-coded pushes and a
   stack/frame pointer adjustment.  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx_insn *insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  HOST_WIDE_INT size_max
    = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);

  /* The __prologue_saves__ path requires a contiguous run of live
     callee-saved registers and is unavailable for ISRs, OS_task/OS_main
     functions and AVR_TINY.  */
  bool minimize = (TARGET_CALL_PROLOGUES
                   && size < size_max
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main
                   && !AVR_TINY);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      /* __prologue_saves__ expects the frame size in X.  */
      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET ((frame_pointer_needed
                                  ? frame_pointer_rtx
                                  : stack_pointer_rtx),
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = (LAST_CALLEE_SAVED_REG + 1) - (live_seq - 2);

      /* Attach a REG_CFA_OFFSET note for each saved register, walking
         r29, r28, then downwards from LAST_CALLEE_SAVED_REG.  */
      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? LAST_CALLEE_SAVED_REG : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
                                                  offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      /* Open-coded pushes of all registers recorded in SET.  */
      for (int reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /* Creating a frame can be done by direct manipulation of the
             stack or via the frame pointer.  These two methods are:
                 fp = sp
                 fp -= size
                 sp = fp
             or
                 sp -= size
                 fp = sp    (*)
             The optimum method depends on function type, stack and
             frame size.  To avoid a complex logic, both methods are
             tested and shortest is selected.

             There is also the case where SIZE != 0 and no frame pointer is
             needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
             In that case, insn (*) is not needed.
             We use the X register as scratch.  This is safe because X
             is call-clobbered.
             In an interrupt routine, the case of SIZE != 0 together with
             !frame_pointer_needed can only occur if the function is not a
             leaf function and thus X has already been saved.  */

          int irq_state = -1;
          HOST_WIDE_INT size_cfa = size, neg_size;
          rtx_insn *fp_plus_insns;
          rtx fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !crtl->is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /* Cut down size and avoid size = 0 so that we don't run
             into ICE like PR52488 in the remainder.  */

          if (size > size_max)
            {
              /* Don't error so that insane code from newlib still compiles
                 and does not break building newlib.  As PR51345 is implemented
                 now, there are multilib variants with -msp8.

                 If user wants sanity checks he can use -Wstack-usage=
                 or similar options.

                 For CFA we emit the original, non-saturated size so that
                 the generic machinery is aware of the real stack usage and
                 will print the above diagnostic as expected.  */

              size = size_max;
            }

          size = trunc_int_for_mode (size, GET_MODE (my_fp));
          neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
                                                       my_fp, neg_size));

          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, plus_constant (Pmode, fp,
                                                            -size_cfa)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          int n_rcall = size / (AVR_3_BYTE_PC ? 3 : 2);

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode)
              // Don't use more than 3 RCALLs.
              && n_rcall <= 3)
            {
              rtx_insn *sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (Pmode, stack_pointer_rtx,
                                                    -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************  Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size_cfa;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
1896
1897
1898 /* Output function prologue. */
1899
void
avr_expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  size = get_frame_size() + avr_outgoing_args_size();

  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  The user supplies the whole body in asm.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      int treg = AVR_TMP_REGNO;
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      if (cfun->machine->gasisr.maybe)
        {
          /* Let GAS PR21472 emit prologue preamble for us which handles SREG,
             ZERO_REG and TMP_REG and one additional, optional register for
             us in an optimal way.  This even scans through inline asm.  */

          cfun->machine->gasisr.yes = 1;

          // The optional reg or TMP_REG if we don't need one.  If we need one,
          // remove that reg from SET so that it's not pushed / popped twice.
          // We also use it below instead of TMP_REG in some places.

          treg = avr_hregs_split_reg (&set);
          if (treg < 0)
            treg = AVR_TMP_REGNO;
          cfun->machine->gasisr.regno = treg;

          // The worst case of pushes.  The exact number can be inferred
          // at assembly time by magic expression __gcc_isr.n_pushed.
          cfun->machine->stack_usage += 3 + (treg != AVR_TMP_REGNO);

          // Emit a Prologue chunk.  Epilogue chunk(s) might follow.
          // The final Done chunk is emitted by final postscan.
          emit_insn (gen_gasisr (GEN_INT (GASISR_Prologue), GEN_INT (treg)));
        }
      else // !TARGET_GASISR_PROLOGUES: Classic, dumb prologue preamble.
        {
          /* Push zero reg.  */
          emit_push_byte (AVR_ZERO_REGNO, true);

          /* Push tmp reg.  */
          emit_push_byte (AVR_TMP_REGNO, true);

          /* Push SREG.  */
          /* ??? There's no dwarf2 column reserved for SREG.  */
          emit_push_sfr (sreg_rtx, false, false /* clr */, AVR_TMP_REGNO);

          /* Clear zero reg.  */
          emit_move_insn (zero_reg_rtx, const0_rtx);

          /* Prevent any attempt to delete the setting of ZERO_REG!  */
          emit_use (zero_reg_rtx);
        }

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame */, true /* clr */, treg);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame */, true /* clr */, treg);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame */, true /* clr */, treg);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          /* Only clear RAMPZ if RAMPD exists; see the ELPM usage notes.  */
          emit_push_sfr (rampz_rtx, false /* frame */, AVR_HAVE_RAMPD, treg);
        }
    }  /* is_interrupt is_signal */

  /* Save remaining registers and allocate the frame proper.  */
  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size
      = cfun->machine->stack_usage + INCOMING_FRAME_SP_OFFSET;
}
2004
2005
2006 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
2007 /* Output summary at end of function prologue. */
2008
2009 static void
avr_asm_function_end_prologue(FILE * file)2010 avr_asm_function_end_prologue (FILE *file)
2011 {
2012 if (cfun->machine->is_naked)
2013 {
2014 fputs ("/* prologue: naked */\n", file);
2015 }
2016 else
2017 {
2018 if (cfun->machine->is_interrupt)
2019 {
2020 fputs ("/* prologue: Interrupt */\n", file);
2021 }
2022 else if (cfun->machine->is_signal)
2023 {
2024 fputs ("/* prologue: Signal */\n", file);
2025 }
2026 else
2027 fputs ("/* prologue: function */\n", file);
2028 }
2029
2030 if (ACCUMULATE_OUTGOING_ARGS)
2031 fprintf (file, "/* outgoing args size = %d */\n",
2032 avr_outgoing_args_size());
2033
2034 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
2035 (HOST_WIDE_INT) get_frame_size());
2036
2037 if (!cfun->machine->gasisr.yes)
2038 {
2039 fprintf (file, "/* stack size = %d */\n", cfun->machine->stack_usage);
2040 // Create symbol stack offset so all functions have it. Add 1 to stack
2041 // usage for offset so that SP + .L__stack_offset = return address.
2042 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
2043 }
2044 else
2045 {
2046 int used_by_gasisr = 3 + (cfun->machine->gasisr.regno != AVR_TMP_REGNO);
2047 int to = cfun->machine->stack_usage;
2048 int from = to - used_by_gasisr;
2049 // Number of pushed regs is only known at assembly-time.
2050 fprintf (file, "/* stack size = %d...%d */\n", from , to);
2051 fprintf (file, ".L__stack_usage = %d + __gcc_isr.n_pushed\n", from);
2052 }
2053 }
2054
2055
2056 /* Implement `EPILOGUE_USES'. */
2057
2058 int
avr_epilogue_uses(int regno ATTRIBUTE_UNUSED)2059 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
2060 {
2061 if (reload_completed
2062 && cfun->machine
2063 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
2064 return 1;
2065 return 0;
2066 }
2067
2068 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
2069
2070 static void
emit_pop_byte(unsigned regno)2071 emit_pop_byte (unsigned regno)
2072 {
2073 rtx mem, reg;
2074
2075 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
2076 mem = gen_frame_mem (QImode, mem);
2077 reg = gen_rtx_REG (QImode, regno);
2078
2079 emit_insn (gen_rtx_SET (reg, mem));
2080 }
2081
2082 /* Output RTL epilogue. */
2083
void
avr_expand_epilogue (bool sibcall_p)
{
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  /* Total stack to deallocate: local frame plus accumulated outgoing
     argument area.  */
  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  /* Whether the out-of-line restore sequence (epilogue_restores) may be
     used instead of individual pops.  */
  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && !AVR_TINY);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /* Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (Pmode, frame_pointer_rtx, size));
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      /* irq_state is handed to movhi_sp_r: -1 leaves the choice open,
         0 means interrupts are known off, 2 for 8-bit SP.
         NOTE(review): exact encoding defined by movhi_sp_r in avr.md —
         confirm there.  */
      int irq_state = -1;
      rtx fp, my_fp;
      rtx_insn *fp_plus_insns;
      HOST_WIDE_INT size_max;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !crtl->is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /* For rationale see comment in prologue generation.  */

      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
      if (size > size_max)
        size = size_max;
      size = trunc_int_for_mode (size, GET_MODE (my_fp));

      /********** Method 1: Adjust fp register  **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));

      /* Copy to stack pointer.  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /********** Method 2: Adjust Stack pointer  **********/

      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx_insn *sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (Pmode, stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method  ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See avr_expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers.  */

  /* TREG is the scratch register used below to restore SFRs; with GAS-
     generated ISR pro/epilogues it is the register GAS chose.  */
  int treg = AVR_TMP_REGNO;

  if (isr_p
      && cfun->machine->gasisr.yes)
    {
      treg = cfun->machine->gasisr.regno;
      CLEAR_HARD_REG_BIT (set, treg);
    }

  /* Pop in descending register order, presumably mirroring the
     prologue's push order.  */
  for (int reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in prologue.  */

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (treg);
          emit_move_insn (rampz_rtx, all_regs_rtx[treg]);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (treg);
          emit_move_insn (rampy_rtx, all_regs_rtx[treg]);
        }

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (treg);
          emit_move_insn (rampx_rtx, all_regs_rtx[treg]);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (treg);
          emit_move_insn (rampd_rtx, all_regs_rtx[treg]);
        }

      if (cfun->machine->gasisr.yes)
        {
          // Emit an Epilogue chunk.
          emit_insn (gen_gasisr (GEN_INT (GASISR_Epilogue),
                                 GEN_INT (cfun->machine->gasisr.regno)));
        }
      else // !TARGET_GASISR_PROLOGUES
        {
          /* Restore SREG using tmp_reg as scratch.  */

          emit_pop_byte (AVR_TMP_REGNO);
          emit_move_insn (sreg_rtx, tmp_reg_rtx);

          /* Restore tmp REG.  */
          emit_pop_byte (AVR_TMP_REGNO);

          /* Restore zero REG.  */
          emit_pop_byte (AVR_ZERO_REGNO);
        }
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
2301
2302
2303 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'. */
2304
2305 static void
avr_asm_function_begin_epilogue(FILE * file)2306 avr_asm_function_begin_epilogue (FILE *file)
2307 {
2308 app_disable();
2309 fprintf (file, "/* epilogue start */\n");
2310 }
2311
2312
/* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'.  */
2314
2315 static bool
avr_cannot_modify_jumps_p(void)2316 avr_cannot_modify_jumps_p (void)
2317 {
2318 /* Naked Functions must not have any instructions after
2319 their epilogue, see PR42240 */
2320
2321 if (reload_completed
2322 && cfun->machine
2323 && cfun->machine->is_naked)
2324 {
2325 return true;
2326 }
2327
2328 return false;
2329 }
2330
2331
2332 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
2333
2334 static bool
avr_mode_dependent_address_p(const_rtx addr ATTRIBUTE_UNUSED,addr_space_t as)2335 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
2336 {
2337 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
2338 This hook just serves to hack around PR rtl-optimization/52543 by
2339 claiming that non-generic addresses were mode-dependent so that
2340 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
2341 RTXes to probe SET and MEM costs and assumes that MEM is always in the
2342 generic address space which is not true. */
2343
2344 return !ADDR_SPACE_GENERIC_P (as);
2345 }
2346
2347
2348 /* Return true if rtx X is a CONST_INT, CONST or SYMBOL_REF
2349 address with the `absdata' variable attribute, i.e. respective
2350 data can be read / written by LDS / STS instruction.
2351 This is used only for AVR_TINY. */
2352
2353 static bool
avr_address_tiny_absdata_p(rtx x,machine_mode mode)2354 avr_address_tiny_absdata_p (rtx x, machine_mode mode)
2355 {
2356 if (CONST == GET_CODE (x))
2357 x = XEXP (XEXP (x, 0), 0);
2358
2359 if (SYMBOL_REF_P (x))
2360 return SYMBOL_REF_FLAGS (x) & AVR_SYMBOL_FLAG_TINY_ABSDATA;
2361
2362 if (CONST_INT_P (x)
2363 && IN_RANGE (INTVAL (x), 0, 0xc0 - GET_MODE_SIZE (mode)))
2364 return true;
2365
2366 return false;
2367 }
2368
2369
2370 /* Helper function for `avr_legitimate_address_p'. */
2371
2372 static inline bool
avr_reg_ok_for_addr_p(rtx reg,addr_space_t as,RTX_CODE outer_code,bool strict)2373 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
2374 RTX_CODE outer_code, bool strict)
2375 {
2376 return (REG_P (reg)
2377 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
2378 as, outer_code, UNKNOWN)
2379 || (!strict
2380 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
2381 }
2382
2383
2384 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
2385 machine for a memory operand of mode MODE. */
2386
static bool
avr_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
  /* Constant addresses start out legitimate; the switch below handles
     the register-based forms and may override OK either way.  */
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      if (strict
          && GET_MODE_SIZE (mode) > 4
          && REG_X == REGNO (x))
        {
          /* Reject wide (> 4 byte) accesses through X.
             NOTE(review): presumably because X has no displacement
             addressing — confirm against avr.md.  */
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        /* Only base register + non-negative constant displacement
           is considered.  */
        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            /* Displacement must keep the whole MODE-sized access within
               the LD offset range.  */
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                /* Frame / arg pointer based addresses are always fine.  */
                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                /* Accept over-large frame-pointer offsets here; they are
                   dealt with elsewhere (cf. avr_legitimize_address).  */
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (AVR_TINY
      && CONSTANT_ADDRESS_P (x))
    {
      /* avrtiny's load / store instructions only cover addresses 0..0xbf:
         IN / OUT range is 0..0x3f and LDS / STS can access 0x40..0xbf.  */

      ok = avr_address_tiny_absdata_p (x, mode);
    }

  /* Optional debug dump of the decision.  */
  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
2479
2480
2481 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
2482 now only a helper for avr_addr_space_legitimize_address. */
2483 /* Attempts to replace X with a valid
2484 memory address for an operand of mode MODE */
2485
static rtx
avr_legitimize_address (rtx x, rtx oldx, machine_mode mode)
{
  bool big_offset_p = false;

  /* Work on OLDX; the incoming X is discarded.  */
  x = oldx;

  if (AVR_TINY)
    {
      /* Constant addresses outside the avrtiny LDS / STS range must go
         through a register (cf. avr_address_tiny_absdata_p).  */
      if (CONSTANT_ADDRESS_P (x)
          && ! avr_address_tiny_absdata_p (x, mode))
        {
          x = force_reg (Pmode, x);
        }
    }

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx, 0)))
    {
      if (REG_P (XEXP (oldx, 1)))
        /* reg + reg is not a legitimate AVR address (cf.
           avr_legitimate_address_p); compute it into a register.  */
        x = force_reg (GET_MODE (oldx), oldx);
      else if (CONST_INT_P (XEXP (oldx, 1)))
        {
          int offs = INTVAL (XEXP (oldx, 1));
          /* Displacements beyond the LD offset range are forced into a
             register, except off the frame pointer.  */
          if (frame_pointer_rtx != XEXP (oldx, 0)
              && offs > MAX_LD_OFFSET (mode))
            {
              big_offset_p = true;
              x = force_reg (GET_MODE (oldx), oldx);
            }
        }
    }

  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);

      if (x != oldx)
        avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
    }

  return x;
}
2529
2530
2531 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
2532 /* This will allow register R26/27 to be used where it is no worse than normal
2533 base pointers R28/29 or R30/31. For example, if base offset is greater
2534 than 63 bytes or for R++ or --R addressing. */
2535
2536 rtx
avr_legitimize_reload_address(rtx * px,machine_mode mode,int opnum,int type,int addr_type,int ind_levels ATTRIBUTE_UNUSED,rtx (* mk_memloc)(rtx,int))2537 avr_legitimize_reload_address (rtx *px, machine_mode mode,
2538 int opnum, int type, int addr_type,
2539 int ind_levels ATTRIBUTE_UNUSED,
2540 rtx (*mk_memloc)(rtx,int))
2541 {
2542 rtx x = *px;
2543
2544 if (avr_log.legitimize_reload_address)
2545 avr_edump ("\n%?:%m %r\n", mode, x);
2546
2547 if (1 && (GET_CODE (x) == POST_INC
2548 || GET_CODE (x) == PRE_DEC))
2549 {
2550 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
2551 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
2552 opnum, RELOAD_OTHER);
2553
2554 if (avr_log.legitimize_reload_address)
2555 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
2556 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
2557
2558 return x;
2559 }
2560
2561 if (GET_CODE (x) == PLUS
2562 && REG_P (XEXP (x, 0))
2563 && reg_equiv_constant (REGNO (XEXP (x, 0))) == 0
2564 && CONST_INT_P (XEXP (x, 1))
2565 && INTVAL (XEXP (x, 1)) >= 1)
2566 {
2567 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
2568
2569 if (fit)
2570 {
2571 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
2572 {
2573 int regno = REGNO (XEXP (x, 0));
2574 rtx mem = mk_memloc (x, regno);
2575
2576 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
2577 POINTER_REGS, Pmode, VOIDmode, 0, 0,
2578 1, (enum reload_type) addr_type);
2579
2580 if (avr_log.legitimize_reload_address)
2581 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2582 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
2583
2584 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
2585 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
2586 opnum, (enum reload_type) type);
2587
2588 if (avr_log.legitimize_reload_address)
2589 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2590 BASE_POINTER_REGS, mem, NULL_RTX);
2591
2592 return x;
2593 }
2594 }
2595 else if (! (frame_pointer_needed
2596 && XEXP (x, 0) == frame_pointer_rtx))
2597 {
2598 push_reload (x, NULL_RTX, px, NULL,
2599 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
2600 opnum, (enum reload_type) type);
2601
2602 if (avr_log.legitimize_reload_address)
2603 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
2604 POINTER_REGS, x, NULL_RTX);
2605
2606 return x;
2607 }
2608 }
2609
2610 return NULL_RTX;
2611 }
2612
2613
2614 /* Helper function to print assembler resp. track instruction
2615 sequence lengths. Always return "".
2616
2617 If PLEN == NULL:
2618 Output assembler code from template TPL with operands supplied
2619 by OPERANDS. This is just forwarding to output_asm_insn.
2620
2621 If PLEN != NULL:
2622 If N_WORDS >= 0 Add N_WORDS to *PLEN.
2623 If N_WORDS < 0 Set *PLEN to -N_WORDS.
2624 Don't output anything.
2625 */
2626
2627 static const char*
avr_asm_len(const char * tpl,rtx * operands,int * plen,int n_words)2628 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
2629 {
2630 if (plen == NULL)
2631 output_asm_insn (tpl, operands);
2632 else
2633 {
2634 if (n_words < 0)
2635 *plen = -n_words;
2636 else
2637 *plen += n_words;
2638 }
2639
2640 return "";
2641 }
2642
2643
2644 /* Return a pointer register name as a string. */
2645
2646 static const char*
ptrreg_to_str(int regno)2647 ptrreg_to_str (int regno)
2648 {
2649 switch (regno)
2650 {
2651 case REG_X: return "X";
2652 case REG_Y: return "Y";
2653 case REG_Z: return "Z";
2654 default:
2655 output_operand_lossage ("address operand requires constraint for"
2656 " X, Y, or Z register");
2657 }
2658 return NULL;
2659 }
2660
2661 /* Return the condition name as a string.
2662 Used in conditional jump constructing */
2663
2664 static const char*
cond_string(enum rtx_code code)2665 cond_string (enum rtx_code code)
2666 {
2667 switch (code)
2668 {
2669 case NE:
2670 return "ne";
2671 case EQ:
2672 return "eq";
2673 case GE:
2674 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2675 return "pl";
2676 else
2677 return "ge";
2678 case LT:
2679 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2680 return "mi";
2681 else
2682 return "lt";
2683 case GEU:
2684 return "sh";
2685 case LTU:
2686 return "lo";
2687 default:
2688 gcc_unreachable ();
2689 }
2690
2691 return "";
2692 }
2693
2694
2695 /* Return true if rtx X is a CONST or SYMBOL_REF with progmem.
2696 This must be used for AVR_TINY only because on other cores
2697 the flash memory is not visible in the RAM address range and
2698 cannot be read by, say, LD instruction. */
2699
2700 static bool
avr_address_tiny_pm_p(rtx x)2701 avr_address_tiny_pm_p (rtx x)
2702 {
2703 if (CONST == GET_CODE (x))
2704 x = XEXP (XEXP (x, 0), 0);
2705
2706 if (SYMBOL_REF_P (x))
2707 return SYMBOL_REF_FLAGS (x) & AVR_SYMBOL_FLAG_TINY_PM;
2708
2709 return false;
2710 }
2711
2712 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2713 /* Output ADDR to FILE as address. */
2714
static void
avr_print_operand_address (FILE *file, machine_mode /*mode*/, rtx addr)
{
  if (AVR_TINY
      && avr_address_tiny_pm_p (addr))
    {
      /* Progmem symbols on avrtiny are accessed through the RAM view of
         the flash; bias the address by the flash offset.  */
      addr = plus_constant (Pmode, addr, avr_arch->flash_pm_offset);
    }

  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "%s", ptrreg_to_str (REGNO (addr)));
      break;

    case PRE_DEC:
      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    case POST_INC:
      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    default:
      /* Constant address.  Code addresses are wrapped in gs() so the
         assembler / linker emit a word (or stub) address.  */
      if (CONSTANT_ADDRESS_P (addr)
          && text_segment_operand (addr, VOIDmode))
        {
          rtx x = addr;
          if (GET_CODE (x) == CONST)
            x = XEXP (x, 0);
          if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
            {
              /* Assembler gs() will implant word address.  Make offset
                 a byte offset inside gs() for assembler.  This is
                 needed because the more logical (constant+gs(sym)) is not
                 accepted by gas.  For 128K and smaller devices this is ok.
                 For large devices it will create a trampoline to offset
                 from symbol which may not be what the user really wanted.  */

              fprintf (file, "gs(");
              output_addr_const (file, XEXP (x, 0));
              fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
                       2 * INTVAL (XEXP (x, 1)));
              if (AVR_3_BYTE_PC)
                if (warning (0, "pointer offset from symbol maybe incorrect"))
                  {
                    output_addr_const (stderr, addr);
                    fprintf (stderr, "\n");
                  }
            }
          else
            {
              fprintf (file, "gs(");
              output_addr_const (file, addr);
              fprintf (file, ")");
            }
        }
      else
        output_addr_const (file, addr);
    }
}
2776
2777
2778 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2779
static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  /* Only '~' and '!' are recognized punctuation codes; cf. avr.md.  */
  return '~' == code || '!' == code;
}
2785
2786
2787 /* Implement `TARGET_PRINT_OPERAND'. */
2788 /* Output X as assembler operand to file FILE.
2789 For a description of supported %-codes, see top of avr.md. */
2790
static void
avr_print_operand (FILE *file, rtx x, int code)
{
  /* Byte selectors derived from the code letter:
     'A'..'D' select bytes 0..3, 'E'/'F' and 'I'/'J' select bytes 0/1.  */
  int abcd = 0, ef = 0, ij = 0;

  if (code >= 'A' && code <= 'D')
    abcd = code - 'A';
  else if (code == 'E' || code == 'F')
    ef = code - 'E';
  else if (code == 'I' || code == 'J')
    ij = code - 'I';

  if (code == '~')
    {
      /* Without JMP / CALL, emit 'r' so templates expand to RJMP / RCALL.  */
      if (!AVR_HAVE_JMP_CALL)
        fputc ('r', file);
    }
  else if (code == '!')
    {
      /* With EIJMP / EICALL, emit 'e' to select the extended insns.  */
      if (AVR_HAVE_EIJMP_EICALL)
        fputc ('e', file);
    }
  else if (code == 't'
           || code == 'T')
    {
      /* %T latches a register operand in static state; a subsequent
         %T/%t with a const_int bit position prints the byte register
         (and for %T also ",bit").  */
      static int t_regno = -1;
      static int t_nbits = -1;

      if (REG_P (x) && t_regno < 0 && code == 'T')
        {
          t_regno = REGNO (x);
          t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
        }
      else if (CONST_INT_P (x) && t_regno >= 0
               && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
        {
          int bpos = INTVAL (x);

          fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
          if (code == 'T')
            fprintf (file, ",%d", bpos % 8);

          t_regno = -1;
        }
      else
        fatal_insn ("operands to %T/%t must be reg + const_int:", x);
    }
  else if (code == 'E' || code == 'F')
    {
      rtx op = XEXP (x, 0);
      fprintf (file, "%s", reg_names[REGNO (op) + ef]);
    }
  else if (code == 'I' || code == 'J')
    {
      rtx op = XEXP (XEXP (x, 0), 0);
      fprintf (file, "%s", reg_names[REGNO (op) + ij]);
    }
  else if (REG_P (x))
    {
      if (x == zero_reg_rtx)
        fprintf (file, "__zero_reg__");
      else if (code == 'r' && REGNO (x) < 32)
        /* %r prints the raw register number instead of its name.  */
        fprintf (file, "%d", (int) REGNO (x));
      else
        fprintf (file, "%s", reg_names[REGNO (x) + abcd]);
    }
  else if (CONST_INT_P (x))
    {
      HOST_WIDE_INT ival = INTVAL (x);

      if ('i' != code)
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
      else if (low_io_address_operand (x, VOIDmode)
               || high_io_address_operand (x, VOIDmode))
        {
          /* %i: print as I/O address, preferring the symbolic SFR names
             known to the assembler.  */
          if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
            fprintf (file, "__RAMPZ__");
          else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
            fprintf (file, "__RAMPY__");
          else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
            fprintf (file, "__RAMPX__");
          else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
            fprintf (file, "__RAMPD__");
          else if ((AVR_XMEGA || AVR_TINY) && ival == avr_addr.ccp)
            fprintf (file, "__CCP__");
          else if (ival == avr_addr.sreg)   fprintf (file, "__SREG__");
          else if (ival == avr_addr.sp_l)   fprintf (file, "__SP_L__");
          else if (ival == avr_addr.sp_h)   fprintf (file, "__SP_H__");
          else
            {
              /* Unnamed SFR: print its I/O address (RAM address minus
                 the SFR offset).  */
              fprintf (file, HOST_WIDE_INT_PRINT_HEX,
                       ival - avr_arch->sfr_offset);
            }
        }
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (MEM_P (x))
    {
      rtx addr = XEXP (x, 0);

      if (code == 'm')
        {
          if (!CONSTANT_P (addr))
            fatal_insn ("bad address, not a constant:", addr);
          /* Assembler template with m-code is data - not progmem section */
          if (text_segment_operand (addr, VOIDmode))
            if (warning (0, "accessing data memory with"
                         " program memory address"))
              {
                output_addr_const (stderr, addr);
                fprintf(stderr,"\n");
              }
          output_addr_const (file, addr);
        }
      else if (code == 'i')
        {
          avr_print_operand (file, addr, 'i');
        }
      else if (code == 'o')
        {
          /* %o: the displacement part of a (base + disp) address.  */
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand (file, XEXP (addr, 1), 0);
        }
      else if (code == 'b')
        {
          /* %b: the base register of a (base + disp) address.  */
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand_address (file, VOIDmode, XEXP (addr, 0));
        }
      else if (code == 'p' || code == 'r')
        {
          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
            fatal_insn ("bad address, not post_inc or pre_dec:", addr);

          if (code == 'p')
            /* X, Y, Z */
            avr_print_operand_address (file, VOIDmode, XEXP (addr, 0));
          else
            avr_print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
        }
      else if (GET_CODE (addr) == PLUS)
        {
          avr_print_operand_address (file, VOIDmode, XEXP (addr, 0));
          /* X has no displacement addressing, so base + disp through X
             is malformed here.  */
          if (REGNO (XEXP (addr, 0)) == REG_X)
            fatal_insn ("internal compiler error. Bad address:"
                        ,addr);
          fputc ('+', file);
          avr_print_operand (file, XEXP (addr, 1), code);
        }
      else
        avr_print_operand_address (file, VOIDmode, addr);
    }
  else if (code == 'i')
    {
      /* %i on a symbol: only symbols tagged as I/O are accepted; print
         their I/O (SFR-offset-adjusted) address.  */
      if (SYMBOL_REF_P (x) && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO))
        avr_print_operand_address
          (file, VOIDmode, plus_constant (HImode, x, -avr_arch->sfr_offset));
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (code == 'x')
    {
      /* Constant progmem address - like used in jmp or call */
      if (text_segment_operand (x, VOIDmode) == 0)
        if (warning (0, "accessing program memory"
                     " with data memory address"))
          {
            output_addr_const (stderr, x);
            fprintf(stderr,"\n");
          }
      /* Use normal symbol for direct address no linker trampoline needed */
      output_addr_const (file, x);
    }
  else if (CONST_FIXED_P (x))
    {
      /* Fixed-point constants are printed as their integer bit pattern.  */
      HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
      if (code != 0)
        output_operand_lossage ("Unsupported code '%c' for fixed-point:",
                                code);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
    }
  else if (CONST_DOUBLE_P (x))
    {
      /* Only single-precision float constants are supported.  */
      long val;
      if (GET_MODE (x) != SFmode)
        fatal_insn ("internal compiler error. Unknown mode:", x);
      REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), val);
      fprintf (file, "0x%lx", val);
    }
  else if (GET_CODE (x) == CONST_STRING)
    fputs (XSTR (x, 0), file);
  else if (code == 'j')
    fputs (cond_string (GET_CODE (x)), file);
  else if (code == 'k')
    /* %k: the reversed condition.  */
    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
  else
    avr_print_operand_address (file, VOIDmode, x);
}
2993
2994
2995 /* Implement TARGET_USE_BY_PIECES_INFRASTRUCTURE_P. */
2996
2997 /* Prefer sequence of loads/stores for moves of size upto
2998 two - two pairs of load/store instructions are always better
2999 than the 5 instruction sequence for a loop (1 instruction
3000 for loop counter setup, and 4 for the body of the loop). */
3001
3002 static bool
avr_use_by_pieces_infrastructure_p(unsigned HOST_WIDE_INT size,unsigned int align ATTRIBUTE_UNUSED,enum by_pieces_operation op,bool speed_p)3003 avr_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
3004 unsigned int align ATTRIBUTE_UNUSED,
3005 enum by_pieces_operation op,
3006 bool speed_p)
3007 {
3008 if (op != MOVE_BY_PIECES
3009 || (speed_p && size > MOVE_MAX_PIECES))
3010 return default_use_by_pieces_infrastructure_p (size, align, op, speed_p);
3011
3012 return size <= MOVE_MAX_PIECES;
3013 }
3014
3015
3016 /* Worker function for `NOTICE_UPDATE_CC'. */
3017 /* Update the condition code in the INSN. */
3018
void
avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx_insn *insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  /* First pass: CC attributes whose effect depends on the actual
     operands are mapped to plain CC_* values.  */
  switch (cc)
    {
    default:
      break;

    case CC_PLUS:
    case CC_LDI:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands.  */
        extract_constrain_insn_cached (insn);

        switch (cc)
          {
          default:
            gcc_unreachable();

          case CC_PLUS:
            /* avr_out_plus computes the effective CC in ICC.  */
            avr_out_plus (insn, op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_LDI:

            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
              /* Loading zero-reg with 0 uses CLR and thus clobbers cc0.  */
              ? CC_CLOBBER
              /* Any other "r,rL" combination does not alter cc0.  */
              : CC_NONE;

            break;
          } /* inner switch */

        break;
      }
    } /* outer switch */

  /* Second pass: update cc_status according to the (mapped) CC value.  */
  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here.  */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all, but it might set some registers
         that are stored in cc_status.  If such a register is affected by
         the current insn, for example by means of a SET or a CLOBBER,
         then we must reset cc_status; cf. PR77326.

         Unfortunately, set_of cannot be used as reg_overlap_mentioned_p
         will abort on COMPARE (which might be found in cc_status.value1/2).
         Thus work out the registers set by the insn and regs mentioned
         in cc_status.value1/2.  */

      if (cc_status.value1
          || cc_status.value2)
        {
          HARD_REG_SET regs_used;
          HARD_REG_SET regs_set;
          CLEAR_HARD_REG_SET (regs_used);

          if (cc_status.value1
              && !CONSTANT_P (cc_status.value1))
            {
              find_all_hard_regs (cc_status.value1, &regs_used);
            }

          if (cc_status.value2
              && !CONSTANT_P (cc_status.value2))
            {
              find_all_hard_regs (cc_status.value2, &regs_used);
            }

          find_all_hard_reg_sets (insn, &regs_set, false);

          if (hard_reg_set_intersect_p (regs_used, regs_set))
            {
              CC_STATUS_INIT;
            }
        }

      break; // CC_NONE

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_VZN:
      /* Insn like INC, DEC, NEG that set Z,N,V.  We currently don't make use
         of this combination, cf. also PR61055.  */
      CC_STATUS_INIT;
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
3160
3161 /* Choose mode for jump insn:
3162 1 - relative jump in range -63 <= x <= 62 ;
3163 2 - relative jump in range -2046 <= x <= 2045 ;
3164 3 - absolute jump (only for ATmega[16]03). */
3165
3166 int
avr_jump_mode(rtx x,rtx_insn * insn)3167 avr_jump_mode (rtx x, rtx_insn *insn)
3168 {
3169 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
3170 ? XEXP (x, 0) : x));
3171 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
3172 int jump_distance = cur_addr - dest_addr;
3173
3174 if (IN_RANGE (jump_distance, -63, 62))
3175 return 1;
3176 else if (IN_RANGE (jump_distance, -2046, 2045))
3177 return 2;
3178 else if (AVR_HAVE_JMP_CALL)
3179 return 3;
3180
3181 return 2;
3182 }
3183
3184 /* Return an AVR condition jump commands.
3185 X is a comparison RTX.
3186 LEN is a number returned by avr_jump_mode function.
3187 If REVERSE nonzero then condition code in X must be reversed. */
3188
const char*
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  /* LEN == 1: target is in short-branch range; LEN == 2: reach it via
     RJMP; otherwise via JMP (cf. avr_jump_mode).  GT / GTU / LE / LEU
     have no single branch insn and are composed from EQ plus a signed /
     unsigned test; when the V flag is unusable, the sign-bit branches
     PL / MI replace GE / LT.  */
  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brpl %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brmi .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brmi .+4" CR_TAB
                 "jmp %0"));

      else
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brge %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brlt .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brlt .+4" CR_TAB
                 "jmp %0"));
    case GTU:
      return (len == 1 ? ("breq .+2" CR_TAB
                          "brsh %0") :
              len == 2 ? ("breq .+4" CR_TAB
                          "brlo .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+6" CR_TAB
               "brlo .+4" CR_TAB
               "jmp %0"));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq %0" CR_TAB
                            "brmi %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brpl .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brpl .+4" CR_TAB
                 "jmp %0"));
      else
        return (len == 1 ? ("breq %0" CR_TAB
                            "brlt %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brge .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brge .+4" CR_TAB
                 "jmp %0"));
    case LEU:
      return (len == 1 ? ("breq %0" CR_TAB
                          "brlo %0") :
              len == 2 ? ("breq .+2" CR_TAB
                          "brsh .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+2" CR_TAB
               "brsh .+4" CR_TAB
               "jmp %0"));
    default:
      /* Conditions with a direct branch insn: use %j1 (condition) or
         %k1 (reversed condition) as emitted by avr_print_operand.  */
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return "br%k1 %0";
            case 2:
              return ("br%j1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%j1 .+4" CR_TAB
                      "jmp %0");
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return "br%j1 %0";
            case 2:
              return ("br%k1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%k1 .+4" CR_TAB
                      "jmp %0");
            }
        }
    }
  return "";
}
3285
3286
3287 /* Worker function for `FINAL_PRESCAN_INSN'. */
3288 /* Output insn cost for next insn. */
3289
3290 void
avr_final_prescan_insn(rtx_insn * insn,rtx * operand ATTRIBUTE_UNUSED,int num_operands ATTRIBUTE_UNUSED)3291 avr_final_prescan_insn (rtx_insn *insn, rtx *operand ATTRIBUTE_UNUSED,
3292 int num_operands ATTRIBUTE_UNUSED)
3293 {
3294 if (avr_log.rtx_costs)
3295 {
3296 rtx set = single_set (insn);
3297
3298 if (set)
3299 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
3300 set_src_cost (SET_SRC (set), GET_MODE (SET_DEST (set)),
3301 optimize_insn_for_speed_p ()));
3302 else
3303 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
3304 rtx_cost (PATTERN (insn), VOIDmode, INSN, 0,
3305 optimize_insn_for_speed_p()));
3306 }
3307
3308 if (avr_log.insn_addresses)
3309 fprintf (asm_out_file, ";; ADDR = %d\n",
3310 (int) INSN_ADDRESSES (INSN_UID (insn)));
3311 }
3312
3313
3314 /* Implement `TARGET_ASM_FINAL_POSTSCAN_INSN'. */
3315 /* When GAS generates (parts of) ISR prologue / epilogue for us, we must
3316    hint GAS about the end of the code to scan.  There might be code located
3317 after the last epilogue. */
3318
3319 static void
avr_asm_final_postscan_insn(FILE * stream,rtx_insn * insn,rtx *,int)3320 avr_asm_final_postscan_insn (FILE *stream, rtx_insn *insn, rtx*, int)
3321 {
3322 if (cfun->machine->gasisr.yes
3323 && !next_real_insn (insn))
3324 {
3325 app_disable();
3326 fprintf (stream, "\t__gcc_isr %d,r%d\n", GASISR_Done,
3327 cfun->machine->gasisr.regno);
3328 }
3329 }
3330
3331
3332 /* Return 0 if undefined, 1 if always true or always false. */
3333
3334 int
avr_simplify_comparison_p(machine_mode mode,RTX_CODE op,rtx x)3335 avr_simplify_comparison_p (machine_mode mode, RTX_CODE op, rtx x)
3336 {
3337 unsigned int max = (mode == QImode ? 0xff :
3338 mode == HImode ? 0xffff :
3339 mode == PSImode ? 0xffffff :
3340 mode == SImode ? 0xffffffff : 0);
3341 if (max && op && CONST_INT_P (x))
3342 {
3343 if (unsigned_condition (op) != op)
3344 max >>= 1;
3345
3346 if (max != (INTVAL (x) & max)
3347 && INTVAL (x) != 0xff)
3348 return 1;
3349 }
3350 return 0;
3351 }
3352
3353
3354 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
3355 /* Returns nonzero if REGNO is the number of a hard
3356 register in which function arguments are sometimes passed. */
3357
3358 int
avr_function_arg_regno_p(int r)3359 avr_function_arg_regno_p (int r)
3360 {
3361 return AVR_TINY ? IN_RANGE (r, 20, 25) : IN_RANGE (r, 8, 25);
3362 }
3363
3364
3365 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
3366 /* Initializing the variable cum for the state at the beginning
3367 of the argument list. */
3368
3369 void
avr_init_cumulative_args(CUMULATIVE_ARGS * cum,tree fntype,rtx libname,tree fndecl ATTRIBUTE_UNUSED)3370 avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
3371 tree fndecl ATTRIBUTE_UNUSED)
3372 {
3373 cum->nregs = AVR_TINY ? 6 : 18;
3374 cum->regno = FIRST_CUM_REG;
3375 if (!libname && stdarg_p (fntype))
3376 cum->nregs = 0;
3377
3378 /* Assume the calle may be tail called */
3379
3380 cfun->machine->sibcall_fails = 0;
3381 }
3382
3383 /* Returns the number of registers to allocate for a function argument. */
3384
3385 static int
avr_num_arg_regs(machine_mode mode,const_tree type)3386 avr_num_arg_regs (machine_mode mode, const_tree type)
3387 {
3388 int size;
3389
3390 if (mode == BLKmode)
3391 size = int_size_in_bytes (type);
3392 else
3393 size = GET_MODE_SIZE (mode);
3394
3395 /* Align all function arguments to start in even-numbered registers.
3396 Odd-sized arguments leave holes above them. */
3397
3398 return (size + 1) & ~1;
3399 }
3400
3401
3402 /* Implement `TARGET_FUNCTION_ARG'. */
3403 /* Controls whether a function argument is passed
3404 in a register, and which register. */
3405
3406 static rtx
avr_function_arg(cumulative_args_t cum_v,machine_mode mode,const_tree type,bool named ATTRIBUTE_UNUSED)3407 avr_function_arg (cumulative_args_t cum_v, machine_mode mode,
3408 const_tree type, bool named ATTRIBUTE_UNUSED)
3409 {
3410 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3411 int bytes = avr_num_arg_regs (mode, type);
3412
3413 if (cum->nregs && bytes <= cum->nregs)
3414 return gen_rtx_REG (mode, cum->regno - bytes);
3415
3416 return NULL_RTX;
3417 }
3418
3419
3420 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
3421 /* Update the summarizer variable CUM to advance past an argument
3422 in the argument list. */
3423
static void
avr_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
			  const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  /* Consume the registers used by this argument; regno counts down.  */
  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register.  As the original
     contents of these regs has to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called.  */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
	 This uses internals of calls.c:expand_call() and the way args_so_far
	 is used.  targetm.function_ok_for_sibcall() needs to be extended to
	 pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
	 dependent so that such an extension is not wanted.  */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      /* Warn once per register of this argument that was made fixed.  */
      for (int regno = cum->regno; regno < cum->regno + bytes; regno++)
	if (fixed_regs[regno])
	  warning (0, "fixed register %s used to pass parameter to function",
		   reg_names[regno]);
    }

  /* Argument registers exhausted: everything else goes on the stack.  */
  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
3471
3472 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
3473 /* Decide whether we can make a sibling call to a function. DECL is the
3474 declaration of the function being targeted by the call and EXP is the
3475 CALL_EXPR representing the call. */
3476
3477 static bool
avr_function_ok_for_sibcall(tree decl_callee,tree exp_callee)3478 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
3479 {
3480 tree fntype_callee;
3481
3482 /* Tail-calling must fail if callee-saved regs are used to pass
3483 function args. We must not tail-call when `epilogue_restores'
3484 is used. Unfortunately, we cannot tell at this point if that
3485 actually will happen or not, and we cannot step back from
3486 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
3487
3488 if (cfun->machine->sibcall_fails
3489 || TARGET_CALL_PROLOGUES)
3490 {
3491 return false;
3492 }
3493
3494 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
3495
3496 if (decl_callee)
3497 {
3498 decl_callee = TREE_TYPE (decl_callee);
3499 }
3500 else
3501 {
3502 decl_callee = fntype_callee;
3503
3504 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
3505 && METHOD_TYPE != TREE_CODE (decl_callee))
3506 {
3507 decl_callee = TREE_TYPE (decl_callee);
3508 }
3509 }
3510
3511 /* Ensure that caller and callee have compatible epilogues */
3512
3513 if (cfun->machine->is_interrupt
3514 || cfun->machine->is_signal
3515 || cfun->machine->is_naked
3516 || avr_naked_function_p (decl_callee))
3517 {
3518 return false;
3519 }
3520
3521 return true;
3522 }
3523
3524 /***********************************************************************
3525 Functions for outputting various mov's for a various modes
3526 ************************************************************************/
3527
3528 /* Return true if a value of mode MODE is read from flash by
3529 __load_* function from libgcc. */
3530
3531 bool
avr_load_libgcc_p(rtx op)3532 avr_load_libgcc_p (rtx op)
3533 {
3534 machine_mode mode = GET_MODE (op);
3535 int n_bytes = GET_MODE_SIZE (mode);
3536
3537 return (n_bytes > 2
3538 && !AVR_HAVE_LPMX
3539 && avr_mem_flash_p (op));
3540 }
3541
3542 /* Return true if a value of mode MODE is read by __xload_* function. */
3543
3544 bool
avr_xload_libgcc_p(machine_mode mode)3545 avr_xload_libgcc_p (machine_mode mode)
3546 {
3547 int n_bytes = GET_MODE_SIZE (mode);
3548
3549 return (n_bytes > 1
3550 || avr_n_flash > 1);
3551 }
3552
3553
3554 /* Fixme: This is a hack because secondary reloads don't work as expected.
3555
3556 Find an unused d-register to be used as scratch in INSN.
3557 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
3558 is a register, skip all possible return values that overlap EXCLUDE.
3559 The policy for the returned register is similar to that of
3560 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
3561 of INSN.
3562
3563 Return a QImode d-register or NULL_RTX if nothing found. */
3564
3565 static rtx
avr_find_unused_d_reg(rtx_insn * insn,rtx exclude)3566 avr_find_unused_d_reg (rtx_insn *insn, rtx exclude)
3567 {
3568 bool isr_p = (avr_interrupt_function_p (current_function_decl)
3569 || avr_signal_function_p (current_function_decl));
3570
3571 for (int regno = 16; regno < 32; regno++)
3572 {
3573 rtx reg = all_regs_rtx[regno];
3574
3575 if ((exclude
3576 && reg_overlap_mentioned_p (exclude, reg))
3577 || fixed_regs[regno])
3578 {
3579 continue;
3580 }
3581
3582 /* Try non-live register */
3583
3584 if (!df_regs_ever_live_p (regno)
3585 && (TREE_THIS_VOLATILE (current_function_decl)
3586 || cfun->machine->is_OS_task
3587 || cfun->machine->is_OS_main
3588 || (!isr_p && call_used_regs[regno])))
3589 {
3590 return reg;
3591 }
3592
3593 /* Any live register can be used if it is unused after.
3594 Prologue/epilogue will care for it as needed. */
3595
3596 if (df_regs_ever_live_p (regno)
3597 && reg_unused_after (insn, reg))
3598 {
3599 return reg;
3600 }
3601 }
3602
3603 return NULL_RTX;
3604 }
3605
3606
3607 /* Helper function for the next function in the case where only restricted
3608 version of LPM instruction is available. */
3609
static const char*
avr_out_lpm_no_lpmx (rtx_insn *insn, rtx *xop, int *plen)
{
  rtx dest = xop[0];
  rtx addr = xop[1];
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM.  */
  xop[3] = lpm_reg_rtx;

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      /* Plain LPM only reads via the Z register.  */
      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
	{
	default:
	  gcc_unreachable();

	case 1:
	  /* %4lpm expands to "lpm" or "elpm" depending on xop[4].  */
	  avr_asm_len ("%4lpm", xop, plen, 1);

	  /* Move out of R0 unless R0 is the destination anyway.  */
	  if (regno_dest != LPM_REGNO)
	    avr_asm_len ("mov %0,%3", xop, plen, 1);

	  return "";

	case 2:
	  /* Destination is Z itself: park the low byte on the stack so
	     incrementing Z for the high byte does not clobber it.  */
	  if (REGNO (dest) == REG_Z)
	    return avr_asm_len ("%4lpm" CR_TAB
				"push %3" CR_TAB
				"adiw %2,1" CR_TAB
				"%4lpm" CR_TAB
				"mov %B0,%3" CR_TAB
				"pop %A0", xop, plen, 6);

	  avr_asm_len ("%4lpm" CR_TAB
		       "mov %A0,%3" CR_TAB
		       "adiw %2,1" CR_TAB
		       "%4lpm" CR_TAB
		       "mov %B0,%3", xop, plen, 5);

	  /* Restore Z if it is still needed after this insn.  */
	  if (!reg_unused_after (insn, addr))
	    avr_asm_len ("sbiw %2,1", xop, plen, 1);

	  break; /* 2 */
	}

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
		  && n_bytes <= 4);

      /* Byte 0; skip the extra MOV when R0 is the destination.  */
      if (regno_dest == LPM_REGNO)
	avr_asm_len ("%4lpm" CR_TAB
		     "adiw %2,1", xop, plen, 2);
      else
	avr_asm_len ("%4lpm" CR_TAB
		     "mov %A0,%3" CR_TAB
		     "adiw %2,1", xop, plen, 3);

      /* Remaining bytes: LPM into R0, move into place, advance Z.  */
      if (n_bytes >= 2)
	avr_asm_len ("%4lpm" CR_TAB
		     "mov %B0,%3" CR_TAB
		     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 3)
	avr_asm_len ("%4lpm" CR_TAB
		     "mov %C0,%3" CR_TAB
		     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 4)
	avr_asm_len ("%4lpm" CR_TAB
		     "mov %D0,%3" CR_TAB
		     "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
3702
3703
3704 /* If PLEN == NULL: Output instructions to load a value from a memory location
3705 OP[1] in AS1 to register OP[0].
3706 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
3707 Return "". */
3708
const char*
avr_out_lpm (rtx_insn *insn, rtx *op, int *plen)
{
  rtx xop[7];
  rtx dest = op[0];
  rtx src = SET_SRC (single_set (insn));
  rtx addr;
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int segment;
  RTX_CODE code;
  addr_space_t as = MEM_ADDR_SPACE (src);

  if (plen)
    *plen = 0;

  /* Flash is read-only: a MEM destination can only be diagnosed.  */
  if (MEM_P (dest))
    {
      warning (0, "writing to address space %qs not supported",
	       avr_addrspace[MEM_ADDR_SPACE (dest)].name);

      return "";
    }

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  /* Operand layout used by all templates below:
     %0 destination register
     %1 source address (REG Z or POST_INC of Z)
     %2 the Z register
     %4 "" or the segment number, later "e" so "%4lpm" reads LPM / ELPM
     %5 the temporary register
     %6 address of the RAMPZ special function register.  */
  xop[0] = dest;
  xop[1] = addr;
  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;
  xop[6] = XEXP (rampz_rtx, 0);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed.  */

  if (segment)
    {
      xop[4] = GEN_INT (segment);
      xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);

      if (xop[3] != NULL_RTX)
	{
	  /* A free d-reg can hold the segment number directly.  */
	  avr_asm_len ("ldi %3,%4" CR_TAB
		       "out %i6,%3", xop, plen, 2);
	}
      else if (segment == 1)
	{
	  /* Segment 1 can be built without LDI: CLR + INC.  */
	  avr_asm_len ("clr %5" CR_TAB
		       "inc %5" CR_TAB
		       "out %i6,%5", xop, plen, 3);
	}
      else
	{
	  /* No d-reg free: borrow ZL, restore it afterwards.  */
	  avr_asm_len ("mov %5,%2" CR_TAB
		       "ldi %2,%4" CR_TAB
		       "out %i6,%2" CR_TAB
		       "mov %2,%5", xop, plen, 4);
	}

      /* From here on, "%4lpm" prints "elpm".  */
      xop[4] = xstring_e;

      if (!AVR_HAVE_ELPMX)
	return avr_out_lpm_no_lpmx (insn, xop, plen);
    }
  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);
    }

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
	{
	default:
	  gcc_unreachable();

	case 1:
	  avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
	  break;

	case 2:
	  /* Destination overlaps Z: read low byte into the temporary
	     first so the post-increment address stays intact.  */
	  if (REGNO (dest) == REG_Z)
	    avr_asm_len ("%4lpm %5,%a2+" CR_TAB
			 "%4lpm %B0,%a2" CR_TAB
			 "mov %A0,%5", xop, plen, 3);
	  else
	    {
	      avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
			   "%4lpm %B0,%a2", xop, plen, 2);

	      /* Undo the post-increment if Z is still live.  */
	      if (!reg_unused_after (insn, addr))
		avr_asm_len ("sbiw %2,1", xop, plen, 1);
	    }

	  break; /* 2 */

	case 3:

	  avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
		       "%4lpm %B0,%a2+" CR_TAB
		       "%4lpm %C0,%a2", xop, plen, 3);

	  if (!reg_unused_after (insn, addr))
	    avr_asm_len ("sbiw %2,2", xop, plen, 1);

	  break; /* 3 */

	case 4:

	  avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
		       "%4lpm %B0,%a2+", xop, plen, 2);

	  /* Upper word would overwrite Z: go through the temporary.  */
	  if (REGNO (dest) == REG_Z - 2)
	    avr_asm_len ("%4lpm %5,%a2+" CR_TAB
			 "%4lpm %C0,%a2" CR_TAB
			 "mov %D0,%5", xop, plen, 3);
	  else
	    {
	      avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
			   "%4lpm %D0,%a2", xop, plen, 2);

	      if (!reg_unused_after (insn, addr))
		avr_asm_len ("sbiw %2,3", xop, plen, 1);
	    }

	  break; /* 4 */
	} /* n_bytes */

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
		  && n_bytes <= 4);

      avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
    {
      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM.  */

      xop[0] = zero_reg_rtx;
      avr_asm_len ("out %i6,%0", xop, plen, 1);
    }

  return "";
}
3877
3878
3879 /* Worker function for xload_8 insn. */
3880
const char*
avr_out_xload (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  rtx xop[4];

  /* %0 destination, %1 high byte of the 24-bit address,
     %2 the Z register, %3 scratch: %0 with LPMX, else R0.  */
  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = lpm_addr_reg_rtx;
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  /* Speculatively read from flash first.  */
  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);

  /* SBRC skips the LD when bit 7 of %1 is clear, i.e. the LD re-reads
     from RAM only when that bit is set.  */
  avr_asm_len ("sbrc %1,7" CR_TAB
	       "ld %3,%a2", xop, plen, 2);

  /* Without LPMX the value is still in R0: move it into place.  */
  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);

  return "";
}
3901
3902
/* Output the assembler for a QImode move DEST := SRC in OPERANDS.
   If PLEN != NULL, set *PLEN to the length of the sequence in words
   instead of emitting it.  Always returns "".  */

const char*
output_movqi (rtx_insn *insn, rtx operands[], int *plen)
{
  rtx dest = operands[0];
  rtx src = operands[1];

  /* Reads from flash need LPM sequences; hand those off.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, plen);
    }

  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 1);

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
	{
	  /* Moves to/from the stack pointer use OUT/IN.  */
	  if (test_hard_reg_class (STACK_REG, dest))
	    return avr_asm_len ("out %0,%1", operands, plen, -1);
	  else if (test_hard_reg_class (STACK_REG, src))
	    return avr_asm_len ("in %0,%1", operands, plen, -1);

	  return avr_asm_len ("mov %0,%1", operands, plen, -1);
	}
      else if (CONSTANT_P (src))
	{
	  output_reload_in_const (operands, NULL_RTX, plen, false);
	  return "";
	}
      else if (MEM_P (src))
	return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* Store zero via the fixed zero register.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, plen);
    }

  return "";
}
3948
3949
/* Output the assembler for an HImode move XOP[0] := XOP[1].
   If PLEN != NULL, set *PLEN to the length of the sequence in words
   instead of emitting it.  Always returns "".  */

const char *
output_movhi (rtx_insn *insn, rtx xop[], int *plen)
{
  rtx dest = xop[0];
  rtx src = xop[1];

  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  /* Reads from flash need LPM sequences; hand those off.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, xop, plen);
    }

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
	{
	  if (test_hard_reg_class (STACK_REG, dest))
	    {
	      /* Writing SP: devices with an 8-bit SP only have SPL.  */
	      if (AVR_HAVE_8BIT_SP)
		return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

	      /* XMEGA updates SP atomically on writing SPL first.  */
	      if (AVR_XMEGA)
		return avr_asm_len ("out __SP_L__,%A1" CR_TAB
				    "out __SP_H__,%B1", xop, plen, -2);

	      /* Use simple load of SP if no interrupts are used.  */

	      /* Otherwise disable interrupts around the two OUTs so an
		 IRQ cannot observe a half-written stack pointer.  */
	      return TARGET_NO_INTERRUPTS
		? avr_asm_len ("out __SP_H__,%B1" CR_TAB
			       "out __SP_L__,%A1", xop, plen, -2)
		: avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
			       "cli" CR_TAB
			       "out __SP_H__,%B1" CR_TAB
			       "out __SREG__,__tmp_reg__" CR_TAB
			       "out __SP_L__,%A1", xop, plen, -5);
	    }
	  else if (test_hard_reg_class (STACK_REG, src))
	    {
	      /* Reading SP: clear the high byte when there is no SPH.  */
	      return !AVR_HAVE_SPH
		? avr_asm_len ("in %A0,__SP_L__" CR_TAB
			       "clr %B0", xop, plen, -2)

		: avr_asm_len ("in %A0,__SP_L__" CR_TAB
			       "in %B0,__SP_H__", xop, plen, -2);
	    }

	  return AVR_HAVE_MOVW
	    ? avr_asm_len ("movw %0,%1", xop, plen, -1)

	    : avr_asm_len ("mov %A0,%A1" CR_TAB
			   "mov %B0,%B1", xop, plen, -2);
	} /* REG_P (src) */
      else if (CONSTANT_P (src))
	{
	  return output_reload_inhi (xop, NULL, plen);
	}
      else if (MEM_P (src))
	{
	  return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
	}
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* Store zero via the fixed zero register.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);

  return "";
}
4027
4028
4029 /* Same as out_movqi_r_mr, but TINY does not have ADIW or SBIW */
4030
static const char*
avr_out_movqi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  /* X is the (PLUS base disp) address of the source MEM.  */
  rtx x = XEXP (src, 0);

  /* Add the displacement to the base pointer, then load.  */
  avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
	       "ld %0,%b1" , op, plen, -3);

  /* Restore the base pointer unless the load clobbered it or it is
     dead after this insn.  */
  if (!reg_overlap_mentioned_p (dest, XEXP (x, 0))
      && !reg_unused_after (insn, XEXP (x, 0)))
    avr_asm_len (TINY_SBIW (%I1, %J1, %o1), op, plen, 2);

  return "";
}
4047
/* Output the assembler for a QImode load register OP[0] from memory OP[1].
   If PLEN != NULL, set *PLEN to the sequence length in words instead.  */

static const char*
out_movqi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* LDS is a 1-word insn on reduced Tiny, 2 words elsewhere.  */
      int n_words = AVR_TINY ? 1 : 2;
      return io_address_operand (x, QImode)
	? avr_asm_len ("in %0,%i1", op, plen, -1)
	: avr_asm_len ("lds %0,%m1", op, plen, -n_words);
    }

  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
	return avr_out_movqi_r_mr_reg_disp_tiny (insn, op, plen);

      /* Displacement beyond LDD's 6-bit range: only Y can be adjusted.  */
      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
	{
	  if (REGNO (XEXP (x, 0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  /* Small overshoot: bump Y with ADIW/SBIW around an LDD.  */
	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return avr_asm_len ("adiw r28,%o1-63" CR_TAB
				"ldd %0,Y+63" CR_TAB
				"sbiw r28,%o1-63", op, plen, -3);

	  /* Large displacement: full 16-bit add/subtract on Y.  */
	  return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
			      "sbci r29,hi8(-%o1)" CR_TAB
			      "ld %0,Y" CR_TAB
			      "subi r28,lo8(%o1)" CR_TAB
			      "sbci r29,hi8(%o1)", op, plen, -5);
	}
      else if (REGNO (XEXP (x, 0)) == REG_X)
	{
	  /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
	     it but I have this situation with extremal optimizing options.  */

	  /* X has no displacement addressing: adjust X around the load.  */
	  avr_asm_len ("adiw r26,%o1" CR_TAB
		       "ld %0,X", op, plen, -2);

	  if (!reg_overlap_mentioned_p (dest, XEXP (x, 0))
	      && !reg_unused_after (insn, XEXP (x, 0)))
	    {
	      avr_asm_len ("sbiw r26,%o1", op, plen, 1);
	    }

	  return "";
	}

      return avr_asm_len ("ldd %0,%1", op, plen, -1);
    }

  return avr_asm_len ("ld %0,%1", op, plen, -1);
}
4112
4113
4114 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
4115
static const char*
avr_out_movhi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);

  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  /* Destination overlaps the base pointer: buffer the low byte in
     __tmp_reg__ so the second load still sees the right address.  */
  if (reg_dest == reg_base) /* R = (R) */
    return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
			"ld %B0,%1" CR_TAB
			"mov %A0,__tmp_reg__", op, plen, -3);

  avr_asm_len ("ld %A0,%1+" CR_TAB
	       "ld %B0,%1", op, plen, -2);

  /* Undo the post-increment when the base is still live.  */
  if (!reg_unused_after (insn, base))
    avr_asm_len (TINY_SBIW (%E1, %F1, 1), op, plen, 2);

  return "";
}
4139
4140
4141 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
4142
static const char*
avr_out_movhi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  /* BASE is the (PLUS reg disp) address of the source MEM.  */
  rtx base = XEXP (src, 0);

  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_base == reg_dest)
    {
      /* Destination overlaps the pointer: buffer the low byte in
	 __tmp_reg__; the pointer is clobbered anyway, no restore.  */
      return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
			  "ld __tmp_reg__,%b1+" CR_TAB
			  "ld %B0,%b1" CR_TAB
			  "mov %A0,__tmp_reg__", op, plen, -5);
    }
  else
    {
      avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
		   "ld %A0,%b1+" CR_TAB
		   "ld %B0,%b1", op, plen, -4);

      /* Restore the pointer (displacement plus one post-increment)
	 when it is still live after this insn.  */
      if (!reg_unused_after (insn, XEXP (base, 0)))
	avr_asm_len (TINY_SBIW (%I1, %J1, %o1+1), op, plen, 2);

      return "";
    }
}
4172
4173
4174 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
4175
static const char*
avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  int mem_volatile_p = 0;
  rtx dest = op[0];
  rtx src = op[1];
  /* BASE is the (PRE_DEC reg) address of the source MEM.  */
  rtx base = XEXP (src, 0);

  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
    fatal_insn ("incorrect insn:", insn);

  /* Non-volatile: two pre-decrement loads, high byte first.  */
  if (!mem_volatile_p)
    return avr_asm_len ("ld %B0,%1" CR_TAB
			"ld %A0,%1", op, plen, -2);

  /* Volatile: step the pointer back by 2 first, then read low byte
     before high byte; final SBIW leaves the pointer pre-decremented.  */
  return avr_asm_len (TINY_SBIW (%I1, %J1, 2) CR_TAB
		      "ld %A0,%p1+" CR_TAB
		      "ld %B0,%p1" CR_TAB
		      TINY_SBIW (%I1, %J1, 1), op, plen, -6);
}
4200
4201
/* Output the assembler for an HImode load register OP[0] from memory OP[1].
   If PLEN != NULL, set *PLEN to the sequence length in words instead.  */

static const char*
out_movhi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_base > 0)
    {
      if (AVR_TINY)
	return avr_out_movhi_r_mr_reg_no_disp_tiny (insn, op, plen);

      /* Destination overlaps the pointer: buffer the low byte.  */
      if (reg_dest == reg_base) /* R = (R) */
	return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
			    "ld %B0,%1" CR_TAB
			    "mov %A0,__tmp_reg__", op, plen, -3);

      /* Y/Z pointers have LDD with displacement.  */
      if (reg_base != REG_X)
	return avr_asm_len ("ld %A0,%1" CR_TAB
			    "ldd %B0,%1+1", op, plen, -2);

      /* X has no LDD: post-increment, then restore if still live.  */
      avr_asm_len ("ld %A0,X+" CR_TAB
		   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
	avr_asm_len ("sbiw r26,1", op, plen, 1);

      return "";
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (AVR_TINY)
	return avr_out_movhi_r_mr_reg_disp_tiny (insn, op, plen);

      /* Displacement beyond LDD's range: only Y can be adjusted.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
	{
	  if (REGNO (XEXP (base, 0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
	    ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
			   "ldd %A0,Y+62" CR_TAB
			   "ldd %B0,Y+63" CR_TAB
			   "sbiw r28,%o1-62", op, plen, -4)

	    : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
			   "sbci r29,hi8(-%o1)" CR_TAB
			   "ld %A0,Y" CR_TAB
			   "ldd %B0,Y+1" CR_TAB
			   "subi r28,lo8(%o1)" CR_TAB
			   "sbci r29,hi8(%o1)", op, plen, -6);
	}

      /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
	 it but I have this situation with extremal
	 optimization options.  */

      if (reg_base == REG_X)
	{
	  if (reg_base == reg_dest)
	    return avr_asm_len ("adiw r26,%o1" CR_TAB
				"ld __tmp_reg__,X+" CR_TAB
				"ld %B0,X" CR_TAB
				"mov %A0,__tmp_reg__", op, plen, -4);

	  avr_asm_len ("adiw r26,%o1" CR_TAB
		       "ld %A0,X+" CR_TAB
		       "ld %B0,X", op, plen, -3);

	  /* Undo displacement plus post-increment if X is still live.  */
	  if (!reg_unused_after (insn, XEXP (base, 0)))
	    avr_asm_len ("sbiw r26,%o1+1", op, plen, 1);

	  return "";
	}

      return reg_base == reg_dest
	? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
		       "ldd %B0,%B1" CR_TAB
		       "mov %A0,__tmp_reg__", op, plen, -3)

	: avr_asm_len ("ldd %A0,%A1" CR_TAB
		       "ldd %B0,%B1", op, plen, -2);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (AVR_TINY)
	return avr_out_movhi_r_mr_pre_dec_tiny (insn, op, plen);

      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
	fatal_insn ("incorrect insn:", insn);

      if (!mem_volatile_p)
	return avr_asm_len ("ld %B0,%1" CR_TAB
			    "ld %A0,%1", op, plen, -2);

      /* Volatile: step pointer back by 2 and read low byte first.  */
      return REGNO (XEXP (base, 0)) == REG_X
	? avr_asm_len ("sbiw r26,2" CR_TAB
		       "ld %A0,X+" CR_TAB
		       "ld %B0,X" CR_TAB
		       "sbiw r26,1", op, plen, -4)

	: avr_asm_len ("sbiw %r1,2" CR_TAB
		       "ld %A0,%p1" CR_TAB
		       "ldd %B0,%p1+1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
	fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1" CR_TAB
			  "ld %B0,%1", op, plen, -2);
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* LDS is 1 word per byte on reduced Tiny, 2 words elsewhere.  */
      int n_words = AVR_TINY ? 2 : 4;
      return io_address_operand (base, HImode)
	? avr_asm_len ("in %A0,%i1" CR_TAB
		       "in %B0,%i1+1", op, plen, -2)

	: avr_asm_len ("lds %A0,%m1" CR_TAB
		       "lds %B0,%m1+1", op, plen, -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4337
/* Output an SImode load from a plain register address for reduced Tiny
   (no ADIW/SBIW/LDD).  *L is set to the sequence length in words.  */

static const char*
avr_out_movsi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_dest == reg_base)
    {
      /* Destination is the pointer itself: load top-down, buffering
	 byte B in __tmp_reg__ until the pointer is no longer needed.  */
      /* "ld r26,-X" is undefined */
      return *l = 9, (TINY_ADIW (%E1, %F1, 3) CR_TAB
		      "ld %D0,%1" CR_TAB
		      "ld %C0,-%1" CR_TAB
		      "ld __tmp_reg__,-%1" CR_TAB
		      TINY_SBIW (%E1, %F1, 1) CR_TAB
		      "ld %A0,%1" CR_TAB
		      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      /* Upper destination word overlaps the pointer: buffer byte C.  */
      return *l = 5, ("ld %A0,%1+" CR_TAB
		      "ld %B0,%1+" CR_TAB
		      "ld __tmp_reg__,%1+" CR_TAB
		      "ld %D0,%1" CR_TAB
		      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, base))
    {
      /* Pointer dead afterwards: no need to restore it.  */
      return *l = 4, ("ld %A0,%1+" CR_TAB
		      "ld %B0,%1+" CR_TAB
		      "ld %C0,%1+" CR_TAB
		      "ld %D0,%1");
    }
  else
    {
      /* Pointer still live: undo the three post-increments.  */
      return *l = 6, ("ld %A0,%1+" CR_TAB
		      "ld %B0,%1+" CR_TAB
		      "ld %C0,%1+" CR_TAB
		      "ld %D0,%1" CR_TAB
		      TINY_SBIW (%E1, %F1, 3));
    }
}
4382
4383
/* Output an SImode load from a reg+displacement address for reduced Tiny
   (no ADIW/SBIW/LDD).  *L is set to the sequence length in words.  */

static const char*
avr_out_movsi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  /* BASE is the (PLUS reg disp) address of the source MEM.  */
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_dest == reg_base)
    {
      /* Destination is the pointer itself: load top-down, buffering
	 byte B in __tmp_reg__ until the pointer is no longer needed.  */
      /* "ld r26,-X" is undefined */
      return *l = 9, (TINY_ADIW (%I1, %J1, %o1+3) CR_TAB
		      "ld %D0,%b1" CR_TAB
		      "ld %C0,-%b1" CR_TAB
		      "ld __tmp_reg__,-%b1" CR_TAB
		      TINY_SBIW (%I1, %J1, 1) CR_TAB
		      "ld %A0,%b1" CR_TAB
		      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      /* Upper destination word overlaps the pointer: buffer byte C.  */
      return *l = 7, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
		      "ld %A0,%b1+" CR_TAB
		      "ld %B0,%b1+" CR_TAB
		      "ld __tmp_reg__,%b1+" CR_TAB
		      "ld %D0,%b1" CR_TAB
		      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, XEXP (base, 0)))
    {
      /* Pointer dead afterwards: no need to restore it.  */
      return *l = 6, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
		      "ld %A0,%b1+" CR_TAB
		      "ld %B0,%b1+" CR_TAB
		      "ld %C0,%b1+" CR_TAB
		      "ld %D0,%b1");
    }
  else
    {
      /* Pointer still live: undo displacement plus post-increments.  */
      return *l = 8, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
		      "ld %A0,%b1+" CR_TAB
		      "ld %B0,%b1+" CR_TAB
		      "ld %C0,%b1+" CR_TAB
		      "ld %D0,%b1" CR_TAB
		      TINY_SBIW (%I1, %J1, %o1+3));
    }
}
4431
/* Output an SImode (4-byte) load from memory OP[1] into register OP[0].
   INSN is the move insn; *L (may be NULL) receives the number of output
   instructions.  Returns the asm template.  Dispatches on the address
   form of the source: plain register, reg+displacement, pre-decrement,
   post-increment, or constant address.  */

static const char*
out_movsi_r_mr (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* Destination overlaps X: load high-to-low, keeping r27's
               value in __tmp_reg__ until X is no longer needed.
               "ld r26,-X" is undefined */
            return *l=7, ("adiw r26,3" CR_TAB
                          "ld r29,X" CR_TAB
                          "ld r28,-X" CR_TAB
                          "ld __tmp_reg__,-X" CR_TAB
                          "sbiw r26,1" CR_TAB
                          "ld r26,X" CR_TAB
                          "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            /* %C0 is r26: buffer it so the last load still has X.  */
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld __tmp_reg__,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return *l=4, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X");
          else
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "sbiw r26,3");
        }
      else                          /* base is Y or Z: LDD available */
        {
          if (reg_dest == reg_base)
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1" CR_TAB
                          "ld %A0,%1" CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            return *l=5, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd __tmp_reg__,%1+2" CR_TAB
                          "ldd %D0,%1+3" CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_disp_tiny (insn, op, l);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement exceeds LDD range: only Y can be adjusted and
             restored here.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60" CR_TAB
                            "ldd %B0,Y+61" CR_TAB
                            "ldd %C0,Y+62" CR_TAB
                            "ldd %D0,Y+63" CR_TAB
                            "sbiw r28,%o1-60");

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB
                          "ld %A0,Y" CR_TAB
                          "ldd %B0,Y+1" CR_TAB
                          "ldd %C0,Y+2" CR_TAB
                          "ldd %D0,Y+3" CR_TAB
                          "subi r28,lo8(%o1)" CR_TAB
                          "sbci r29,hi8(%o1)");
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              *l = 7;
              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3" CR_TAB
                      "ld r29,X" CR_TAB
                      "ld r28,-X" CR_TAB
                      "ld __tmp_reg__,-X" CR_TAB
                      "sbiw r26,1" CR_TAB
                      "ld r26,X" CR_TAB
                      "mov r27,__tmp_reg__");
            }
          *l = 6;
          if (reg_dest == REG_X - 2)
            /* Destination is r24..r27: name the registers explicitly
               since %C0/%D0 alias X itself.  */
            return ("adiw r26,%o1" CR_TAB
                    "ld r24,X+" CR_TAB
                    "ld r25,X+" CR_TAB
                    "ld __tmp_reg__,X+" CR_TAB
                    "ld r27,X" CR_TAB
                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB
                  "ld %A0,X+" CR_TAB
                  "ld %B0,X+" CR_TAB
                  "ld %C0,X+" CR_TAB
                  "ld %D0,X" CR_TAB
                  "sbiw r26,%o1+3");
        }
      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1" CR_TAB
                      "ldd %C0,%C1" CR_TAB
                      "ldd __tmp_reg__,%B1" CR_TAB
                      "ldd %A0,%A1" CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1" CR_TAB
                      "ldd %B0,%B1" CR_TAB
                      "ldd __tmp_reg__,%C1" CR_TAB
                      "ldd %D0,%D1" CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          *l = 4;
          return ("in %A0,%i1" CR_TAB
                  "in %B0,%i1+1" CR_TAB
                  "in %C0,%i1+2" CR_TAB
                  "in %D0,%i1+3");
        }
      else
        {
          /* Tiny LDS is one word per byte; full LDS is two.  */
          *l = AVR_TINY ? 4 : 8;
          return ("lds %A0,%m1" CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");
        }
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4612
/* AVR_TINY: output an SImode store  *OP[0] = OP[1]  where the destination
   address is a plain pointer register.  INSN is the move insn, *L receives
   the instruction count.  Returns the asm template.  */

static const char*
avr_out_movsi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  if (reg_base == reg_src)
    {
      /* Source low bytes alias the pointer itself.
         "ld r26,-X" is undefined */
      if (reg_unused_after (insn, base))
        {
          return *l = 7, ("mov __tmp_reg__, %B1" CR_TAB
                          "st %0,%A1" CR_TAB
                          TINY_ADIW (%E0, %F0, 1) CR_TAB
                          "st %0+,__tmp_reg__" CR_TAB
                          "st %0+,%C1" CR_TAB
                          "st %0+,%D1");
        }
      else
        {
          /* Same, plus restore the pointer afterwards.  */
          return *l = 9, ("mov __tmp_reg__, %B1" CR_TAB
                          "st %0,%A1" CR_TAB
                          TINY_ADIW (%E0, %F0, 1) CR_TAB
                          "st %0+,__tmp_reg__" CR_TAB
                          "st %0+,%C1" CR_TAB
                          "st %0+,%D1" CR_TAB
                          TINY_SBIW (%E0, %F0, 3));
        }
    }
  else if (reg_base == reg_src + 2)
    {
      /* %C1/%D1 alias the pointer: park them in __zero_reg__ and
         __tmp_reg__ before storing, then clear __zero_reg__ again.  */
      if (reg_unused_after (insn, base))
        return *l = 7, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1" CR_TAB
                        "st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,__zero_reg__" CR_TAB
                        "st %0,__tmp_reg__" CR_TAB
                        "clr __zero_reg__");
      else
        return *l = 9, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1" CR_TAB
                        "st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,__zero_reg__" CR_TAB
                        "st %0,__tmp_reg__" CR_TAB
                        "clr __zero_reg__" CR_TAB
                        TINY_SBIW (%E0, %F0, 3));
    }

  /* No overlap: plain post-increment stores, then restore the pointer.  */
  return *l = 6, ("st %0+,%A1" CR_TAB
                  "st %0+,%B1" CR_TAB
                  "st %0+,%C1" CR_TAB
                  "st %0,%D1" CR_TAB
                  TINY_SBIW (%E0, %F0, 3));
}
4672
4673 static const char*
avr_out_movsi_mr_r_reg_disp_tiny(rtx op[],int * l)4674 avr_out_movsi_mr_r_reg_disp_tiny (rtx op[], int *l)
4675 {
4676 rtx dest = op[0];
4677 rtx src = op[1];
4678 rtx base = XEXP (dest, 0);
4679 int reg_base = REGNO (XEXP (base, 0));
4680 int reg_src =true_regnum (src);
4681
4682 if (reg_base == reg_src)
4683 {
4684 *l = 11;
4685 return ("mov __tmp_reg__,%A2" CR_TAB
4686 "mov __zero_reg__,%B2" CR_TAB
4687 TINY_ADIW (%I0, %J0, %o0) CR_TAB
4688 "st %b0+,__tmp_reg__" CR_TAB
4689 "st %b0+,__zero_reg__" CR_TAB
4690 "st %b0+,%C2" CR_TAB
4691 "st %b0,%D2" CR_TAB
4692 "clr __zero_reg__" CR_TAB
4693 TINY_SBIW (%I0, %J0, %o0+3));
4694 }
4695 else if (reg_src == reg_base - 2)
4696 {
4697 *l = 11;
4698 return ("mov __tmp_reg__,%C2" CR_TAB
4699 "mov __zero_reg__,%D2" CR_TAB
4700 TINY_ADIW (%I0, %J0, %o0) CR_TAB
4701 "st %b0+,%A0" CR_TAB
4702 "st %b0+,%B0" CR_TAB
4703 "st %b0+,__tmp_reg__" CR_TAB
4704 "st %b0,__zero_reg__" CR_TAB
4705 "clr __zero_reg__" CR_TAB
4706 TINY_SBIW (%I0, %J0, %o0+3));
4707 }
4708 *l = 8;
4709 return (TINY_ADIW (%I0, %J0, %o0) CR_TAB
4710 "st %b0+,%A1" CR_TAB
4711 "st %b0+,%B1" CR_TAB
4712 "st %b0+,%C1" CR_TAB
4713 "st %b0,%D1" CR_TAB
4714 TINY_SBIW (%I0, %J0, %o0+3));
4715 }
4716
/* Output an SImode (4-byte) store of register OP[1] to memory OP[0].
   INSN is the move insn; *L (may be NULL) receives the number of output
   instructions.  Returns the asm template.  Dispatches on the address
   form of the destination: constant address, plain register, reg+disp,
   pre-decrement, or post-increment.  */

static const char*
out_movsi_mr_r (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          return *l=4,("out %i0, %A1" CR_TAB
                       "out %i0+1,%B1" CR_TAB
                       "out %i0+2,%C1" CR_TAB
                       "out %i0+3,%D1");
        }
      else
        {
          /* Tiny STS is one word per byte; full STS is two.  */
          *l = AVR_TINY ? 4 : 8;
          return ("sts %m0,%A1" CR_TAB
                  "sts %m0+1,%B1" CR_TAB
                  "sts %m0+2,%C1" CR_TAB
                  "sts %m0+3,%D1");
        }
    }

  if (reg_base > 0)                 /* (r) */
    {
      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_src == REG_X)
            {
              /* Source overlaps X.  "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
                return *l=6, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29");
              else
                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29" CR_TAB
                              "sbiw r26,3");
            }
          else if (reg_base == reg_src + 2)
            {
              /* %C1/%D1 alias X: buffer them in __zero_reg__ and
                 __tmp_reg__, restore __zero_reg__ afterwards.  */
              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__");
              else
                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__" CR_TAB
                              "sbiw r26,3");
            }
          return *l=5, ("st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,%C1" CR_TAB
                        "st %0,%D1" CR_TAB
                        "sbiw r26,3");
        }
      else                          /* base is Y or Z: STD available */
        return *l=4, ("st %0,%A1" CR_TAB
                      "std %0+1,%B1" CR_TAB
                      "std %0+2,%C1" CR_TAB
                      "std %0+3,%D1");
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_disp_tiny (op, l);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD range: only Y can be adjusted
             and restored here.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, ("adiw r28,%o0-60" CR_TAB
                            "std Y+60,%A1" CR_TAB
                            "std Y+61,%B1" CR_TAB
                            "std Y+62,%C1" CR_TAB
                            "std Y+63,%D1" CR_TAB
                            "sbiw r28,%o0-60");

          return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
                          "sbci r29,hi8(-%o0)" CR_TAB
                          "st Y,%A1" CR_TAB
                          "std Y+1,%B1" CR_TAB
                          "std Y+2,%C1" CR_TAB
                          "std Y+3,%D1" CR_TAB
                          "subi r28,lo8(%o0)" CR_TAB
                          "sbci r29,hi8(%o0)");
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              /* Source aliases X: save r26/r27 before the adiw
                 clobbers them.  */
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X+,__zero_reg__" CR_TAB
                      "st X+,r28" CR_TAB
                      "st X,r29" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }
          else if (reg_src == REG_X - 2)
            {
              /* Source high bytes alias X: save them first.  */
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,r24" CR_TAB
                      "st X+,r25" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X,__zero_reg__" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }
          *l = 6;
          return ("adiw r26,%o0" CR_TAB
                  "st X+,%A1" CR_TAB
                  "st X+,%B1" CR_TAB
                  "st X+,%C1" CR_TAB
                  "st X,%D1" CR_TAB
                  "sbiw r26,%o0+3");
        }
      return *l=4, ("std %A0,%A1" CR_TAB
                    "std %B0,%B1" CR_TAB
                    "std %C0,%C1" CR_TAB
                    "std %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("st %0,%D1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%A1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%D1");
  fatal_insn ("unknown move insn:",insn);
  return "";
}
4892
4893 const char *
output_movsisf(rtx_insn * insn,rtx operands[],int * l)4894 output_movsisf (rtx_insn *insn, rtx operands[], int *l)
4895 {
4896 int dummy;
4897 rtx dest = operands[0];
4898 rtx src = operands[1];
4899 int *real_l = l;
4900
4901 if (avr_mem_flash_p (src)
4902 || avr_mem_flash_p (dest))
4903 {
4904 return avr_out_lpm (insn, operands, real_l);
4905 }
4906
4907 if (!l)
4908 l = &dummy;
4909
4910 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 4);
4911
4912 if (REG_P (dest))
4913 {
4914 if (REG_P (src)) /* mov r,r */
4915 {
4916 if (true_regnum (dest) > true_regnum (src))
4917 {
4918 if (AVR_HAVE_MOVW)
4919 {
4920 *l = 2;
4921 return ("movw %C0,%C1" CR_TAB
4922 "movw %A0,%A1");
4923 }
4924 *l = 4;
4925 return ("mov %D0,%D1" CR_TAB
4926 "mov %C0,%C1" CR_TAB
4927 "mov %B0,%B1" CR_TAB
4928 "mov %A0,%A1");
4929 }
4930 else
4931 {
4932 if (AVR_HAVE_MOVW)
4933 {
4934 *l = 2;
4935 return ("movw %A0,%A1" CR_TAB
4936 "movw %C0,%C1");
4937 }
4938 *l = 4;
4939 return ("mov %A0,%A1" CR_TAB
4940 "mov %B0,%B1" CR_TAB
4941 "mov %C0,%C1" CR_TAB
4942 "mov %D0,%D1");
4943 }
4944 }
4945 else if (CONSTANT_P (src))
4946 {
4947 return output_reload_insisf (operands, NULL_RTX, real_l);
4948 }
4949 else if (MEM_P (src))
4950 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
4951 }
4952 else if (MEM_P (dest))
4953 {
4954 const char *templ;
4955
4956 if (src == CONST0_RTX (GET_MODE (dest)))
4957 operands[1] = zero_reg_rtx;
4958
4959 templ = out_movsi_mr_r (insn, operands, real_l);
4960
4961 if (!real_l)
4962 output_asm_insn (templ, operands);
4963
4964 operands[1] = src;
4965 return "";
4966 }
4967 fatal_insn ("invalid insn:", insn);
4968 return "";
4969 }
4970
4971
4972 /* Handle loads of 24-bit types from memory to register. */
4973
/* AVR_TINY: output a PSImode (24-bit) load  OP[0] = *OP[1]  where the
   source address is a plain pointer register.  INSN is the move insn,
   *PLEN receives the instruction count (or is NULL).  Returns the asm
   template (possibly "" after emitting via avr_asm_len).  */

static const char*
avr_out_load_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base == reg_dest)
    {
      /* Destination overlaps the pointer: load high-to-low, keeping
         byte B in __tmp_reg__ until the pointer is consumed.  */
      return avr_asm_len (TINY_ADIW (%E1, %F1, 2) CR_TAB
                          "ld %C0,%1" CR_TAB
                          "ld __tmp_reg__,-%1" CR_TAB
                          TINY_SBIW (%E1, %F1, 1) CR_TAB
                          "ld %A0,%1" CR_TAB
                          "mov %B0,__tmp_reg__", op, plen, -8);
    }
  else
    {
      avr_asm_len ("ld %A0,%1+" CR_TAB
                   "ld %B0,%1+" CR_TAB
                   "ld %C0,%1", op, plen, -3);

      /* Restore the pointer unless %C0 clobbered it (reg_dest ==
         reg_base - 2) or it is dead after this insn.  */
      if (reg_dest != reg_base - 2
          && !reg_unused_after (insn, base))
        {
          avr_asm_len (TINY_SBIW (%E1, %F1, 2), op, plen, 2);
        }
      return "";
    }
}
5006
5007 static const char*
avr_out_load_psi_reg_disp_tiny(rtx_insn * insn,rtx * op,int * plen)5008 avr_out_load_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
5009 {
5010 rtx dest = op[0];
5011 rtx src = op[1];
5012 rtx base = XEXP (src, 0);
5013 int reg_dest = true_regnum (dest);
5014 int reg_base = true_regnum (base);
5015
5016 reg_base = true_regnum (XEXP (base, 0));
5017 if (reg_base == reg_dest)
5018 {
5019 return avr_asm_len (TINY_ADIW (%I1, %J1, %o1+2) CR_TAB
5020 "ld %C0,%b1" CR_TAB
5021 "ld __tmp_reg__,-%b1" CR_TAB
5022 TINY_SBIW (%I1, %J1, 1) CR_TAB
5023 "ld %A0,%b1" CR_TAB
5024 "mov %B0,__tmp_reg__", op, plen, -8);
5025 }
5026 else
5027 {
5028 avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
5029 "ld %A0,%b1+" CR_TAB
5030 "ld %B0,%b1+" CR_TAB
5031 "ld %C0,%b1", op, plen, -5);
5032
5033 if (reg_dest != reg_base - 2
5034 && !reg_unused_after (insn, XEXP (base, 0)))
5035 avr_asm_len (TINY_SBIW (%I1, %J1, %o1+2), op, plen, 2);
5036
5037 return "";
5038 }
5039 }
5040
/* Output a PSImode (24-bit) load from memory OP[1] into register OP[0].
   INSN is the move insn; *PLEN (may be NULL) receives the instruction
   count via avr_asm_len.  Dispatches on the address form of the source:
   plain register, reg+displacement, pre-decrement, post-increment, or
   constant address.  */

static const char*
avr_out_load_psi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_load_psi_reg_no_disp_tiny (insn, op, plen);

      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* Destination overlaps X: load high-to-low via __tmp_reg__.
               "ld r26,-X" is undefined */
            return avr_asm_len ("adiw r26,2" CR_TAB
                                "ld r28,X" CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "sbiw r26,1" CR_TAB
                                "ld r26,X" CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);
          else
            {
              avr_asm_len ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X", op, plen, -3);

              /* Restore X unless %C0 overwrote it (dest == r24..) or X
                 is dead after this insn.  */
              if (reg_dest != REG_X - 2
                  && !reg_unused_after (insn, base))
                {
                  avr_asm_len ("sbiw r26,2", op, plen, 1);
                }

              return "";
            }
        }
      else /* reg_base != REG_X */
        {
          if (reg_dest == reg_base)
            return avr_asm_len ("ldd %C0,%1+2" CR_TAB
                                "ldd __tmp_reg__,%1+1"  CR_TAB
                                "ld  %A0,%1" CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);
          else
            return avr_asm_len ("ld  %A0,%1" CR_TAB
                                "ldd %B0,%1+1" CR_TAB
                                "ldd %C0,%1+2", op, plen, -3);
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_load_psi_reg_disp_tiny (insn, op, plen);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement exceeds LDD range: only Y can be adjusted and
             restored here.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61" CR_TAB
                                "ldd %B0,Y+62" CR_TAB
                                "ldd %C0,Y+63" CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %A0,Y" CR_TAB
                              "ldd %B0,Y+1" CR_TAB
                              "ldd %C0,Y+2" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2" CR_TAB
                                  "ld r28,X" CR_TAB
                                  "ld __tmp_reg__,-X" CR_TAB
                                  "sbiw r26,1" CR_TAB
                                  "ld r26,X" CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);
            }

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X+" CR_TAB
                       "ld %C0,X", op, plen, -4);

          /* reg_dest == REG_W means %C0 is r26, i.e. X was clobbered
             anyway; otherwise restore X unless it is dead.  */
          if (reg_dest != REG_W
              && !reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

          return "";
        }

      if (reg_dest == reg_base)
        return avr_asm_len ("ldd %C0,%C1" CR_TAB
                            "ldd __tmp_reg__,%B1"  CR_TAB
                            "ldd %A0,%A1" CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

      return avr_asm_len ("ldd %A0,%A1" CR_TAB
                          "ldd %B0,%B1" CR_TAB
                          "ldd %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);

  else if (CONSTANT_ADDRESS_P (base))
    {
      /* Tiny LDS is one word per byte; full LDS is two.  */
      int n_words = AVR_TINY ? 3 : 6;
      return avr_asm_len ("lds %A0,%m1" CR_TAB
                          "lds %B0,%m1+1" CR_TAB
                          "lds %C0,%m1+2", op, plen , -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
5178
5179
5180 static const char*
avr_out_store_psi_reg_no_disp_tiny(rtx_insn * insn,rtx * op,int * plen)5181 avr_out_store_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
5182 {
5183 rtx dest = op[0];
5184 rtx src = op[1];
5185 rtx base = XEXP (dest, 0);
5186 int reg_base = true_regnum (base);
5187 int reg_src = true_regnum (src);
5188
5189 if (reg_base == reg_src)
5190 {
5191 avr_asm_len ("st %0,%A1" CR_TAB
5192 "mov __tmp_reg__,%B1" CR_TAB
5193 TINY_ADIW (%E0, %F0, 1) CR_TAB /* st X+, r27 is undefined */
5194 "st %0+,__tmp_reg__" CR_TAB
5195 "st %0,%C1", op, plen, -6);
5196
5197 }
5198 else if (reg_src == reg_base - 2)
5199 {
5200 avr_asm_len ("st %0,%A1" CR_TAB
5201 "mov __tmp_reg__,%C1" CR_TAB
5202 TINY_ADIW (%E0, %F0, 1) CR_TAB
5203 "st %0+,%B1" CR_TAB
5204 "st %0,__tmp_reg__", op, plen, 6);
5205 }
5206 else
5207 {
5208 avr_asm_len ("st %0+,%A1" CR_TAB
5209 "st %0+,%B1" CR_TAB
5210 "st %0,%C1", op, plen, -3);
5211 }
5212
5213 if (!reg_unused_after (insn, base))
5214 avr_asm_len (TINY_SBIW (%E0, %F0, 2), op, plen, 2);
5215
5216 return "";
5217 }
5218
/* AVR_TINY: output a PSImode (24-bit) store  *(reg + disp) = OP[1]
   where OP[0] is a MEM whose address is a PLUS.  INSN is the move insn,
   *PLEN receives the instruction count (or is NULL).  Returns "" after
   emitting via avr_asm_len.  */

static const char*
avr_out_store_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_src == reg_base)
    /* %A1/%B1 alias the pointer and would be clobbered by TINY_ADIW:
       park them in __tmp_reg__ / __zero_reg__ first, then restore
       __zero_reg__.  */
    avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
                 "mov __zero_reg__,%B1" CR_TAB
                 TINY_ADIW (%I0, %J0, %o0) CR_TAB
                 "st %b0+,__tmp_reg__" CR_TAB
                 "st %b0+,__zero_reg__" CR_TAB
                 "st %b0,%C1" CR_TAB
                 "clr __zero_reg__", op, plen, -8);
  else if (reg_src == reg_base - 2)
    /* %C1 aliases the pointer low byte: buffer it in __tmp_reg__.  */
    avr_asm_len ("mov __tmp_reg__,%C1" CR_TAB
                 TINY_ADIW (%I0, %J0, %o0) CR_TAB
                 "st %b0+,%A1" CR_TAB
                 "st %b0+,%B1" CR_TAB
                 "st %b0,__tmp_reg__", op, plen, -6);
  else
    avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
                 "st %b0+,%A1" CR_TAB
                 "st %b0+,%B1" CR_TAB
                 "st %b0,%C1", op, plen, -5);

  /* Restore the pointer to its original value if still live.  */
  if (!reg_unused_after (insn, XEXP (base, 0)))
    avr_asm_len (TINY_SBIW (%I0, %J0, %o0+2), op, plen, 2);

  return "";
}
5253
5254 /* Handle store of 24-bit type from register or zero to memory. */
5255
5256 static const char*
avr_out_store_psi(rtx_insn * insn,rtx * op,int * plen)5257 avr_out_store_psi (rtx_insn *insn, rtx *op, int *plen)
5258 {
5259 rtx dest = op[0];
5260 rtx src = op[1];
5261 rtx base = XEXP (dest, 0);
5262 int reg_base = true_regnum (base);
5263
5264 if (CONSTANT_ADDRESS_P (base))
5265 {
5266 int n_words = AVR_TINY ? 3 : 6;
5267 return avr_asm_len ("sts %m0,%A1" CR_TAB
5268 "sts %m0+1,%B1" CR_TAB
5269 "sts %m0+2,%C1", op, plen, -n_words);
5270 }
5271
5272 if (reg_base > 0) /* (r) */
5273 {
5274 if (AVR_TINY)
5275 return avr_out_store_psi_reg_no_disp_tiny (insn, op, plen);
5276
5277 if (reg_base == REG_X) /* (R26) */
5278 {
5279 gcc_assert (!reg_overlap_mentioned_p (base, src));
5280
5281 avr_asm_len ("st %0+,%A1" CR_TAB
5282 "st %0+,%B1" CR_TAB
5283 "st %0,%C1", op, plen, -3);
5284
5285 if (!reg_unused_after (insn, base))
5286 avr_asm_len ("sbiw r26,2", op, plen, 1);
5287
5288 return "";
5289 }
5290 else
5291 return avr_asm_len ("st %0,%A1" CR_TAB
5292 "std %0+1,%B1" CR_TAB
5293 "std %0+2,%C1", op, plen, -3);
5294 }
5295 else if (GET_CODE (base) == PLUS) /* (R + i) */
5296 {
5297 int disp = INTVAL (XEXP (base, 1));
5298
5299 if (AVR_TINY)
5300 return avr_out_store_psi_reg_disp_tiny (insn, op, plen);
5301
5302 reg_base = REGNO (XEXP (base, 0));
5303
5304 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
5305 {
5306 if (reg_base != REG_Y)
5307 fatal_insn ("incorrect insn:",insn);
5308
5309 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
5310 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
5311 "std Y+61,%A1" CR_TAB
5312 "std Y+62,%B1" CR_TAB
5313 "std Y+63,%C1" CR_TAB
5314 "sbiw r28,%o0-61", op, plen, -5);
5315
5316 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
5317 "sbci r29,hi8(-%o0)" CR_TAB
5318 "st Y,%A1" CR_TAB
5319 "std Y+1,%B1" CR_TAB
5320 "std Y+2,%C1" CR_TAB
5321 "subi r28,lo8(%o0)" CR_TAB
5322 "sbci r29,hi8(%o0)", op, plen, -7);
5323 }
5324 if (reg_base == REG_X)
5325 {
5326 /* (X + d) = R */
5327 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
5328
5329 avr_asm_len ("adiw r26,%o0" CR_TAB
5330 "st X+,%A1" CR_TAB
5331 "st X+,%B1" CR_TAB
5332 "st X,%C1", op, plen, -4);
5333
5334 if (!reg_unused_after (insn, XEXP (base, 0)))
5335 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
5336
5337 return "";
5338 }
5339
5340 return avr_asm_len ("std %A0,%A1" CR_TAB
5341 "std %B0,%B1" CR_TAB
5342 "std %C0,%C1", op, plen, -3);
5343 }
5344 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
5345 return avr_asm_len ("st %0,%C1" CR_TAB
5346 "st %0,%B1" CR_TAB
5347 "st %0,%A1", op, plen, -3);
5348 else if (GET_CODE (base) == POST_INC) /* (R++) */
5349 return avr_asm_len ("st %0,%A1" CR_TAB
5350 "st %0,%B1" CR_TAB
5351 "st %0,%C1", op, plen, -3);
5352
5353 fatal_insn ("unknown move insn:",insn);
5354 return "";
5355 }
5356
5357
5358 /* Move around 24-bit stuff. */
5359
5360 const char *
avr_out_movpsi(rtx_insn * insn,rtx * op,int * plen)5361 avr_out_movpsi (rtx_insn *insn, rtx *op, int *plen)
5362 {
5363 rtx dest = op[0];
5364 rtx src = op[1];
5365
5366 if (avr_mem_flash_p (src)
5367 || avr_mem_flash_p (dest))
5368 {
5369 return avr_out_lpm (insn, op, plen);
5370 }
5371
5372 if (register_operand (dest, VOIDmode))
5373 {
5374 if (register_operand (src, VOIDmode)) /* mov r,r */
5375 {
5376 if (true_regnum (dest) > true_regnum (src))
5377 {
5378 avr_asm_len ("mov %C0,%C1", op, plen, -1);
5379
5380 if (AVR_HAVE_MOVW)
5381 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
5382 else
5383 return avr_asm_len ("mov %B0,%B1" CR_TAB
5384 "mov %A0,%A1", op, plen, 2);
5385 }
5386 else
5387 {
5388 if (AVR_HAVE_MOVW)
5389 avr_asm_len ("movw %A0,%A1", op, plen, -1);
5390 else
5391 avr_asm_len ("mov %A0,%A1" CR_TAB
5392 "mov %B0,%B1", op, plen, -2);
5393
5394 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
5395 }
5396 }
5397 else if (CONSTANT_P (src))
5398 {
5399 return avr_out_reload_inpsi (op, NULL_RTX, plen);
5400 }
5401 else if (MEM_P (src))
5402 return avr_out_load_psi (insn, op, plen); /* mov r,m */
5403 }
5404 else if (MEM_P (dest))
5405 {
5406 rtx xop[2];
5407
5408 xop[0] = dest;
5409 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
5410
5411 return avr_out_store_psi (insn, xop, plen);
5412 }
5413
5414 fatal_insn ("invalid insn:", insn);
5415 return "";
5416 }
5417
/* AVR_TINY: output a QImode store  *(reg + disp) = OP[1]  where OP[0] is
   a MEM whose address is a PLUS.  INSN is the move insn, *PLEN receives
   the instruction count (or is NULL).  Returns "" after emitting via
   avr_asm_len.  */

static const char*
avr_out_movqi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
    {
      /* Source aliases the pointer: copy it to __tmp_reg__ before
         TINY_ADIW clobbers it.  */
      avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                   TINY_ADIW (%I0, %J0, %o0) CR_TAB
                   "st %b0,__tmp_reg__", op, plen, -4);
    }
  else
    {
      avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
                   "st %b0,%1", op, plen, -3);
    }

  /* Restore the pointer to its original value if still live.  */
  if (!reg_unused_after (insn, XEXP (x, 0)))
    avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);

  return "";
}
5442
/* Output a QImode (1-byte) store of register OP[1] to memory OP[0].
   INSN is the move insn; *PLEN (may be NULL) receives the instruction
   count via avr_asm_len.  Handles constant addresses, reg+displacement
   addresses, and plain / auto-modified register addresses.  */

static const char*
out_movqi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* Tiny STS is one word; full STS is two.  I/O addresses use OUT.  */
      int n_words = AVR_TINY ? 1 : 2;
      return io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -n_words);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
        return avr_out_movqi_mr_r_reg_disp_tiny (insn, op, plen);

      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          /* Displacement exceeds STD's 0..63 range: only Y can be
             adjusted and restored here.  */
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1" CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* X has no displacement form: ADIW to the target address.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              /* Source aliases X: copy it to __tmp_reg__ first.  */
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0" CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);
            }
          else
            {
              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);
            }

          if (!reg_unused_after (insn, XEXP (x, 0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %0,%1", op, plen, -1);
    }

  return avr_asm_len ("st %0,%1", op, plen, -1);
}
5509
5510
5511 /* Helper for the next function for XMEGA. It does the same
5512 but with low byte first. */
5513
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first: output an HImode store of register OP[1]
   to memory OP[0].  INSN is the move insn, *PLEN (may be NULL)
   receives the instruction count via avr_asm_len.  */

static const char*
avr_out_movhi_mr_r_xmega (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      return io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0,%A1" CR_TAB
                       "out %i0+1,%B1", op, plen, -2)

        : avr_asm_len ("sts %m0,%A1" CR_TAB
                       "sts %m0+1,%B1", op, plen, -4);
    }

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26" CR_TAB
                     "adiw r26,1" CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      /* Restore X unless it is dead after this insn.  */
      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD range: only Y can be adjusted and
             restored here.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0" CR_TAB
                       "st X+,__tmp_reg__" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      /* Non-volatile pre-decrement can store high byte first;
         volatile must still write the low byte first.  */
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      return avr_asm_len ("st %0,%A1"  CR_TAB
                          "st %0,%B1", op, plen, -2);

    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
5623
/* Output code to store the 16-bit register OP[1] to memory OP[0], where
   the address is a plain register (no displacement), for the reduced-core
   (AVR_TINY) devices.  INSN is the move insn.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length of the sequence in words;
                 don't output anything.

   May clobber __tmp_reg__; the address register is restored afterwards
   unless it is known to be unused after INSN.  */

static const char*
avr_out_movhi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (reg_base == reg_src)
    {
      /* The address register overlaps the source: park the high byte in
         __tmp_reg__ before the first store clobbers the address.  */
      return !mem_volatile_p && reg_unused_after (insn, src)
        ? avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
                       "st %0,%A1" CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__", op, plen, -5)
        : avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__" CR_TAB
                       TINY_SBIW (%E0, %F0, 1) CR_TAB
                       "st %0, %A1", op, plen, -7);
    }

  /* Post-increment store is shortest; the "volatile" variant writes the
     high byte first and restores the address by the final pre-decrement.  */
  return !mem_volatile_p && reg_unused_after (insn, base)
    ? avr_asm_len ("st %0+,%A1" CR_TAB
                   "st %0,%B1", op, plen, -2)
    : avr_asm_len (TINY_ADIW (%E0, %F0, 1) CR_TAB
                   "st %0,%B1" CR_TAB
                   "st -%0,%A1", op, plen, -4);
}
5655
/* Output code to store the 16-bit register OP[1] to memory OP[0] addressed
   as (base register + displacement) on reduced-core (AVR_TINY) devices.
   INSN is the move insn.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length of the sequence in words;
                 don't output anything.

   The base register is adjusted by the displacement up front and undone
   afterwards if it is still live.  May clobber __tmp_reg__ and temporarily
   __zero_reg__ (which is cleared again before returning).  */

static const char*
avr_out_movhi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_src == reg_base)
    /* Source overlaps the address register: copy both bytes to fixed
       temporaries before adjusting the address.  */
    avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
                 "mov __zero_reg__,%B1" CR_TAB
                 TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
                 "st %b0,__zero_reg__" CR_TAB
                 "st -%b0,__tmp_reg__" CR_TAB
                 "clr __zero_reg__", op, plen, -7);
  else
    avr_asm_len (TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
                 "st %b0,%B1" CR_TAB
                 "st -%b0,%A1", op, plen, -4);

  /* Undo the displacement adjustment if the base register is still
     needed after this insn.  */
  if (!reg_unused_after (insn, XEXP (base, 0)))
    avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);

  return "";
}
5682
/* Output code to store the 16-bit register OP[1] to memory OP[0] with a
   post-increment address on reduced-core (AVR_TINY) devices.  The high
   byte is written first (address + 1), then the low byte via
   pre-decrement, and finally the address register is advanced past the
   stored word.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length of the sequence in words;
                 don't output anything.  */

static const char*
avr_out_movhi_mr_r_post_inc_tiny (rtx op[], int *plen)
{
  return avr_asm_len (TINY_ADIW (%I0, %J0, 1) CR_TAB
                      "st %p0,%B1" CR_TAB
                      "st -%p0,%A1" CR_TAB
                      TINY_ADIW (%I0, %J0, 2), op, plen, -6);
}
5691
/* Output code to store the 16-bit register OP[1] to memory OP[0].
   Dispatches on the address form of OP[0]: constant address, plain
   register, register + displacement, pre-decrement or post-increment.
   INSN is the move insn.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length of the sequence in words;
                 don't output anything.  */

static const char*
out_movhi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like.  */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* Direct store: OUT for I/O addresses, otherwise STS which takes
         2 words per byte except on AVR_TINY.  High byte goes first.  */
      int n_words = AVR_TINY ? 2 : 4;
      return io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                       "out %i0,%A1", op, plen, -2)

        : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                       "sts %m0,%A1", op, plen, -n_words);
    }

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_no_disp_tiny (insn, op, plen);

      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        return !mem_volatile_p && reg_unused_after (insn, src)
               ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X,__tmp_reg__", op, plen, -4)

               : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X,__tmp_reg__" CR_TAB
                              "sbiw r26,1" CR_TAB
                              "st X,r26", op, plen, -5);

      /* Base is X: use post-increment when X may be clobbered, otherwise
         store high byte first and restore X via pre-decrement.  */
      return !mem_volatile_p && reg_unused_after (insn, base)
             ? avr_asm_len ("st X+,%A1" CR_TAB
                            "st X,%B1", op, plen, -2)
             : avr_asm_len ("adiw r26,1" CR_TAB
                            "st X,%B1" CR_TAB
                            "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_disp_tiny (insn, op, plen);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement too large for STD: only Y can be adjusted.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
                 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                                "std Y+63,%B1" CR_TAB
                                "std Y+62,%A1" CR_TAB
                                "sbiw r28,%o0-62", op, plen, -4)

                 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                                "sbci r29,hi8(-%o0)" CR_TAB
                                "std Y+1,%B1" CR_TAB
                                "st Y,%A1" CR_TAB
                                "subi r28,lo8(%o0)" CR_TAB
                                "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
             ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                            "mov __zero_reg__,r27" CR_TAB
                            "adiw r26,%o0+1" CR_TAB
                            "st X,__zero_reg__" CR_TAB
                            "st -X,__tmp_reg__" CR_TAB
                            "clr __zero_reg__" CR_TAB
                            "sbiw r26,%o0", op, plen, -7)

             : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                            "st X,%B1" CR_TAB
                            "st -X,%A1" CR_TAB
                            "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC)    /* (--R) */
    {
      /* Two pre-decrement stores naturally write the high byte first.  */
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC)   /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "st %0,%B1", op, plen, -2);

      if (AVR_TINY)
        return avr_out_movhi_mr_r_post_inc_tiny (op, plen);

      /* Volatile: write high byte first, then advance the pointer.  */
      return REGNO (XEXP (base, 0)) == REG_X
             ? avr_asm_len ("adiw r26,1" CR_TAB
                            "st X,%B1" CR_TAB
                            "st -X,%A1" CR_TAB
                            "adiw r26,2", op, plen, -4)

             : avr_asm_len ("std %p0+1,%B1" CR_TAB
                            "st %p0,%A1" CR_TAB
                            "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
5824
5825 /* Return 1 if frame pointer for current function required. */
5826
5827 static bool
avr_frame_pointer_required_p(void)5828 avr_frame_pointer_required_p (void)
5829 {
5830 return (cfun->calls_alloca
5831 || cfun->calls_setjmp
5832 || cfun->has_nonlocal_label
5833 || crtl->args.info.nregs == 0
5834 || get_frame_size () > 0);
5835 }
5836
5837 /* Returns the condition of compare insn INSN, or UNKNOWN. */
5838
5839 static RTX_CODE
compare_condition(rtx_insn * insn)5840 compare_condition (rtx_insn *insn)
5841 {
5842 rtx_insn *next = next_real_insn (insn);
5843
5844 if (next && JUMP_P (next))
5845 {
5846 rtx pat = PATTERN (next);
5847 rtx src = SET_SRC (pat);
5848
5849 if (IF_THEN_ELSE == GET_CODE (src))
5850 return GET_CODE (XEXP (src, 0));
5851 }
5852
5853 return UNKNOWN;
5854 }
5855
5856
5857 /* Returns true iff INSN is a tst insn that only tests the sign. */
5858
5859 static bool
compare_sign_p(rtx_insn * insn)5860 compare_sign_p (rtx_insn *insn)
5861 {
5862 RTX_CODE cond = compare_condition (insn);
5863 return (cond == GE || cond == LT);
5864 }
5865
5866
5867 /* Returns true iff the next insn is a JUMP_INSN with a condition
5868 that needs to be swapped (GT, GTU, LE, LEU). */
5869
5870 static bool
compare_diff_p(rtx_insn * insn)5871 compare_diff_p (rtx_insn *insn)
5872 {
5873 RTX_CODE cond = compare_condition (insn);
5874 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
5875 }
5876
5877 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
5878
5879 static bool
compare_eq_p(rtx_insn * insn)5880 compare_eq_p (rtx_insn *insn)
5881 {
5882 RTX_CODE cond = compare_condition (insn);
5883 return (cond == EQ || cond == NE);
5884 }
5885
5886
/* Output compare instruction

      compare (XOP[0], XOP[1])

   for a register XOP[0] and a compile-time constant XOP[1].  Return "".
   XOP[2] is an 8-bit scratch register as needed.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
                 Don't output anything.  */

const char*
avr_out_compare (rtx_insn *insn, rtx *xop, int *plen)
{
  /* Register to compare and value to compare against. */
  rtx xreg = xop[0];
  rtx xval = xop[1];

  /* MODE of the comparison.  */
  machine_mode mode;

  /* Number of bytes to operate on.  */
  int n_bytes = GET_MODE_SIZE (GET_MODE (xreg));

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.  */
  int clobber_val = -1;

  /* Map fixed mode operands to integer operands with the same binary
     representation.  They are easier to handle in the remainder.  */

  if (CONST_FIXED_P (xval))
    {
      xreg = avr_to_int_mode (xop[0]);
      xval = avr_to_int_mode (xop[1]);
    }

  mode = GET_MODE (xreg);

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  if (plen)
    *plen = 0;

  /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
     and therefore don't use this.  */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
    {
      if (xval == const1_rtx)
        {
          /* DEC turns value 1 into 0, then OR all bytes together.  */
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

          if (n_bytes >= 3)
            avr_asm_len ("or %A0,%C0", xop, plen, 1);

          if (n_bytes >= 4)
            avr_asm_len ("or %A0,%D0", xop, plen, 1);

          return "";
        }
      else if (xval == constm1_rtx)
        {
          /* AND all bytes; COM turns 0xff into 0.  */
          if (n_bytes >= 4)
            avr_asm_len ("and %A0,%D0", xop, plen, 1);

          if (n_bytes >= 3)
            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);
        }
    }

  /* Comparisons == -1 and != -1 of a d-register that's used after the
     comparison.  (If it's unused after we use CPI / SBCI or ADIW sequence
     from below.)  Instead of CPI Rlo,-1 / LDI Rx,-1 / CPC Rhi,Rx we can
     use CPI Rlo,-1 / CPC Rhi,Rlo which is 1 instruction shorter:
     If CPI is true then Rlo contains -1 and we can use Rlo instead of Rx
     when CPC'ing the high part.  If CPI is false then CPC cannot render
     the result to true.  This also works for the more generic case where
     the constant is of the form 0xabab.  */

  if (n_bytes == 2
      && xval != const0_rtx
      && test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && !reg_unused_after (insn, xreg))
    {
      rtx xlo8 = simplify_gen_subreg (QImode, xval, mode, 0);
      rtx xhi8 = simplify_gen_subreg (QImode, xval, mode, 1);

      if (INTVAL (xlo8) == INTVAL (xhi8))
        {
          xop[0] = xreg;
          xop[1] = xlo8;

          return avr_asm_len ("cpi %A0,%1" CR_TAB
                              "cpc %B0,%A0", xop, plen, 2);
        }
    }

  for (int i = 0; i < n_bytes; i++)
    {
      /* We compare byte-wise.  */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte.  */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[0] = reg8;
      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */

      if (i == 0
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          if (IN_RANGE (val16, 0, 63)
              && (val8 == 0
                  || reg_unused_after (insn, xreg)))
            {
              if (AVR_TINY)
                avr_asm_len (TINY_SBIW (%A0, %B0, %1), xop, plen, 2);
              else
                avr_asm_len ("sbiw %0,%1", xop, plen, 1);

              /* SBIW consumed two bytes of the comparison.  */
              i++;
              continue;
            }

          if (n_bytes == 2
              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            {
              /* For EQ/NE against a small negative, adding the negated
                 value and testing for zero works as well.  */
              return AVR_TINY
                ? avr_asm_len (TINY_ADIW (%A0, %B0, %n1), xop, plen, 2)
                : avr_asm_len ("adiw %0,%n1", xop, plen, 1);
            }
        }

      /* Comparing against 0 is easy.  */

      if (val8 == 0)
        {
          avr_asm_len (i == 0
                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);
          continue;
        }

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register.  */

      if (ld_reg_p)
        {
          if (i == 0)
            {
              avr_asm_len ("cpi %0,%1", xop, plen, 1);
              continue;
            }
          else if (reg_unused_after (insn, xreg))
            {
              avr_asm_len ("sbci %0,%1", xop, plen, 1);
              continue;
            }
        }

      /* Must load the value into the scratch register.  */

      gcc_assert (REG_P (xop[2]));

      /* Skip the LDI if the scratch already holds this byte value.  */
      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

      avr_asm_len (i == 0
                   ? "cp %0,%2"
                   : "cpc %0,%2", xop, plen, 1);
    }

  return "";
}
6085
6086
6087 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
6088
6089 const char*
avr_out_compare64(rtx_insn * insn,rtx * op,int * plen)6090 avr_out_compare64 (rtx_insn *insn, rtx *op, int *plen)
6091 {
6092 rtx xop[3];
6093
6094 xop[0] = gen_rtx_REG (DImode, 18);
6095 xop[1] = op[0];
6096 xop[2] = op[1];
6097
6098 return avr_out_compare (insn, xop, plen);
6099 }
6100
6101 /* Output test instruction for HImode. */
6102
6103 const char*
avr_out_tsthi(rtx_insn * insn,rtx * op,int * plen)6104 avr_out_tsthi (rtx_insn *insn, rtx *op, int *plen)
6105 {
6106 if (compare_sign_p (insn))
6107 {
6108 avr_asm_len ("tst %B0", op, plen, -1);
6109 }
6110 else if (reg_unused_after (insn, op[0])
6111 && compare_eq_p (insn))
6112 {
6113 /* Faster than sbiw if we can clobber the operand. */
6114 avr_asm_len ("or %A0,%B0", op, plen, -1);
6115 }
6116 else
6117 {
6118 avr_out_compare (insn, op, plen);
6119 }
6120
6121 return "";
6122 }
6123
6124
6125 /* Output test instruction for PSImode. */
6126
6127 const char*
avr_out_tstpsi(rtx_insn * insn,rtx * op,int * plen)6128 avr_out_tstpsi (rtx_insn *insn, rtx *op, int *plen)
6129 {
6130 if (compare_sign_p (insn))
6131 {
6132 avr_asm_len ("tst %C0", op, plen, -1);
6133 }
6134 else if (reg_unused_after (insn, op[0])
6135 && compare_eq_p (insn))
6136 {
6137 /* Faster than sbiw if we can clobber the operand. */
6138 avr_asm_len ("or %A0,%B0" CR_TAB
6139 "or %A0,%C0", op, plen, -2);
6140 }
6141 else
6142 {
6143 avr_out_compare (insn, op, plen);
6144 }
6145
6146 return "";
6147 }
6148
6149
6150 /* Output test instruction for SImode. */
6151
6152 const char*
avr_out_tstsi(rtx_insn * insn,rtx * op,int * plen)6153 avr_out_tstsi (rtx_insn *insn, rtx *op, int *plen)
6154 {
6155 if (compare_sign_p (insn))
6156 {
6157 avr_asm_len ("tst %D0", op, plen, -1);
6158 }
6159 else if (reg_unused_after (insn, op[0])
6160 && compare_eq_p (insn))
6161 {
6162 /* Faster than sbiw if we can clobber the operand. */
6163 avr_asm_len ("or %A0,%B0" CR_TAB
6164 "or %A0,%C0" CR_TAB
6165 "or %A0,%D0", op, plen, -3);
6166 }
6167 else
6168 {
6169 avr_out_compare (insn, op, plen);
6170 }
6171
6172 return "";
6173 }
6174
6175
/* Generate asm equivalent for various shifts.  This only handles cases
   that are not already carefully hand-optimized in ?sh??i3_out.

   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
   OPERANDS[3] is a QImode scratch register from LD regs if
               available and SCRATCH, otherwise (no scratch available)

   TEMPL is an assembler template that shifts by one position.
   T_LEN is the length of this template.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length of the sequence in words;
                 don't output anything.  */

void
out_shift_with_cnt (const char *templ, rtx_insn *insn, rtx operands[],
                    int *plen, int t_len)
{
  bool second_label = true;
  bool saved_in_tmp = false;
  bool use_zero_reg = false;
  rtx op[5];

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (plen)
    *plen = 0;

  if (CONST_INT_P (operands[2]))
    {
      /* A scratch register is available when the insn is a PARALLEL
         with a real register in operand 3.  */
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
        return;

      if (count < 8 && !scratch)
        use_zero_reg = true;

      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */

          while (count-- > 0)
            avr_asm_len (templ, op, plen, t_len);

          return;
        }

      if (scratch)
        {
          avr_asm_len ("ldi %3,%2", op, plen, 1);
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);
        }

      /* The count is known up front, so the loop condition can be
         checked at the bottom only.  */
      second_label = false;
    }
  else if (MEM_P (op[2]))
    {
      /* Shift count lives in memory: load it into __tmp_reg__.  */
      rtx op_mov[2];

      op_mov[0] = op[3] = tmp_reg_rtx;
      op_mov[1] = op[2];

      out_movqi_r_mr (insn, op_mov, plen);
    }
  else if (register_operand (op[2], QImode))
    {
      op[3] = op[2];

      /* Copy the count if it is still needed afterwards or if it
         overlaps the register being shifted.  */
      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  /* A runtime count may be zero: enter the loop at its test.  */
  if (second_label)
    avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

  if (second_label)
    avr_asm_len ("2:", op, plen, 0);

  /* The __zero_reg__ trick counts by shifting the single set bit out.  */
  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

  if (saved_in_tmp)
    avr_asm_len ("mov %3,%4", op, plen, 1);
}
6297
6298
/* 8bit shift left ((char)x << i)

   INSN is the shift insn, OPERANDS[0] the register to shift in place and
   OPERANDS[2] the shift count.  Return the assembler template for the
   shift.  If LEN != NULL, set *LEN to the length of the sequence in
   words.  */

const char *
ashlqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;

      /* Point LEN somewhere if the caller doesn't want the length.  */
      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shifting by 8 or more clears the register.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsl %0";

        case 2:
          *len = 2;
          return ("lsl %0" CR_TAB
                  "lsl %0");

        case 3:
          *len = 3;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 4:
          /* SWAP + mask beats four single shifts if ANDI is usable.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return ("swap %0" CR_TAB
                      "andi %0,0xf0");
            }
          *len = 4;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xe0");
            }
          *len = 5;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xc0");
            }
          *len = 6;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 7:
          /* Only bit 0 survives a shift by 7: rotate it into carry,
             clear the register, rotate carry back in as bit 7.  */
          *len = 3;
          return ("ror %0" CR_TAB
                  "clr %0" CR_TAB
                  "ror %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  /* Variable or small unhandled count: generic one-bit-per-step loop.  */
  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
  return "";
}
6394
6395
/* 16bit shift left ((short)x << i)

   INSN is the shift insn, OPERANDS[0] the register pair to shift and
   OPERANDS[2] the shift count; OPERANDS[3] is a scratch register when
   the insn is a PARALLEL.  Return the assembler template for the shift.
   If LEN != NULL, set *LEN to the length of the sequence in words.  */

const char *
ashlhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      /* Scratch register availability and whether ANDI/LDI may target
         operand 0 (i.e. it is in R16..R31).  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shifting by 16 or more clears both bytes.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              *len = 6;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              *len = 8;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Implemented as a right shift by 2 of the bytes swapped.  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "mov %A0,__tmp_reg__");

        case 7:
          *len = 5;
          return ("lsr %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "ror %A0");

        case 8:
          /* Byte move; note the source operand 1 is read here.  */
          return *len = 2, ("mov %B0,%A1" CR_TAB
                            "clr %A0");

        case 9:
          *len = 3;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0");

        case 10:
          *len = 4;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 11:
          *len = 5;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3");
            }
          *len = 6;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "andi %B0,0xe0");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* Multiply by 0x20 to get the shift.  */
              *len = 5;
              return ("ldi %3,0x20" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "ldi %3,0xe0" CR_TAB
                      "and %B0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build constant 0x20 in r1 via SET/BLD, then multiply.  */
              *len = 6;
              return ("set" CR_TAB
                      "bld r1,5" CR_TAB
                      "mul %A0,r1" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %B0,0x40" CR_TAB
                      "mul %A0,%B0" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x40" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Small loop shifting the high byte 6 times.  */
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "ldi %A0,6" "\n1:\t"
                      "lsl %B0" CR_TAB
                      "dec %A0" CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* Implemented as a right shift by 2 into the high byte.  */
          *len = 6;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");

        case 15:
          *len = 4;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");
        }
      /* Restore the caller's LEN (may be NULL) for out_shift_with_cnt.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
  return "";
}
6651
6652
/* 24-bit shift left

   INSN is the shift insn, OP[0] the destination, OP[1] the source and
   OP[2] the shift count.  Return "".

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length of the sequence in words;
                 don't output anything.  */

const char*
avr_out_ashlpsi3 (rtx_insn *insn, rtx *op, int *plen)
{
  if (plen)
    *plen = 0;

  if (CONST_INT_P (op[2]))
    {
      switch (INTVAL (op[2]))
        {
        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* Shifting by 24 or more clears all three bytes.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %C0", op, plen, 3);

        case 8:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* Order the byte moves so that an overlapping source is
               not clobbered before it is read.  */
            if (reg0 >= reg1)
              return avr_asm_len ("mov %C0,%B1" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "clr %A0", op, plen, 3);
            else
              return avr_asm_len ("clr %A0" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "mov %C0,%B1", op, plen, 3);
          }

        case 16:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* The move is a no-op when source and destination already
               line up two registers apart.  */
            if (reg0 + 2 != reg1)
              avr_asm_len ("mov %C0,%A0", op, plen, 1);

            return avr_asm_len ("clr %B0" CR_TAB
                                "clr %A0", op, plen, 2);
          }

        case 23:
          /* Only bit 0 survives, moved into the top bit of byte C.  */
          return avr_asm_len ("clr %C0" CR_TAB
                              "lsr %A0" CR_TAB
                              "ror %C0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %A0", op, plen, 5);
        }
    }

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0", insn, op, plen, 3);
  return "";
}
6714
6715
/* 32bit shift left ((long)x << i)

   INSN is the shift insn, OPERANDS[0] the destination, OPERANDS[1] the
   source and OPERANDS[2] the shift count.  Return the assembler template
   for the shift.  If LEN != NULL, set *LEN to the length of the sequence
   in words.  */

const char *
ashlsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shifting by 32 or more clears all four bytes.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Order the byte moves so that an overlapping source is
               not clobbered before it is read.  */
            if (reg0 >= reg1)
              return ("mov %D0,%C1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "clr %A0");
            else
              return ("clr %A0" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %D0,%C1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            /* No move needed when the words already line up.  */
            if (reg0 + 2 == reg1)
              return *len = 2, ("clr %B0" CR_TAB
                                "clr %A0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %C0,%A1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
            else
              return *len = 4, ("mov %C0,%A1" CR_TAB
                                "mov %D0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
          }

        case 24:
          *len = 4;
          return ("mov %D0,%A1" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 31:
          /* Only bit 0 survives, moved into the top bit of byte D.  */
          *len = 6;
          return ("clr %D0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");
        }
      /* Restore the caller's LEN (may be NULL) for out_shift_with_cnt.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
6804
/* 8bit arithmetic shift right ((signed char)x >> i)

   INSN is the shift insn, OPERANDS[0] the register to shift in place and
   OPERANDS[2] the shift count.  Return the assembler template for the
   shift.  If LEN != NULL, set *LEN to the length of the sequence in
   words.  */

const char *
ashrqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;

      /* Point LEN somewhere if the caller doesn't want the length.  */
      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 1:
          *len = 1;
          return "asr %0";

        case 2:
          *len = 2;
          return ("asr %0" CR_TAB
                  "asr %0");

        case 3:
          *len = 3;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 4:
          *len = 4;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 5:
          *len = 5;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 6:
          /* Cache bit 6 in T, replicate the sign via SBC (carry holds
             bit 7 after LSL), then drop bit 6 back in as bit 0.  */
          *len = 4;
          return ("bst %0,6" CR_TAB
                  "lsl %0" CR_TAB
                  "sbc %0,%0" CR_TAB
                  "bld %0,0");

        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* fall through */

        case 7:
          /* LSL moves the sign into carry; SBC then replicates it
             across the whole byte (0x00 or 0xff).  */
          *len = 2;
          return ("lsl %0" CR_TAB
                  "sbc %0,%0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  /* Variable or small unhandled count: generic one-bit-per-step loop.  */
  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
  return "";
}
6875
6876
/* 16bit arithmetic shift right ((signed short)x >> i)

   Output the assembler code for the shift described by INSN and
   OPERANDS.  If LEN is non-NULL, output nothing and instead store the
   length of the sequence (in instruction words) in *LEN.  */

const char *
ashrhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      /* A PARALLEL pattern means an 8-bit scratch register is available
         as operand %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      /* Registers R16..R31 can be used with immediate instructions
         like LDI.  */
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      /* Point LEN at a dummy so the cases below may store *len
         unconditionally.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 4:
	case 5:
	  /* XXX try to optimize this too? */
	  break;

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  *len = 8;
	  return ("mov __tmp_reg__,%A0" CR_TAB
		  "mov %A0,%B0" CR_TAB
		  "lsl __tmp_reg__" CR_TAB
		  "rol %A0" CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "lsl __tmp_reg__" CR_TAB
		  "rol %A0" CR_TAB
		  "rol %B0");

	case 7:
	  *len = 4;
	  return ("lsl %A0" CR_TAB
		  "mov %A0,%B0" CR_TAB
		  "rol %A0" CR_TAB
		  "sbc %B0,%B0");

	case 8:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    /* In-place shift can reuse the high byte directly; otherwise
	       build the result in the destination from the source.  */
	    if (reg0 == reg1)
	      return *len = 3, ("mov %A0,%B0" CR_TAB
				"lsl %B0" CR_TAB
				"sbc %B0,%B0");
	    else
	      return *len = 4, ("mov %A0,%B1" CR_TAB
				"clr %B0" CR_TAB
				"sbrc %A0,7" CR_TAB
				"dec %B0");
	  }

	case 9:
	  *len = 4;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0" CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0");

	case 10:
	  *len = 5;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0" CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0" CR_TAB
		  "asr %A0");

	case 11:
	  /* With a hardware multiplier, x >> 11 can be done as a signed
	     multiply by 2^5 taking the high product byte.  */
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %A0,0x20" CR_TAB
		      "muls %B0,%A0" CR_TAB
		      "mov %A0,r1" CR_TAB
		      "sbc %B0,%B0" CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 6;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0" CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0" CR_TAB
		  "asr %A0" CR_TAB
		  "asr %A0");

	case 12:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %A0,0x10" CR_TAB
		      "muls %B0,%A0" CR_TAB
		      "mov %A0,r1" CR_TAB
		      "sbc %B0,%B0" CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 7;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0" CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0" CR_TAB
		  "asr %A0" CR_TAB
		  "asr %A0" CR_TAB
		  "asr %A0");

	case 13:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %A0,0x08" CR_TAB
		      "muls %B0,%A0" CR_TAB
		      "mov %A0,r1" CR_TAB
		      "sbc %B0,%B0" CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size)
	    break;  /* scratch ? 5 : 7 */
	  *len = 8;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0" CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0" CR_TAB
		  "asr %A0" CR_TAB
		  "asr %A0" CR_TAB
		  "asr %A0" CR_TAB
		  "asr %A0");

	case 14:
	  *len = 5;
	  return ("lsl %B0" CR_TAB
		  "sbc %A0,%A0" CR_TAB
		  "lsl %B0" CR_TAB
		  "mov %B0,%A0" CR_TAB
		  "rol %A0");

	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* Shift counts >= 16: the result is just the replicated sign
	     bit, same code as shifting by 15.  */

	  /* fall through */

	case 15:
	  return *len = 3, ("lsl %B0" CR_TAB
			    "sbc %A0,%A0" CR_TAB
			    "mov %B0,%A0");
	}
      /* Restore the caller's (possibly NULL) pointer for the generic
	 loop emitter below.  */
      len = t;
    }
  out_shift_with_cnt ("asr %B0" CR_TAB
		      "ror %A0", insn, operands, len, 2);
  return "";
}
7038
7039
/* 24-bit arithmetic shift right

   INSN is the shift insn, OP its operands.  If PLEN is non-NULL, only
   store the sequence length (in words) in *PLEN instead of emitting
   code.  */

const char*
avr_out_ashrpsi3 (rtx_insn *insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
	*plen = 0;

      switch (INTVAL (op[2]))
	{
	case 8:
	  /* Byte-wise move; copy direction depends on register overlap
	     so that no byte is clobbered before it is read.  */
	  if (dest <= src)
	    return avr_asm_len ("mov %A0,%B1" CR_TAB
				"mov %B0,%C1" CR_TAB
				"clr %C0" CR_TAB
				"sbrc %B0,7" CR_TAB
				"dec %C0", op, plen, 5);
	  else
	    return avr_asm_len ("clr %C0" CR_TAB
				"sbrc %C1,7" CR_TAB
				"dec %C0" CR_TAB
				"mov %B0,%C1" CR_TAB
				"mov %A0,%B1", op, plen, 5);

	case 16:
	  /* The move is a no-op when the high source byte already sits
	     in the low destination byte.  */
	  if (dest != src + 2)
	    avr_asm_len ("mov %A0,%C1", op, plen, 1);

	  return avr_asm_len ("clr %B0"  CR_TAB
			      "sbrc %A0,7" CR_TAB
			      "com %B0" CR_TAB
			      "mov %C0,%B0", op, plen, 4);

	default:
	  if (INTVAL (op[2]) < 24)
	    break;

	  /* Counts >= 24 give just the replicated sign bit; same code
	     as shifting by 23.  */

	  /* fall through */

	case 23:
	  return avr_asm_len ("lsl %C0"     CR_TAB
			      "sbc %A0,%A0" CR_TAB
			      "mov %B0,%A0" CR_TAB
			      "mov %C0,%A0", op, plen, 4);
	} /* switch */
    }

  out_shift_with_cnt ("asr %C0" CR_TAB
		      "ror %B0" CR_TAB
		      "ror %A0", insn, op, plen, 3);
  return "";
}
7097
7098
/* 32-bit arithmetic shift right ((signed long)x >> i)

   INSN/OPERANDS describe the shift.  If LEN is non-NULL, output nothing
   and store the sequence length (in words) in *LEN.  */

const char *
ashrsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;
      int *t = len;

      /* Point LEN at a dummy so the cases below may store *len
	 unconditionally.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 8:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len=6;
	    /* Copy direction chosen so overlapping registers are read
	       before being overwritten.  */
	    if (reg0 <= reg1)
	      return ("mov %A0,%B1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "clr %D0"     CR_TAB
		      "sbrc %C0,7"  CR_TAB
		      "dec %D0");
	    else
	      return ("clr %D0"     CR_TAB
		      "sbrc %D1,7"  CR_TAB
		      "dec %D0"     CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %A0,%B1");
	  }

	case 16:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    if (reg0 == reg1 + 2)
	      return *len = 4, ("clr %D0"     CR_TAB
				"sbrc %B0,7"  CR_TAB
				"com %D0"     CR_TAB
				"mov %C0,%D0");
	    if (AVR_HAVE_MOVW)
	      return *len = 5, ("movw %A0,%C1" CR_TAB
				"clr %D0"      CR_TAB
				"sbrc %B0,7"   CR_TAB
				"com %D0"      CR_TAB
				"mov %C0,%D0");
	    else
	      return *len = 6, ("mov %B0,%D1" CR_TAB
				"mov %A0,%C1" CR_TAB
				"clr %D0"     CR_TAB
				"sbrc %B0,7"  CR_TAB
				"com %D0"     CR_TAB
				"mov %C0,%D0");
	  }

	case 24:
	  return *len = 6, ("mov %A0,%D1" CR_TAB
			    "clr %D0"     CR_TAB
			    "sbrc %A0,7"  CR_TAB
			    "com %D0"     CR_TAB
			    "mov %B0,%D0" CR_TAB
			    "mov %C0,%D0");

	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* Counts >= 32 give just the replicated sign bit; same code
	     as shifting by 31.  */

	  /* fall through */

	case 31:
	  if (AVR_HAVE_MOVW)
	    return *len = 4, ("lsl %D0"     CR_TAB
			      "sbc %A0,%A0" CR_TAB
			      "mov %B0,%A0" CR_TAB
			      "movw %C0,%A0");
	  else
	    return *len = 5, ("lsl %D0"     CR_TAB
			      "sbc %A0,%A0" CR_TAB
			      "mov %B0,%A0" CR_TAB
			      "mov %C0,%A0" CR_TAB
			      "mov %D0,%A0");
	}
      /* Restore the caller's (possibly NULL) pointer for the generic
	 loop emitter below.  */
      len = t;
    }
  out_shift_with_cnt ("asr %D0" CR_TAB
		      "ror %C0" CR_TAB
		      "ror %B0" CR_TAB
		      "ror %A0", insn, operands, len, 4);
  return "";
}
7195
/* 8-bit logic shift right ((unsigned char)x >> i)

   INSN/OPERANDS describe the shift.  If LEN is non-NULL, output nothing
   and store the sequence length (in words) in *LEN.  */

const char *
lshrqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;

      /* Point LEN at a dummy so the cases below may store *len
	 unconditionally.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* Counts >= 8 shift everything out: result is 0.  */
	  *len = 1;
	  return "clr %0";

	case 1:
	  *len = 1;
	  return "lsr %0";

	case 2:
	  *len = 2;
	  return ("lsr %0" CR_TAB
		  "lsr %0");
	case 3:
	  *len = 3;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 4:
	  /* SWAP exchanges nibbles; with an upper register ANDI can mask
	     the now-low nibble, saving two words over four LSRs.  */
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len=2;
	      return ("swap %0" CR_TAB
		      "andi %0,0x0f");
	    }
	  *len = 4;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 5:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return ("swap %0" CR_TAB
		      "lsr %0"  CR_TAB
		      "andi %0,0x7");
	    }
	  *len = 5;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return ("swap %0" CR_TAB
		      "lsr %0"  CR_TAB
		      "lsr %0"  CR_TAB
		      "andi %0,0x3");
	    }
	  *len = 6;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 7:
	  /* Only the MSB survives: rotate it into carry, clear, rotate
	     the carry back in as bit 0.  */
	  *len = 3;
	  return ("rol %0" CR_TAB
		  "clr %0" CR_TAB
		  "rol %0");
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("lsr %0",
		      insn, operands, len, 1);
  return "";
}
7290
/* 16-bit logic shift right ((unsigned short)x >> i)

   INSN/OPERANDS describe the shift.  If LEN is non-NULL, output nothing
   and store the sequence length (in words) in *LEN.  */

const char *
lshrhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      /* A PARALLEL pattern means an 8-bit scratch register is available
	 as operand %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      /* Registers R16..R31 can be used with immediate instructions
	 like ANDI/LDI.  */
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      /* Point LEN at a dummy so the cases below may store *len
	 unconditionally.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* Counts >= 16 shift everything out: result is 0.  */
	  *len = 2;
	  return ("clr %B0" CR_TAB
		  "clr %A0");

	case 4:
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (ldi_ok)
	    {
	      *len = 6;
	      return ("swap %B0"      CR_TAB
		      "swap %A0"      CR_TAB
		      "andi %A0,0x0f" CR_TAB
		      "eor %A0,%B0"   CR_TAB
		      "andi %B0,0x0f" CR_TAB
		      "eor %A0,%B0");
	    }
	  if (scratch)
	    {
	      *len = 7;
	      return ("swap %B0"    CR_TAB
		      "swap %A0"    CR_TAB
		      "ldi %3,0x0f" CR_TAB
		      "and %A0,%3"  CR_TAB
		      "eor %A0,%B0" CR_TAB
		      "and %B0,%3"  CR_TAB
		      "eor %A0,%B0");
	    }
	  break;  /* optimize_size ? 6 : 8 */

	case 5:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  if (ldi_ok)
	    {
	      *len = 8;
	      return ("lsr %B0"       CR_TAB
		      "ror %A0"       CR_TAB
		      "swap %B0"      CR_TAB
		      "swap %A0"      CR_TAB
		      "andi %A0,0x0f" CR_TAB
		      "eor %A0,%B0"   CR_TAB
		      "andi %B0,0x0f" CR_TAB
		      "eor %A0,%B0");
	    }
	  if (scratch)
	    {
	      *len = 9;
	      return ("lsr %B0"     CR_TAB
		      "ror %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "swap %A0"    CR_TAB
		      "ldi %3,0x0f" CR_TAB
		      "and %A0,%3"  CR_TAB
		      "eor %A0,%B0" CR_TAB
		      "and %B0,%3"  CR_TAB
		      "eor %A0,%B0");
	    }
	  break;  /* 10 */

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  /* x >> 6 done as (x << 2) >> 8 via the temporary register.  */
	  *len = 9;
	  return ("clr __tmp_reg__" CR_TAB
		  "lsl %A0"         CR_TAB
		  "rol %B0"         CR_TAB
		  "rol __tmp_reg__" CR_TAB
		  "lsl %A0"         CR_TAB
		  "rol %B0"         CR_TAB
		  "rol __tmp_reg__" CR_TAB
		  "mov %A0,%B0"     CR_TAB
		  "mov %B0,__tmp_reg__");

	case 7:
	  *len = 5;
	  return ("lsl %A0"     CR_TAB
		  "mov %A0,%B0" CR_TAB
		  "rol %A0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "neg %B0");

	case 8:
	  return *len = 2, ("mov %A0,%B1" CR_TAB
			    "clr %B0");

	case 9:
	  *len = 3;
	  return ("mov %A0,%B0" CR_TAB
		  "clr %B0"     CR_TAB
		  "lsr %A0");

	case 10:
	  *len = 4;
	  return ("mov %A0,%B0" CR_TAB
		  "clr %B0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0");

	case 11:
	  *len = 5;
	  return ("mov %A0,%B0" CR_TAB
		  "clr %B0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0");

	case 12:
	  if (ldi_ok)
	    {
	      *len = 4;
	      return ("mov %A0,%B0" CR_TAB
		      "clr %B0"     CR_TAB
		      "swap %A0"    CR_TAB
		      "andi %A0,0x0f");
	    }
	  if (scratch)
	    {
	      *len = 5;
	      return ("mov %A0,%B0" CR_TAB
		      "clr %B0"     CR_TAB
		      "swap %A0"    CR_TAB
		      "ldi %3,0x0f" CR_TAB
		      "and %A0,%3");
	    }
	  *len = 6;
	  return ("mov %A0,%B0" CR_TAB
		  "clr %B0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0");

	case 13:
	  if (ldi_ok)
	    {
	      *len = 5;
	      return ("mov %A0,%B0" CR_TAB
		      "clr %B0"     CR_TAB
		      "swap %A0"    CR_TAB
		      "lsr %A0"     CR_TAB
		      "andi %A0,0x07");
	    }
	  /* With a hardware multiplier, x >> 13 is an unsigned multiply
	     by 2^3 taking the high product byte from r1.  */
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return ("ldi %3,0x08" CR_TAB
		      "mul %B0,%3"  CR_TAB
		      "mov %A0,r1"  CR_TAB
		      "clr %B0"     CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (scratch)
	    {
	      *len = 6;
	      return ("mov %A0,%B0" CR_TAB
		      "clr %B0"     CR_TAB
		      "swap %A0"    CR_TAB
		      "lsr %A0"     CR_TAB
		      "ldi %3,0x07" CR_TAB
		      "and %A0,%3");
	    }
	  if (AVR_HAVE_MUL)
	    {
	      /* Build the multiplier 8 in r1 via SET/BLD since no upper
		 register is available for LDI.  */
	      *len = 6;
	      return ("set"        CR_TAB
		      "bld r1,3"   CR_TAB
		      "mul %B0,r1" CR_TAB
		      "mov %A0,r1" CR_TAB
		      "clr %B0"    CR_TAB
		      "clr __zero_reg__");
	    }
	  *len = 7;
	  return ("mov %A0,%B0" CR_TAB
		  "clr %B0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0");

	case 14:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %A0,0x04" CR_TAB
		      "mul %B0,%A0"  CR_TAB
		      "mov %A0,r1"   CR_TAB
		      "clr %B0"      CR_TAB
		      "clr __zero_reg__");
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return ("ldi %3,0x04" CR_TAB
		      "mul %B0,%3"  CR_TAB
		      "mov %A0,r1"  CR_TAB
		      "clr %B0"     CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && ldi_ok)
	    {
	      /* Small loop of 6 single-bit shifts using %B0 as counter;
		 "1:"/"brne 1b" is a local assembler label.  */
	      *len = 5;
	      return ("mov %A0,%B0" CR_TAB
		      "ldi %B0,6" "\n1:\t"
		      "lsr %A0"    CR_TAB
		      "dec %B0"    CR_TAB
		      "brne 1b");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  /* x >> 14 done as (x << 2) >> 16.  */
	  *len = 6;
	  return ("clr %A0" CR_TAB
		  "lsl %B0" CR_TAB
		  "rol %A0" CR_TAB
		  "lsl %B0" CR_TAB
		  "rol %A0" CR_TAB
		  "clr %B0");

	case 15:
	  *len = 4;
	  return ("clr %A0" CR_TAB
		  "lsl %B0" CR_TAB
		  "rol %A0" CR_TAB
		  "clr %B0");
	}
      /* Restore the caller's (possibly NULL) pointer for the generic
	 loop emitter below.  */
      len = t;
    }
  out_shift_with_cnt ("lsr %B0" CR_TAB
		      "ror %A0", insn, operands, len, 2);
  return "";
}
7546
7547
/* 24-bit logic shift right

   INSN is the shift insn, OP its operands.  If PLEN is non-NULL, only
   store the sequence length (in words) in *PLEN instead of emitting
   code.  */

const char*
avr_out_lshrpsi3 (rtx_insn *insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
	*plen = 0;

      switch (INTVAL (op[2]))
	{
	case 8:
	  /* Byte-wise move; copy direction depends on register overlap
	     so that no byte is clobbered before it is read.  */
	  if (dest <= src)
	    return avr_asm_len ("mov %A0,%B1" CR_TAB
				"mov %B0,%C1" CR_TAB
				"clr %C0", op, plen, 3);
	  else
	    return avr_asm_len ("clr %C0"     CR_TAB
				"mov %B0,%C1" CR_TAB
				"mov %A0,%B1", op, plen, 3);

	case 16:
	  /* The move is a no-op when the high source byte already sits
	     in the low destination byte.  */
	  if (dest != src + 2)
	    avr_asm_len ("mov %A0,%C1", op, plen, 1);

	  return avr_asm_len ("clr %B0"  CR_TAB
			      "clr %C0", op, plen, 2);

	default:
	  if (INTVAL (op[2]) < 24)
	    break;

	  /* Counts >= 24 leave at most the former MSB; same code as
	     shifting by 23.  */

	  /* fall through */

	case 23:
	  return avr_asm_len ("clr %A0"    CR_TAB
			      "sbrc %C0,7" CR_TAB
			      "inc %A0"    CR_TAB
			      "clr %B0"    CR_TAB
			      "clr %C0", op, plen, 5);
	} /* switch */
    }

  out_shift_with_cnt ("lsr %C0" CR_TAB
		      "ror %B0" CR_TAB
		      "ror %A0", insn, op, plen, 3);
  return "";
}
7600
7601
/* 32-bit logic shift right ((unsigned int)x >> i)

   INSN/OPERANDS describe the shift.  If LEN is non-NULL, output nothing
   and store the sequence length (in words) in *LEN.  */

const char *
lshrsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;
      int *t = len;

      /* Point LEN at a dummy so the cases below may store *len
	 unconditionally.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* Counts >= 32 shift everything out: result is 0.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 3, ("clr %D0" CR_TAB
			      "clr %C0" CR_TAB
			      "movw %A0,%C0");
	  *len = 4;
	  return ("clr %D0" CR_TAB
		  "clr %C0" CR_TAB
		  "clr %B0" CR_TAB
		  "clr %A0");

	case 8:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    /* Copy direction chosen so overlapping registers are read
	       before being overwritten.  */
	    if (reg0 <= reg1)
	      return ("mov %A0,%B1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "clr %D0");
	    else
	      return ("clr %D0"     CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %A0,%B1");
	  }

	case 16:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    if (reg0 == reg1 + 2)
	      return *len = 2, ("clr %C0" CR_TAB
				"clr %D0");
	    if (AVR_HAVE_MOVW)
	      return *len = 3, ("movw %A0,%C1" CR_TAB
				"clr %C0"      CR_TAB
				"clr %D0");
	    else
	      return *len = 4, ("mov %B0,%D1" CR_TAB
				"mov %A0,%C1" CR_TAB
				"clr %C0"     CR_TAB
				"clr %D0");
	  }

	case 24:
	  return *len = 4, ("mov %A0,%D1" CR_TAB
			    "clr %B0"     CR_TAB
			    "clr %C0"     CR_TAB
			    "clr %D0");

	case 31:
	  /* Only the former MSB survives as bit 0.  */
	  *len = 6;
	  return ("clr %A0"    CR_TAB
		  "sbrc %D0,7" CR_TAB
		  "inc %A0"    CR_TAB
		  "clr %B0"    CR_TAB
		  "clr %C0"    CR_TAB
		  "clr %D0");
	}
      /* Restore the caller's (possibly NULL) pointer for the generic
	 loop emitter below.  */
      len = t;
    }
  out_shift_with_cnt ("lsr %D0" CR_TAB
		      "ror %C0" CR_TAB
		      "ror %B0" CR_TAB
		      "ror %A0", insn, operands, len, 4);
  return "";
}
7690
7691
/* Output addition of register XOP[0] and compile time constant XOP[2].
   CODE == PLUS: perform addition by using ADD instructions or
   CODE == MINUS: perform addition by using SUB instructions:

      XOP[0] = XOP[0] + XOP[2]

   Or perform addition/subtraction with register XOP[2] depending on CODE:

      XOP[0] = XOP[0] +/- XOP[2]

   If PLEN == NULL, print assembler instructions to perform the operation;
   otherwise, set *PLEN to the length of the instruction sequence (in words)
   printed with PLEN == NULL.  XOP[3] is an 8-bit scratch register or NULL_RTX.
   Set *PCC to effect on cc0 according to respective CC_* insn attribute.

   CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
   CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
   If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
   the subtrahend in the original insn, provided it is a compile time constant.
   In all other cases, SIGN is 0.

   If OUT_LABEL is true, print the final 0: label which is needed for
   saturated addition / subtraction.  The only case where OUT_LABEL = false
   is useful is for saturated addition / subtraction performed during
   fixed-point rounding, cf. `avr_out_round'.  */

static void
avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
                enum rtx_code code_sat, int sign, bool out_label)
{
  /* MODE of the operation.  */
  machine_mode mode = GET_MODE (xop[0]);

  /* INT_MODE of the same size.  */
  scalar_int_mode imode = int_mode_for_mode (mode).require ();

  /* Number of bytes to operate on.  */
  int n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit scratch register */
  rtx op[3];

  /* Started the operation?  Before starting the operation we may skip
     adding 0.  This is no more true after the operation started because
     carry must be taken into account.  */
  bool started = false;

  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
  rtx xval = xop[2];

  /* Output a BRVC instruction.  Only needed with saturation.  */
  bool out_brvc = true;

  if (plen)
    *plen = 0;

  if (REG_P (xop[2]))
    {
      /* Register operand: plain byte-wise ADD/ADC resp. SUB/SBC chain.  */
      *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_CLOBBER;

      for (int i = 0; i < n_bytes; i++)
        {
          /* We operate byte-wise on the destination.  */
          op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
          op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);

          if (i == 0)
            avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
                         op, plen, 1);
          else
            avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
                         op, plen, 1);
        }

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));

          /* x - x needs no saturation code: the result is exact.  */
          if (MINUS == code)
            return;
        }

      goto saturate;
    }

  /* Except in the case of ADIW with 16-bit register (see below)
     addition does not set cc0 in a usable way.  */

  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;

  /* Fixed-point constants are handled through their integer image.  */
  if (CONST_FIXED_P (xval))
    xval = avr_to_int_mode (xval);

  /* Adding/Subtracting zero is a no-op.  */

  if (xval == const0_rtx)
    {
      *pcc = CC_NONE;
      return;
    }

  /* Perform subtraction of a constant as addition of its negative.  */
  if (MINUS == code)
    xval = simplify_unary_operation (NEG, imode, xval, imode);

  op[2] = xop[3];

  if (SS_PLUS == code_sat && MINUS == code
      && sign < 0
      && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
                  & GET_MODE_MASK (QImode)))
    {
      /* We compute x + 0x80 by means of SUB instructions.  We negated the
         constant subtrahend above and are left with  x - (-128)  so that we
         need something like SUBI r,128 which does not exist because SUBI sets
         V according to the sign of the subtrahend.  Notice the only case
         where this must be done is when NEG overflowed in case [2s] because
         the V computation needs the right sign of the subtrahend.  */

      rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes - 1);

      avr_asm_len ("subi %0,128" CR_TAB
                   "brmi 0f", &msb, plen, 2);
      out_brvc = false;

      goto saturate;
    }

  for (int i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = gen_int_mode (val8, QImode);

      /* To get usable cc0 no low-bytes must have been skipped.  */

      if (i && !started)
        *pcc = CC_CLOBBER;

      if (!started
          && i % 2 == 0
          && i + 2 <= n_bytes
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);

          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
             i.e. operate word-wise.  */

          if (val16 < 64)
            {
              if (val16 != 0)
                {
                  started = true;
                  avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
                               op, plen, 1);

                  if (n_bytes == 2 && PLUS == code)
                    *pcc = CC_SET_CZN;
                }

              /* Skip the high byte of the word just handled.  */
              i++;
              continue;
            }
        }

      if (val8 == 0)
        {
          /* A zero byte can be skipped entirely unless a carry from a
             lower byte must be propagated.  */
          if (started)
            avr_asm_len (code == PLUS
                         ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
                         op, plen, 1);
          continue;
        }
      else if ((val8 == 1 || val8 == 0xff)
               && UNKNOWN == code_sat
               && !started
               && i == n_bytes - 1)
        {
          /* The topmost byte +/- 1 reduces to a single INC or DEC.  */
          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
                       op, plen, 1);
          *pcc = CC_CLOBBER;
          break;
        }

      switch (code)
        {
        case PLUS:

          gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));

          if (plen != NULL && UNKNOWN != code_sat)
            {
              /* This belongs to the x + 0x80 corner case.  The code with
                 ADD instruction is not smaller, thus make this case
                 expensive so that the caller won't pick it.  */

              *plen += 10;
              break;
            }

          /* Re-load the scratch register only when its value changes.  */
          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);

          break; /* PLUS */

        case MINUS:

          if (ld_reg_p)
            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
          else
            {
              gcc_assert (plen != NULL || REG_P (op[2]));

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
            }

          break; /* MINUS */

        default:
          /* Unknown code */
          gcc_unreachable();
        }

      started = true;

    } /* for all sub-bytes */

 saturate:

  if (UNKNOWN == code_sat)
    return;

  *pcc = (int) CC_CLOBBER;

  /* Vanilla addition/subtraction is done.  We are left with saturation.

     We have to compute  A = A <op> B  where  A  is a register and
     B is a register or a non-zero compile time constant CONST.
     A is register class "r" if unsigned && B is REG.  Otherwise, A is in "d".
     B stands for the original operand $2 in INSN.  In the case of B = CONST,
     SIGN in { -1, 1 } is the sign of B.  Otherwise, SIGN is 0.

     CODE is the instruction flavor we use in the asm sequence to perform <op>.


     unsigned
     operation        | code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  C == 1  |  const, reg  | u+ = 0xff |  [1u]
     +  as  a - (-b)  |  sub  |  C == 0  |  const       | u+ = 0xff |  [2u]
     -  as  a - b     |  sub  |  C == 1  |  const, reg  | u- = 0    |  [3u]
     -  as  a + (-b)  |  add  |  C == 0  |  const       | u- = 0    |  [4u]


     signed
     operation        | code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  V == 1  |  const, reg  |     s+    |  [1s]
     +  as  a - (-b)  |  sub  |  V == 1  |  const       |     s+    |  [2s]
     -  as  a - b     |  sub  |  V == 1  |  const, reg  |     s-    |  [3s]
     -  as  a + (-b)  |  add  |  V == 1  |  const       |     s-    |  [4s]

     s+  =  b < 0  ?  -0x80 :  0x7f
     s-  =  b < 0  ?   0x7f : -0x80

     The cases a - b actually perform  a - (-(-b))  if B is CONST.
  */

  /* op[0]: MSB of the result, op[1]: byte below it (if any).  */
  op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
  op[1] = n_bytes > 1
    ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
    : NULL_RTX;

  bool need_copy = true;
  int len_call = 1 + AVR_HAVE_JMP_CALL;

  switch (code_sat)
    {
    default:
      gcc_unreachable();

    case SS_PLUS:
    case SS_MINUS:

      /* Skip the saturation code when no signed overflow occurred.  */
      if (out_brvc)
        avr_asm_len ("brvc 0f", op, plen, 1);

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          /* [1s,reg] */

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 2);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "ldi %1,0xff" CR_TAB
                         "adc %1,__zero_reg__" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 4);
        }
      else if (sign == 0 && PLUS == code)
        {
          /* [1s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "sbrs %2,7" CR_TAB
                         "dec %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "cp %2,%0" CR_TAB
                         "sbc %1,%1" CR_TAB
                         "sbci %0,0", op, plen, 4);
        }
      else if (sign == 0 && MINUS == code)
        {
          /* [3s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "sbrs %2,7" CR_TAB
                         "inc %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "cp %0,%2" CR_TAB
                         "sbc %1,%1" CR_TAB
                         "sbci %0,-1", op, plen, 4);
        }
      else if ((sign < 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B < 0] [2s,B < 0] */
          /* [3s,const,B > 0] [4s,B > 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("%~call __clr_8", op, plen, len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x80", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("clr %1", op, plen, 1);
        }
      else if ((sign > 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B > 0] [2s,B > 0] */
          /* [3s,const,B < 0] [4s,B < 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("sec" CR_TAB
                           "%~call __sbc_8", op, plen, 1 + len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x7f", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("ldi %1,0xff", op, plen, 1);
        }
      else
        gcc_unreachable();

      break;

    case US_PLUS:
      /* [1u] : [2u] */

      avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          if (MINUS == code)
            avr_asm_len ("sec", op, plen, 1);
          avr_asm_len ("%~call __sbc_8", op, plen, len_call);

          need_copy = false;
        }
      else
        {
          if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
            avr_asm_len ("sec" CR_TAB
                         "sbc %0,%0", op, plen, 2);
          else
            avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
                         op, plen, 1);
        }
      break; /* US_PLUS */

    case US_MINUS:
      /* [4u] : [3u] */

      avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          avr_asm_len ("%~call __clr_8", op, plen, len_call);
          need_copy = false;
        }
      else
        avr_asm_len ("clr %0", op, plen, 1);

      break;
    }

  /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
     Now copy the right value to the LSBs.  */

  if (need_copy && n_bytes > 1)
    {
      if (US_MINUS == code_sat || US_PLUS == code_sat)
        {
          avr_asm_len ("mov %1,%0", op, plen, 1);

          if (n_bytes > 2)
            {
              op[0] = xop[0];
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %0,%1", op, plen, 1);
              else
                avr_asm_len ("mov %A0,%1" CR_TAB
                             "mov %B0,%1", op, plen, 2);
            }
        }
      else if (n_bytes > 2)
        {
          op[0] = xop[0];
          avr_asm_len ("mov %A0,%1" CR_TAB
                       "mov %B0,%1", op, plen, 2);
        }
    }

  if (need_copy && n_bytes == 8)
    {
      if (AVR_HAVE_MOVW)
        avr_asm_len ("movw %r0+2,%0" CR_TAB
                     "movw %r0+4,%0", xop, plen, 2);
      else
        avr_asm_len ("mov %r0+2,%0" CR_TAB
                     "mov %r0+3,%0" CR_TAB
                     "mov %r0+4,%0" CR_TAB
                     "mov %r0+5,%0", xop, plen, 4);
    }

  if (out_label)
    avr_asm_len ("0:", op, plen, 0);
}
8164
8165
/* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
   is not a compile-time integer constant (e.g. a symbolic address):

      XOP[0] = XOP[0] +/- XOP[2]

   This is a helper for the function below.  The only insns that need this
   are additions/subtraction for pointer modes, i.e. HImode and PSImode.

   CODE selects addition or subtraction, PLEN the emit-vs-length mode as
   usual, and *PCC receives the effect on cc0.  Always returns "".  */

static const char*
avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
{
  machine_mode mode = GET_MODE (xop[0]);

  /* Only pointer modes want to add symbols.  */

  gcc_assert (mode == HImode || mode == PSImode);

  *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;

  /* Addition is done as subtraction of the negated constant because
     there is no ADDI instruction; SUBI/SBCI take immediates.  */
  avr_asm_len (PLUS == code
               ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
               : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",
               xop, plen, -2);

  /* PSImode has a third byte handled with hlo8.  */
  if (PSImode == mode)
    avr_asm_len (PLUS == code
                 ? "sbci %C0,hlo8(-(%2))"
                 : "sbci %C0,hlo8(%2)", xop, plen, 1);
  return "";
}
8196
8197
8198 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
8199
8200 INSN is a single_set insn or an insn pattern with a binary operation as
8201 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
8202
8203 XOP are the operands of INSN. In the case of 64-bit operations with
8204 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
8205 The non-saturating insns up to 32 bits may or may not supply a "d" class
8206 scratch as XOP[3].
8207
8208 If PLEN == NULL output the instructions.
8209 If PLEN != NULL set *PLEN to the length of the sequence in words.
8210
8211 PCC is a pointer to store the instructions' effect on cc0.
8212 PCC may be NULL.
8213
8214 PLEN and PCC default to NULL.
8215
8216 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
8217
8218 Return "" */
8219
const char*
avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
{
  int cc_plus, cc_minus, cc_dummy;
  int len_plus, len_minus;
  rtx op[4];
  /* INSN may be a full insn or just a SET pattern; normalize to the SET.  */
  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
  rtx xdest = SET_DEST (xpattern);
  machine_mode mode = GET_MODE (xdest);
  scalar_int_mode imode = int_mode_for_mode (mode).require ();
  int n_bytes = GET_MODE_SIZE (mode);
  /* CODE_SAT distinguishes saturating from wrapping arithmetic; CODE is
     the plain direction (PLUS or MINUS) of the operation.  */
  enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
  enum rtx_code code
    = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
       ? PLUS : MINUS);

  if (!pcc)
    pcc = &cc_dummy;

  /* PLUS and MINUS don't saturate:  Use modular wrap-around.  */

  if (PLUS == code_sat || MINUS == code_sat)
    code_sat = UNKNOWN;

  /* Register addend: only one code sequence is possible, emit it directly.  */
  if (n_bytes <= 4 && REG_P (xop[2]))
    {
      avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
      return "";
    }

  if (n_bytes == 8)
    {
      /* 64-bit operations work on the fixed accumulator register pair;
         XOP[0] holds the constant summand / subtrahend.  */
      op[0] = gen_rtx_REG (DImode, ACC_A);
      op[1] = gen_rtx_REG (DImode, ACC_A);
      op[2] = avr_to_int_mode (xop[0]);
    }
  else
    {
      /* An addend that is neither REG, CONST_INT nor CONST_FIXED must be
         a symbol; handle that in a separate routine.  */
      if (!REG_P (xop[2])
          && !CONST_INT_P (xop[2])
          && !CONST_FIXED_P (xop[2]))
        {
          return avr_out_plus_symbol (xop, code, plen, pcc);
        }

      /* Work on the integer counterpart of MODE so that constant folding
         below (negation, subreg extraction) is straightforward.  */
      op[0] = avr_to_int_mode (xop[0]);
      op[1] = avr_to_int_mode (xop[1]);
      op[2] = avr_to_int_mode (xop[2]);
    }

  /* Saturations and 64-bit operations don't have a clobber operand.
     For the other cases, the caller will provide a proper XOP[3].  */

  xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
  op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;

  /* Saturation will need the sign of the original operand.  */

  rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
  int sign = INTVAL (xmsb) < 0 ? -1 : 1;

  /* If we subtract and the subtrahend is a constant, then negate it
     so that avr_out_plus_1 can be used.  */

  if (MINUS == code)
    op[2] = simplify_unary_operation (NEG, imode, op[2], imode);

  /* Work out the shortest sequence: compute the lengths of both the
     addition and the subtraction variant, then emit the shorter one.  */

  avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
  avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);

  if (plen)
    {
      /* Length computation only: report the minimum and its cc effect.  */
      *plen = (len_minus <= len_plus) ? len_minus : len_plus;
      *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
    }
  else if (len_minus <= len_plus)
    avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
  else
    avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);

  return "";
}
8304
8305
8306 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
8307 time constant XOP[2]:
8308
8309 XOP[0] = XOP[0] <op> XOP[2]
8310
8311 and return "". If PLEN == NULL, print assembler instructions to perform the
8312 operation; otherwise, set *PLEN to the length of the instruction sequence
8313 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
8314 register or SCRATCH if no clobber register is needed for the operation.
8315 INSN is an INSN_P or a pattern of an insn. */
8316
const char*
avr_out_bitop (rtx insn, rtx *xop, int *plen)
{
  /* CODE and MODE of the operation.  */
  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
  enum rtx_code code = GET_CODE (SET_SRC (xpattern));
  machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknown.  Cached across bytes so
     that SET / CLT need not be re-emitted for every single-bit byte.  */
  int set_t = -1;

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.
     Cached so that LDI is only emitted when the needed value changes.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register, SCRATCH or NULL_RTX.
     op[3]: 8-bit register containing 0xff or NULL_RTX  */
  rtx op[4];

  op[2] = QImode == mode ? NULL_RTX : xop[3];
  op[3] = NULL_RTX;

  if (plen)
    *plen = 0;

  for (int i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte.  */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant.  */
      int pop8 = popcount_hwi (val8);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = GEN_INT (val8);

      switch (code)
        {
        case IOR:

          if (pop8 == 0)
            continue;                   /* OR with 0x00 is a no-op.  */
          else if (ld_reg_p)
            avr_asm_len ("ori %0,%1", op, plen, 1);
          else if (pop8 == 1)
            {
              /* Exactly one bit to set: use T-flag and BLD.  Emit SET
                 only if T is not already known to be 1.  */
              if (set_t != 1)
                avr_asm_len ("set", op, plen, 1);
              set_t = 1;

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else if (pop8 == 8)
            {
              /* OR with 0xff: just load 0xff, reusing a register already
                 known to contain it if we have one.  */
              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              /* This destination byte now holds 0xff; remember it.  */
              op[3] = op[0];
            }
          else
            {
              /* General case: load the constant into the clobber
                 register (unless it already holds it) and OR.  */
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);
            }

          continue; /* IOR */

        case AND:

          if (pop8 == 8)
            continue;                   /* AND with 0xff is a no-op.  */
          else if (pop8 == 0)
            avr_asm_len ("clr %0", op, plen, 1);
          else if (ld_reg_p)
            avr_asm_len ("andi %0,%1", op, plen, 1);
          else if (pop8 == 7)
            {
              /* Exactly one bit to clear: use T-flag (= 0) and BLD.  */
              if (set_t != 0)
                avr_asm_len ("clt", op, plen, 1);
              set_t = 0;

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);
            }

          continue; /* AND */

        case XOR:

          if (pop8 == 0)
            continue;                   /* XOR with 0x00 is a no-op.  */
          else if (pop8 == 8)
            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            /* Flipping just the sign bit of a d-reg: SUBI 0x80 toggles
               bit 7 without disturbing the lower bits.  */
            avr_asm_len ("subi %0,%1", op, plen, 1);
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);
            }

          continue; /* XOR */

        default:
          /* Unknown rtx_code */
          gcc_unreachable();
        }
    } /* for all sub-bytes */

  return "";
}
8457
8458
8459 /* Output sign extension from XOP[1] to XOP[0] and return "".
8460 If PLEN == NULL, print assembler instructions to perform the operation;
8461 otherwise, set *PLEN to the length of the instruction sequence (in words)
8462 as printed with PLEN == NULL. */
8463
const char*
avr_out_sign_extend (rtx_insn *insn, rtx *xop, int *plen)
{
  // Size in bytes of source resp. destination operand.
  unsigned n_src = GET_MODE_SIZE (GET_MODE (xop[1]));
  unsigned n_dest = GET_MODE_SIZE (GET_MODE (xop[0]));
  // Hard register holding the most significant (sign-carrying) source byte.
  rtx r_msb = all_regs_rtx[REGNO (xop[1]) + n_src - 1];

  if (plen)
    *plen = 0;

  // Copy the source into the low part of the destination (unless they
  // already are the same registers).

  if (REGNO (xop[0]) != REGNO (xop[1]))
    {
      gcc_assert (n_src <= 2);

      if (n_src == 2)
        avr_asm_len (AVR_HAVE_MOVW
                     ? "movw %0,%1"
                     : "mov %B0,%B1", xop, plen, 1);
      if (n_src == 1 || !AVR_HAVE_MOVW)
        avr_asm_len ("mov %A0,%A1", xop, plen, 1);
    }

  // Set Carry to the sign bit MSB.7...

  if (REGNO (xop[0]) == REGNO (xop[1])
      || !reg_unused_after (insn, r_msb))
    {
      // The MSB must survive; shift a copy in __tmp_reg__ instead.
      avr_asm_len ("mov __tmp_reg__,%0", &r_msb, plen, 1);
      r_msb = tmp_reg_rtx;
    }

  avr_asm_len ("lsl %0", &r_msb, plen, 1);

  // ...and propagate it to all the new sign bits: SBC Rn,Rn yields 0x00
  // or 0xff depending on Carry.

  for (unsigned n = n_src; n < n_dest; n++)
    avr_asm_len ("sbc %0,%0", &all_regs_rtx[REGNO (xop[0]) + n], plen, 1);

  return "";
}
8507
8508
8509 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
8510 PLEN != NULL: Set *PLEN to the length of that sequence.
8511 Return "". */
8512
8513 const char*
avr_out_addto_sp(rtx * op,int * plen)8514 avr_out_addto_sp (rtx *op, int *plen)
8515 {
8516 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
8517 int addend = INTVAL (op[0]);
8518
8519 if (plen)
8520 *plen = 0;
8521
8522 if (addend < 0)
8523 {
8524 if (flag_verbose_asm || flag_print_asm_name)
8525 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
8526
8527 while (addend <= -pc_len)
8528 {
8529 addend += pc_len;
8530 avr_asm_len ("rcall .", op, plen, 1);
8531 }
8532
8533 while (addend++ < 0)
8534 avr_asm_len ("push __tmp_reg__", op, plen, 1);
8535 }
8536 else if (addend > 0)
8537 {
8538 if (flag_verbose_asm || flag_print_asm_name)
8539 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
8540
8541 while (addend-- > 0)
8542 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
8543 }
8544
8545 return "";
8546 }
8547
8548
8549 /* Output instructions to insert an inverted bit into OPERANDS[0]:
8550 $0.$1 = ~$2.$3 if XBITNO = NULL
8551 $0.$1 = ~$2.XBITNO if XBITNO != NULL.
8552 If PLEN = NULL then output the respective instruction sequence which
8553 is a combination of BST / BLD and some instruction(s) to invert the bit.
8554 If PLEN != NULL then store the length of the sequence (in words) in *PLEN.
8555 Return "". */
8556
const char*
avr_out_insert_notbit (rtx_insn *insn, rtx operands[], rtx xbitno, int *plen)
{
  /* op[0].op[1] is the destination register.bit, op[2].op[3] the source
     register.bit; the source bit number may be overridden by XBITNO.  */
  rtx op[4] = { operands[0], operands[1], operands[2],
                xbitno == NULL_RTX ? operands [3] : xbitno };

  if (INTVAL (op[1]) == 7
      && test_hard_reg_class (LD_REGS, op[0]))
    {
      /* If the inserted bit number is 7 and we have a d-reg, then invert
         the bit after the insertion by means of SUBI *,0x80.  */

      if (INTVAL (op[3]) == 7
          && REGNO (op[0]) == REGNO (op[2]))
        {
          /* Same register and bit position: the bit is already in place,
             only its inversion is needed.  */
          avr_asm_len ("subi %0,0x80", op, plen, -1);
        }
      else
        {
          avr_asm_len ("bst %2,%3" CR_TAB
                       "bld %0,%1" CR_TAB
                       "subi %0,0x80", op, plen, -3);
        }
    }
  else if (test_hard_reg_class (LD_REGS, op[0])
           && (INTVAL (op[1]) != INTVAL (op[3])
               || !reg_overlap_mentioned_p (op[0], op[2])))
    {
      /* If the destination bit is in a d-reg we can jump depending
         on the source bit and use ANDI / ORI.  This just applies if we
         have not an early-clobber situation with the bit.  */

      avr_asm_len ("andi %0,~(1<<%1)" CR_TAB
                   "sbrs %2,%3" CR_TAB
                   "ori %0,1<<%1", op, plen, -3);
    }
  else
    {
      /* Otherwise, invert the bit by means of COM before we store it with
         BST and then undo the COM if needed.  */

      avr_asm_len ("com %2" CR_TAB
                   "bst %2,%3", op, plen, -2);

      if (!reg_unused_after (insn, op[2])
          // A simple 'reg_unused_after' is not enough because that function
          // assumes that the destination register is overwritten completely
          // and hence is in order for our purpose.  This is not the case
          // with BLD which just changes one bit of the destination.
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          /* Undo the COM from above.  */
          avr_asm_len ("com %2", op, plen, 1);
        }

      avr_asm_len ("bld %0,%1", op, plen, 1);
    }

  return "";
}
8617
8618
8619 /* Outputs instructions needed for fixed point type conversion.
8620 This includes converting between any fixed point type, as well
8621 as converting to any integer type. Conversion between integer
8622 types is not supported.
8623
8624 Converting signed fractional types requires a bit shift if converting
8625 to or from any unsigned fractional type because the decimal place is
8626 shifted by 1 bit. When the destination is a signed fractional, the sign
8627 is stored in either the carry or T bit. */
8628
const char*
avr_out_fract (rtx_insn *insn, rtx operands[], bool intsigned, int *plen)
{
  rtx xop[6];
  RTX_CODE shift = UNKNOWN;
  bool sign_in_carry = false;
  bool msb_in_carry = false;
  bool lsb_in_tmp_reg = false;
  bool lsb_in_carry = false;
  bool frac_rounded = false;
  const char *code_ashift = "lsl %0";


#define MAY_CLOBBER(RR) \
  /* Shorthand used below.  True iff hard register RR may be clobbered:
     it is a to-be-written sign byte of the destination, or a destination
     byte while bytes still get moved around (OFFSET != 0), or it is dead
     after INSN and not part of the destination at all.  */ \
  ((sign_bytes \
    && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
   || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
   || (reg_unused_after (insn, all_regs_rtx[RR]) \
       && !IN_RANGE (RR, dest.regno, dest.regno_msb)))

  struct
  {
    /* bytes : Length of operand in bytes.
       ibyte : Length of integral part in bytes.
       fbyte, fbit : Length of fractional part in bytes, bits.  */

    bool sbit;
    unsigned fbit, bytes, ibyte, fbyte;
    unsigned regno, regno_msb;
  } dest, src, *val[2] = { &dest, &src };

  if (plen)
    *plen = 0;

  /* Step 0:  Determine information on source and destination operand we
     ======    will need in the remainder.  */

  for (size_t i = 0; i < ARRAY_SIZE (val); i++)
    {
      machine_mode mode;

      xop[i] = operands[i];

      mode = GET_MODE (xop[i]);

      val[i]->bytes = GET_MODE_SIZE (mode);
      val[i]->regno = REGNO (xop[i]);
      val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;

      if (SCALAR_INT_MODE_P (mode))
        {
          /* Plain integer: signedness comes from the caller.  */
          val[i]->sbit = intsigned;
          val[i]->fbit = 0;
        }
      else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
        {
          val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
          val[i]->fbit = GET_MODE_FBIT (mode);
        }
      else
        fatal_insn ("unsupported fixed-point conversion", insn);

      /* "+ 1" rounds the 7-bit fractional part of signed fract modes
         down to 1 byte.  */
      val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
      val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
    }

  // Byte offset of the decimal point taking into account different place
  // of the decimal point in input and output and different register numbers
  // of input and output.
  int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;

  // Number of destination bytes that will come from sign / zero extension.
  int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);

  // Number of bytes at the low end to be filled with zeros.
  int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);

  // Do we have a 16-Bit register that is cleared?
  rtx clrw = NULL_RTX;

  bool sign_extend = src.sbit && sign_bytes;

  /* A fract mode has its sign bit inside the top byte (fbit % 8 == 7),
     so converting between signed fract and byte-aligned layouts needs a
     1-bit shift of the whole destination.  */
  if (dest.fbit % 8 == 0 && src.fbit % 8 == 7)
    shift = ASHIFT;
  else if (dest.fbit % 8 == 7 && src.fbit % 8 == 0)
    shift = ASHIFTRT;
  else if (dest.fbit % 8 == src.fbit % 8)
    shift = UNKNOWN;
  else
    gcc_unreachable();

  /* If we need to round the fraction part, we might need to save/round it
     before clobbering any of it in Step 1.  Also, we might want to do
     the rounding now to make use of LD_REGS.  */
  if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
      && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
      && !TARGET_FRACT_CONV_TRUNC)
    {
      /* NOTE(review): DEST.REGNO and OFFSET are unsigned/int; the second
         conjunct relies on unsigned wrap-around and effectively tests
         OFFSET < 0 (for DEST.REGNO > 0) — confirm intent.  */
      bool overlap
        = (src.regno <=
           (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
           && dest.regno - offset -1 >= dest.regno);
      /* S0 = hard regno of the source byte holding the future LSB.  */
      unsigned s0 = dest.regno - offset -1;
      bool use_src = true;
      unsigned sn;
      unsigned copied_msb = src.regno_msb;
      bool have_carry = false;

      if (src.ibyte > dest.ibyte)
        copied_msb -= src.ibyte - dest.ibyte;

      /* May we clobber the copied source bytes directly?  */
      for (sn = s0; sn <= copied_msb; sn++)
        if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
            && !reg_unused_after (insn, all_regs_rtx[sn]))
          use_src = false;
      if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
        {
          /* Round by adding 0.5 (in the source's fixed-point scale) to
             negative values; positive values skip via BRPL.  */
          avr_asm_len ("tst %0" CR_TAB "brpl 0f",
                       &all_regs_rtx[src.regno_msb], plen, 2);
          sn = src.regno;
          if (sn < s0)
            {
              /* Fold the to-be-discarded low bytes into the carry.  */
              if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
                avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
              else
                avr_asm_len ("sec" CR_TAB
                             "cpc %0,__zero_reg__",
                             &all_regs_rtx[sn], plen, 2);
              have_carry = true;
            }
          while (++sn < s0)
            avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);

          /* SUBI/SBCI with -128 adds 0x80 to the future LSB.  */
          avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
                       &all_regs_rtx[s0], plen, 1);
          for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
            avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
          avr_asm_len ("\n0:", NULL, plen, 0);
          frac_rounded = true;
        }
      else if (use_src && overlap)
        {
          /* __tmp_reg__ = 0x00 or 0xff depending on bit 0 of the LSB.
             NOTE(review): the length argument 1 looks too small for this
             3-instruction template — verify against avr_asm_len.  */
          avr_asm_len ("clr __tmp_reg__" CR_TAB
                       "sbrc %1,0" CR_TAB
                       "dec __tmp_reg__", xop, plen, 1);
          sn = src.regno;
          if (sn < s0)
            {
              avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
              have_carry = true;
            }

          while (++sn < s0)
            avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);

          if (have_carry)
            /* NOTE(review): length argument 1 for a 3-instruction
               template — verify.  */
            avr_asm_len ("clt" CR_TAB
                         "bld __tmp_reg__,7" CR_TAB
                         "adc %0,__tmp_reg__",
                         &all_regs_rtx[s0], plen, 1);
          else
            /* NOTE(review): "__tmp_reg" is probably missing the trailing
               "__" — this would emit an undefined symbol; confirm against
               upstream and fix in a code change.  */
            avr_asm_len ("lsr __tmp_reg" CR_TAB
                         "add %0,__tmp_reg__",
                         &all_regs_rtx[s0], plen, 2);
          for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
            avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
          frac_rounded = true;
        }
      else if (overlap)
        {
          /* Can't round in place: preserve the rounding information in
             __tmp_reg__ (or in S0 itself if it is a clobberable d-reg).  */
          bool use_src
            = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
               && (IN_RANGE (s0, dest.regno, dest.regno_msb)
                   || reg_unused_after (insn, all_regs_rtx[s0])));
          xop[2] = all_regs_rtx[s0];
          unsigned sn = src.regno;
          if (!use_src || sn == s0)
            avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
          /* We need to consider to-be-discarded bits
             if the value is negative.  */
          if (sn < s0)
            {
              avr_asm_len ("tst %0" CR_TAB
                           "brpl 0f",
                           &all_regs_rtx[src.regno_msb], plen, 2);
              /* Test to-be-discarded bytes for any nonzero bits.
                 ??? Could use OR or SBIW to test two registers at once.  */
              if (sn < s0)
                avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);

              while (++sn < s0)
                avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
              /* Set bit 0 in __tmp_reg__ if any of the lower bits was set.  */
              if (use_src)
                avr_asm_len ("breq 0f" CR_TAB
                             "ori %2,1"
                             "\n0:\t" "mov __tmp_reg__,%2",
                             xop, plen, 3);
              else
                avr_asm_len ("breq 0f" CR_TAB
                             "set" CR_TAB
                             "bld __tmp_reg__,0\n0:",
                             xop, plen, 3);
            }
          lsb_in_tmp_reg = true;
        }
    }

  /* Step 1:  Clear bytes at the low end and copy payload bits from source
     ======    to destination.  */

  /* Iterate away from the overlap so that no source byte is overwritten
     before it has been copied.  */
  int step = offset < 0 ? 1 : -1;
  unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;

  // We cleared at least that number of registers.
  int clr_n = 0;

  for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
    {
      // Next regno of destination is needed for MOVW
      unsigned d1 = d0 + step;

      // Current and next regno of source
      signed s0 = d0 - offset;
      signed s1 = s0 + step;

      // Must current resp. next regno be CLRed?  This applies to the low
      // bytes of the destination that have no associated source bytes.
      bool clr0 = s0 < (signed) src.regno;
      bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;

      // First gather what code to emit (if any) and additional step to
      // apply if a MOVW is in use.  xop[2] is destination rtx and xop[3]
      // is the source rtx for the current loop iteration.
      const char *code = NULL;
      int stepw = 0;

      if (clr0)
        {
          if (AVR_HAVE_MOVW && clr1 && clrw)
            {
              /* Clear two bytes at once by copying the known-zero pair.  */
              xop[2] = all_regs_rtx[d0 & ~1];
              xop[3] = clrw;
              code = "movw %2,%3";
              stepw = step;
            }
          else
            {
              xop[2] = all_regs_rtx[d0];
              code = "clr %2";

              /* Once two adjacent, pair-aligned bytes are zero, remember
                 the pair for MOVW-clearing further bytes.  */
              if (++clr_n >= 2
                  && !clrw
                  && d0 % 2 == (step > 0))
                {
                  clrw = all_regs_rtx[d0 & ~1];
                }
            }
        }
      else if (offset && s0 <= (signed) src.regno_msb)
        {
          /* Copy a source byte (or a MOVW-able pair) to the destination.  */
          int movw = AVR_HAVE_MOVW && offset % 2 == 0
            && d0 % 2 == (offset > 0)
            && d1 <= dest.regno_msb && d1 >= dest.regno
            && s1 <= (signed) src.regno_msb && s1 >= (signed) src.regno;

          xop[2] = all_regs_rtx[d0 & ~movw];
          xop[3] = all_regs_rtx[s0 & ~movw];
          code = movw ? "movw %2,%3" : "mov %2,%3";
          stepw = step * movw;
        }

      if (code)
        {
          if (sign_extend && shift != ASHIFT && !sign_in_carry
              && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
            {
              /* We are going to override the sign bit.  If we sign-extend,
                 store the sign in the Carry flag.  This is not needed if
                 the destination will be ASHIFT in the remainder because
                 the ASHIFT will set Carry without extra instruction.  */

              avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
              sign_in_carry = true;
            }

          unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;

          if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
              && src.ibyte > dest.ibyte
              && (d0 == src_msb || d0 + stepw == src_msb))
            {
              /* We are going to override the MSB.  If we shift right,
                 store the MSB in the Carry flag.  This is only needed if
                 we don't sign-extend because with sign-extension the MSB
                 (the sign) will be produced by the sign extension.  */

              avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
              msb_in_carry = true;
            }

          unsigned src_lsb = dest.regno - offset -1;

          if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
              && !lsb_in_tmp_reg
              && (d0 == src_lsb || d0 + stepw == src_lsb))
            {
              /* We are going to override the new LSB; store it into carry.  */

              avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
              code_ashift = "rol %0";
              lsb_in_carry = true;
            }

          avr_asm_len (code, xop, plen, 1);
          d0 += stepw;
        }
    }

  /* Step 2:  Shift destination left by 1 bit position.  This might be needed
     ======    for signed input and unsigned output.  */

  if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
    {
      unsigned s0 = dest.regno - offset -1;

      /* n1169 4.1.4 says:
         "Conversions from a fixed-point to an integer type round toward zero."
         Hence, converting a fract type to integer only gives a non-zero result
         for -1.  */
      if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
          && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
          && !TARGET_FRACT_CONV_TRUNC)
        {
          gcc_assert (s0 == src.regno_msb);
          /* Check if the input is -1.  We do that by checking if negating
             the input causes an integer overflow.  */
          unsigned sn = src.regno;
          avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
          while (sn <= s0)
            avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);

          /* Overflow goes with set carry.  Clear carry otherwise.  */
          avr_asm_len ("brvs 0f" CR_TAB
                       "clc\n0:", NULL, plen, 2);
        }
      /* Likewise, when converting from accumulator types to integer, we
         need to round up negative values.  */
      else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
               && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
               && !TARGET_FRACT_CONV_TRUNC
               && !frac_rounded)
        {
          bool have_carry = false;

          xop[2] = all_regs_rtx[s0];
          if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
            avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
          avr_asm_len ("tst %0" CR_TAB "brpl 0f",
                       &all_regs_rtx[src.regno_msb], plen, 2);
          if (!lsb_in_tmp_reg)
            {
              unsigned sn = src.regno;
              if (sn < s0)
                {
                  avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
                               plen, 1);
                  have_carry = true;
                }
              while (++sn < s0)
                avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
              lsb_in_tmp_reg = !MAY_CLOBBER (s0);
            }
          /* Add in C and the rounding value 127.  */
          /* If the destination msb is a sign byte, and in LD_REGS,
             grab it as a temporary.  */
          if (sign_bytes
              && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
                                    dest.regno_msb))
            {
              xop[3] = all_regs_rtx[dest.regno_msb];
              avr_asm_len ("ldi %3,127", xop, plen, 1);
              avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
                            : have_carry ? "adc %2,%3"
                            : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
                            : "add %2,%3"),
                           xop, plen, 1);
            }
          else
            {
              /* Fall back to use __zero_reg__ as a temporary.  */
              avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
              if (have_carry)
                avr_asm_len ("clt" CR_TAB
                             "bld __zero_reg__,7", NULL, plen, 2);
              else
                avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
              avr_asm_len (have_carry && lsb_in_tmp_reg
                           ? "adc __tmp_reg__,__zero_reg__"
                           : have_carry ? "adc %2,__zero_reg__"
                           : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
                           : "add %2,__zero_reg__",
                           xop, plen, 1);
              /* Restore __zero_reg__ to 0.  */
              avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
            }

          for (d0 = dest.regno + zero_bytes;
               d0 <= dest.regno_msb - sign_bytes; d0++)
            avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);

          avr_asm_len (lsb_in_tmp_reg
                       ? "\n0:\t" "lsl __tmp_reg__"
                       : "\n0:\t" "lsl %2",
                       xop, plen, 1);
        }
      else if (MAY_CLOBBER (s0))
        avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      code_ashift = "rol %0";
      lsb_in_carry = true;
    }

  if (shift == ASHIFT)
    {
      /* Rotate the Carry (new LSB resp. sign) through the destination.  */
      for (d0 = dest.regno + zero_bytes;
           d0 <= dest.regno_msb - sign_bytes; d0++)
        {
          avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
          code_ashift = "rol %0";
        }

      lsb_in_carry = false;
      sign_in_carry = true;
    }

  /* Step 4a:  Store MSB in carry if we don't already have it or will produce
     =======    it in sign-extension below.  */

  if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
      && src.ibyte > dest.ibyte)
    {
      unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;

      if (MAY_CLOBBER (s0))
        avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      msb_in_carry = true;
    }

  /* Step 3:  Sign-extend or zero-extend the destination as needed.
     ======  */

  if (sign_extend && !sign_in_carry)
    {
      unsigned s0 = src.regno_msb;

      if (MAY_CLOBBER (s0))
        avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      sign_in_carry = true;
    }

  /* At most one flag may live in the Carry at this point.  */
  gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);

  unsigned copies = 0;
  /* MOVW can only batch zero-extension; sign bytes depend on Carry.  */
  rtx movw = sign_extend ? NULL_RTX : clrw;

  for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
    {
      if (AVR_HAVE_MOVW && movw
          && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
        {
          xop[2] = all_regs_rtx[d0];
          xop[3] = movw;
          avr_asm_len ("movw %2,%3", xop, plen, 1);
          d0++;
        }
      else
        {
          /* SBC Rn,Rn yields 0x00 / 0xff from Carry (sign extension);
             CLR zero-extends.  */
          avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
                       &all_regs_rtx[d0], plen, 1);

          if (++copies >= 2 && !movw && d0 % 2 == 1)
            movw = all_regs_rtx[d0-1];
        }
    } /* for */


  /* Step 4:  Right shift the destination.  This might be needed for
     ======    conversions from unsigned to signed.  */

  if (shift == ASHIFTRT)
    {
      const char *code_ashiftrt = "lsr %0";

      /* Shift the saved sign / MSB back in from the Carry flag.  */
      if (sign_extend || msb_in_carry)
        code_ashiftrt = "ror %0";

      if (src.sbit && src.ibyte == dest.ibyte)
        code_ashiftrt = "asr %0";

      for (d0 = dest.regno_msb - sign_bytes;
           d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
        {
          avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
          code_ashiftrt = "ror %0";
        }
    }

#undef MAY_CLOBBER

  return "";
}
9152
9153
9154 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
9155 XOP[2] is the rounding point, a CONST_INT. The function prints the
9156 instruction sequence if PLEN = NULL and computes the length in words
9157 of the sequence if PLEN != NULL. Most of this function deals with
9158 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
9159
const char*
avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
{
  scalar_mode mode = as_a <scalar_mode> (GET_MODE (xop[0]));
  scalar_int_mode imode = int_mode_for_mode (mode).require ();
  // The smallest fractional bit not cleared by the rounding is 2^(-RP).
  // I_ADD (double_int) feeds const_fixed_from_double_int below; WI_ADD
  // (wide_int) is the same value used to build the AND mask.
  int fbit = (int) GET_MODE_FBIT (mode);
  double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
  wide_int wi_add = wi::set_bit_in_zero (fbit-1 - INTVAL (xop[2]),
                                         GET_MODE_PRECISION (imode));
  // Lengths of PLUS and AND parts.
  int len_add = 0, *plen_add = plen ? &len_add : NULL;
  int len_and = 0, *plen_and = plen ? &len_and : NULL;

  // Add-Saturate 1/2 * 2^(-RP).  Don't print the label "0:" when printing
  // the saturated addition so that we can emit the "rjmp 1f" before the
  // "0:" below.

  rtx xadd = const_fixed_from_double_int (i_add, mode);
  rtx xpattern, xsrc, op[4];

  // Use the saturating addition matching the operand's signedness.
  xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
    ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
    : gen_rtx_US_PLUS (mode, xop[1], xadd);
  xpattern = gen_rtx_SET (xop[0], xsrc);

  op[0] = xop[0];
  op[1] = xop[1];
  op[2] = xadd;
  avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);

  // If the addition saturated, the mask step below must be skipped:
  // jump over it to label "1:".
  avr_asm_len ("rjmp 1f" CR_TAB
               "0:", NULL, plen_add, 1);

  // Keep all bits from RP and higher:   ... 2^(-RP)
  // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
  //   Rounding point                     ^^^^^^^^^
  //   Added above                        ^^^^^^^^^
  rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
  // -2 * WI_ADD is the all-ones mask down to (and including) bit RP.
  rtx xmask = immed_wide_int_const (-wi_add - wi_add, imode);

  xpattern = gen_rtx_SET (xreg, gen_rtx_AND (imode, xreg, xmask));

  op[0] = xreg;
  op[1] = xreg;
  op[2] = xmask;
  op[3] = gen_rtx_SCRATCH (QImode);
  avr_out_bitop (xpattern, op, plen_and);
  avr_asm_len ("1:", NULL, plen, 0);

  if (plen)
    *plen = len_add + len_and;

  return "";
}
9215
9216
9217 /* Create RTL split patterns for byte sized rotate expressions. This
9218 produces a series of move instructions and considers overlap situations.
9219 Overlapping non-HImode operands need a scratch register. */
9220
bool
avr_rotate_bytes (rtx operands[])
{
  machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  /* Rotate count in bits; always a multiple of 8 for byte rotates.  */
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  machine_mode move_mode = QImode;
  int move_size, offset, size;

  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (SCRATCH != GET_CODE (scratch)
      && HImode == GET_MODE (scratch)
      && QImode == move_mode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* HImode byte swap is a special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.
         Classic 3-XOR swap of the low and high byte subregs.  */
      rtx src, dst;
      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
	{
	  emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	  emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
	  emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	}
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
      /* Create linked list of moves to determine move order.  The extra
         slots beyond MAX_SIZE hold scratch moves appended below when a
         dependency cycle has to be broken.  */
      struct {
	rtx src, dst;
	int links;
      } move[MAX_SIZE + 8];
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  Each byte/word I of the source
         goes to position (I + OFFSET) % SIZE of the destination.  */
      for (int i = 0; i < size; i++)
	{
	  int from = i;
	  int to = (from + offset) % size;
	  move[i].src = simplify_gen_subreg (move_mode, operands[1],
					     mode, from * move_size);
	  move[i].dst = simplify_gen_subreg (move_mode, operands[0],
					     mode, to * move_size);
	  move[i].links = -1;
	}
      /* Mark dependence where a dst of one move is the src of another move.
	 The first move is a conflict as it must wait until second is
	 performed.  We ignore moves to self - we catch this later.  */
      if (overlapped)
	for (int i = 0; i < size; i++)
	  if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
	    for (int j = 0; j < size; j++)
	      if (j != i && rtx_equal_p (move[j].src, move[i].dst))
		{
		  /* The dst of move i is the src of move j.  */
		  move[i].links = j;
		  break;
		}

      blocked = -1;
      moves = 0;
      /* Go through move list and perform non-conflicting moves.  As each
	 non-overlapping move is made, it may remove other conflicts
	 so the process is repeated until no conflicts remain.  */
      do
	{
	  blocked = -1;
	  moves = 0;
	  /* Emit move where dst is not also a src or we have used that
	     src already.  */
	  for (int i = 0; i < size; i++)
	    if (move[i].src != NULL_RTX)
	      {
		if (move[i].links == -1
		    || move[move[i].links].src == NULL_RTX)
		  {
		    moves++;
		    /* Ignore NOP moves to self.  */
		    if (!rtx_equal_p (move[i].dst, move[i].src))
		      emit_move_insn (move[i].dst, move[i].src);

		    /* Remove conflict from list.  NULL src marks a
		       completed move.  */
		    move[i].src = NULL_RTX;
		  }
		else
		  blocked = i;
	      }

	  /* Check for deadlock.  This is when no moves occurred and we have
	     at least one blocked move.  */
	  if (moves == 0 && blocked != -1)
	    {
	      /* Need to use scratch register to break deadlock.
		 Add move to put dst of blocked move into scratch.
		 When this move occurs, it will break chain deadlock.
		 The scratch register is substituted for real move.  */

	      gcc_assert (SCRATCH != GET_CODE (scratch));

	      move[size].src = move[blocked].dst;
	      move[size].dst = scratch;
	      /* Scratch move is never blocked.  */
	      move[size].links = -1;
	      /* Make sure we have valid link.  */
	      gcc_assert (move[blocked].links != -1);
	      /* Replace src of blocking move with scratch reg.  */
	      move[move[blocked].links].src = scratch;
	      /* Make dependent on scratch move occurring.  */
	      move[blocked].links = size;
	      size=size+1;
	    }
	}
      while (blocked != -1);
    }
  return true;
}
9363
9364
9365 /* Worker function for `ADJUST_INSN_LENGTH'. */
9366 /* Modifies the length assigned to instruction INSN
9367 LEN is the initially computed length of the insn. */
9368
int
avr_adjust_insn_length (rtx_insn *insn, int len)
{
  /* Operands of INSN as extracted by extract_constrain_insn_cached.  */
  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* As we pretend jump tables in .text, fix branch offsets crossing jump
     tables now.  */

  if (JUMP_TABLE_DATA_P (insn))
    return 0;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (!NONDEBUG_INSN_P (insn) || recog_memoized (insn) == -1)
    {
      return len;
    }

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
	 This is the default.  */

      return len;
    }

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  Each output function computes the exact
     length into LEN as a side effect when passed a non-NULL plen.  */

  switch (adjust_len)
    {
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
    case ADJUST_LEN_SEXT: avr_out_sign_extend (insn, op, &len); break;

    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
    case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    /* [R]CALL/[R]JMP: 2 words with JMP/CALL available, 1 word otherwise.  */
    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;

    case ADJUST_LEN_INSV_NOTBIT:
      avr_out_insert_notbit (insn, op, NULL_RTX, &len);
      break;
    case ADJUST_LEN_INSV_NOTBIT_0:
      avr_out_insert_notbit (insn, op, const0_rtx, &len);
      break;
    case ADJUST_LEN_INSV_NOTBIT_7:
      avr_out_insert_notbit (insn, op, GEN_INT (7), &len);
      break;

    default:
      gcc_unreachable();
    }

  return len;
}
9474
9475 /* Return nonzero if register REG dead after INSN. */
9476
9477 int
reg_unused_after(rtx_insn * insn,rtx reg)9478 reg_unused_after (rtx_insn *insn, rtx reg)
9479 {
9480 return (dead_or_set_p (insn, reg)
9481 || (REG_P (reg) && _reg_unused_after (insn, reg)));
9482 }
9483
9484 /* Return nonzero if REG is not used after INSN.
9485 We assume REG is a reload reg, and therefore does
9486 not live past labels. It may live past calls or jumps though. */
9487
int
_reg_unused_after (rtx_insn *insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && !MEM_P (SET_DEST (set))
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  /* Scan forward through the insn stream until we can prove REG is
     either dead (fully set again, or end of stream reached) or used.  */
  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
	 if dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
	return 1;
      /* else */
#endif

      if (!INSN_P (insn))
	continue;

      /* A jump may transfer control anywhere; conservatively assume
	 REG is still used at the target.  */
      if (code == JUMP_INSN)
	return 0;

      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
	  int retval = 0;

	  for (int i = 0; i < seq->len (); i++)
	    {
	      rtx_insn *this_insn = seq->insn (i);
	      rtx set = single_set (this_insn);

	      if (CALL_P (this_insn))
		code = CALL_INSN;
	      else if (JUMP_P (this_insn))
		{
		  /* An annulled branch may or may not execute its delay
		     slot; we cannot reason further.  */
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		    return 0;
		  code = JUMP_INSN;
		}

	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
		return 0;
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		{
		  /* REG is overwritten here (unless the dest is memory,
		     in which case REG is part of the address and used).  */
		  if (!MEM_P (SET_DEST (set)))
		    retval = 1;
		  else
		    return 0;
		}
	      if (set == 0
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
		return 0;
	    }
	  if (retval == 1)
	    return 1;
	  else if (code == JUMP_INSN)
	    return 0;
	}

      if (code == CALL_INSN)
	{
	  /* A call uses the registers listed in CALL_INSN_FUNCTION_USAGE;
	     if REG is among them it is not unused.  */
	  rtx tem;
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	      return 0;
	  /* Call-clobbered registers do not survive a call anyway.  */
	  if (call_used_regs[REGNO (reg)])
	    return 1;
	}

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	return !MEM_P (SET_DEST (set));
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	return 0;
    }
  /* Fell off the end of the insn stream: REG is unused.  */
  return 1;
}
9588
9589
9590 /* Implement `TARGET_ASM_INTEGER'. */
9591 /* Target hook for assembling integer objects. The AVR version needs
9592 special handling for references to certain labels. */
9593
static bool
avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  /* Pointer-sized references into the text segment are emitted with
     the gs() linker operator so stubs/trampolines can be generated.  */
  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
      && text_segment_operand (x, VOIDmode))
    {
      fputs ("\t.word\tgs(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);

      return true;
    }
  else if (GET_MODE (x) == PSImode)
    {
      /* This needs binutils 2.23+, see PR binutils/13503 */

      /* Emit a 24-bit value byte by byte, least significant first.  */
      fputs ("\t.byte\tlo8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thi8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thh8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      return true;
    }
  else if (CONST_FIXED_P (x))
    {
      /* varasm fails to handle big fixed modes that don't fit in hwi.  */

      /* So emit the value one byte at a time instead.  */
      for (unsigned n = 0; n < size; n++)
	{
	  rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
	  default_assemble_integer (xn, 1, aligned_p);
	}

      return true;
    }

  /* On reduced Tiny, addresses of objects in flash need the offset at
     which flash is visible in the RAM address space added.  */
  if (AVR_TINY
      && avr_address_tiny_pm_p (x))
    {
      x = plus_constant (Pmode, x, avr_arch->flash_pm_offset);
    }

  return default_assemble_integer (x, size, aligned_p);
}
9645
9646
9647 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
9648 /* Return value is nonzero if pseudos that have been
9649 assigned to registers of class CLASS would likely be spilled
9650 because registers of CLASS are needed for spill registers. */
9651
9652 static bool
avr_class_likely_spilled_p(reg_class_t c)9653 avr_class_likely_spilled_p (reg_class_t c)
9654 {
9655 return (c != ALL_REGS &&
9656 (AVR_TINY ? 1 : c != ADDW_REGS));
9657 }
9658
9659
9660 /* Valid attributes:
9661 progmem - Put data to program memory.
9662 signal - Make a function to be hardware interrupt.
9663 After function prologue interrupts remain disabled.
9664 interrupt - Make a function to be hardware interrupt. Before function
9665 prologue interrupts are enabled by means of SEI.
9666 naked - Don't generate function prologue/epilogue and RET
9667 instruction. */
9668
9669 /* Handle a "progmem" attribute; arguments as in
9670 struct attribute_spec.handler. */
9671
static tree
avr_handle_progmem_attribute (tree *node, tree name,
			      tree args ATTRIBUTE_UNUSED,
			      int flags ATTRIBUTE_UNUSED,
			      bool *no_add_attrs)
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
	{
	  /* This is really a decl attribute, not a type attribute,
	     but try to handle it for GCC 3.0 backwards compatibility.  */

	  /* Attach the attribute to the underlying type instead, via a
	     new type variant, and suppress the decl attribute.  */
	  tree type = TREE_TYPE (*node);
	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
	  tree newtype = build_type_attribute_variant (type, attr);

	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
	  TREE_TYPE (*node) = newtype;
	  *no_add_attrs = true;
	}
      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
	{
	  /* Variable in static storage: progmem applies, keep it.  */
	  *no_add_attrs = false;
	}
      else
	{
	  /* Automatic variables cannot live in program memory.  */
	  warning (OPT_Wattributes, "%qE attribute ignored",
		   name);
	  *no_add_attrs = true;
	}
    }

  return NULL_TREE;
}
9707
9708 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
9709 struct attribute_spec.handler. */
9710
9711 static tree
avr_handle_fndecl_attribute(tree * node,tree name,tree args ATTRIBUTE_UNUSED,int flags ATTRIBUTE_UNUSED,bool * no_add_attrs)9712 avr_handle_fndecl_attribute (tree *node, tree name,
9713 tree args ATTRIBUTE_UNUSED,
9714 int flags ATTRIBUTE_UNUSED,
9715 bool *no_add_attrs)
9716 {
9717 if (TREE_CODE (*node) != FUNCTION_DECL)
9718 {
9719 warning (OPT_Wattributes, "%qE attribute only applies to functions",
9720 name);
9721 *no_add_attrs = true;
9722 }
9723
9724 return NULL_TREE;
9725 }
9726
9727 static tree
avr_handle_fntype_attribute(tree * node,tree name,tree args ATTRIBUTE_UNUSED,int flags ATTRIBUTE_UNUSED,bool * no_add_attrs)9728 avr_handle_fntype_attribute (tree *node, tree name,
9729 tree args ATTRIBUTE_UNUSED,
9730 int flags ATTRIBUTE_UNUSED,
9731 bool *no_add_attrs)
9732 {
9733 if (TREE_CODE (*node) != FUNCTION_TYPE)
9734 {
9735 warning (OPT_Wattributes, "%qE attribute only applies to functions",
9736 name);
9737 *no_add_attrs = true;
9738 }
9739
9740 return NULL_TREE;
9741 }
9742
9743 static tree
avr_handle_absdata_attribute(tree * node,tree name,tree,int,bool * no_add)9744 avr_handle_absdata_attribute (tree *node, tree name, tree /* args */,
9745 int /* flags */, bool *no_add)
9746 {
9747 location_t loc = DECL_SOURCE_LOCATION (*node);
9748
9749 if (AVR_TINY)
9750 {
9751 if (TREE_CODE (*node) != VAR_DECL
9752 || (!TREE_STATIC (*node) && !DECL_EXTERNAL (*node)))
9753 {
9754 warning_at (loc, OPT_Wattributes, "%qE attribute only applies to"
9755 " variables in static storage", name);
9756 *no_add = true;
9757 }
9758 }
9759 else
9760 {
9761 warning_at (loc, OPT_Wattributes, "%qE attribute only supported"
9762 " for reduced Tiny cores", name);
9763 *no_add = true;
9764 }
9765
9766 return NULL_TREE;
9767 }
9768
static tree
avr_handle_addr_attribute (tree *node, tree name, tree args,
			   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  /* True for "io" and "io_low"; false for "address".  */
  bool io_p = (strncmp (IDENTIFIER_POINTER (name), "io", 2) == 0);
  location_t loc = DECL_SOURCE_LOCATION (*node);

  if (!VAR_P (*node))
    {
      warning_at (loc, OPT_Wattributes, "%qE attribute only applies to "
		  "variables", name);
      *no_add = true;
      return NULL_TREE;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper so the constant is visible.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      tree arg = TREE_VALUE (args);
      if (TREE_CODE (arg) != INTEGER_CST)
	{
	  warning_at (loc, OPT_Wattributes, "%qE attribute allows only an "
		      "integer constant argument", name);
	  *no_add = true;
	}
      else if (io_p
	       && (!tree_fits_shwi_p (arg)
		   /* Select the range predicate by attribute name and
		      apply it to the given address.  */
		   || !(strcmp (IDENTIFIER_POINTER (name), "io_low") == 0
			? low_io_address_operand : io_address_operand)
		       (GEN_INT (TREE_INT_CST_LOW (arg)), QImode)))
	{
	  warning_at (loc, OPT_Wattributes, "%qE attribute address "
		      "out of range", name);
	  *no_add = true;
	}
      else
	{
	  /* Refuse a second address-carrying attribute on the same decl.  */
	  tree attribs = DECL_ATTRIBUTES (*node);
	  const char *names[] = { "io", "io_low", "address", NULL };
	  for (const char **p = names; *p; p++)
	    {
	      tree other = lookup_attribute (*p, attribs);
	      if (other && TREE_VALUE (other))
		{
		  warning_at (loc, OPT_Wattributes,
			      "both %s and %qE attribute provide address",
			      *p, name);
		  *no_add = true;
		  break;
		}
	    }
	}
    }

  /* I/O registers are hardware state; accessing them through a
     non-volatile variable is almost certainly a mistake.  */
  if (*no_add == false && io_p && !TREE_THIS_VOLATILE (*node))
    warning_at (loc, OPT_Wattributes, "%qE attribute on non-volatile variable",
		name);

  return NULL_TREE;
}
9830
/* If X is a symbol carrying an io / io_low / address attribute, return
   the attribute's address value as a CONST_INT; otherwise return X.  */

rtx
avr_eval_addr_attrib (rtx x)
{
  if (SYMBOL_REF_P (x)
      && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_ADDRESS))
    {
      tree decl = SYMBOL_REF_DECL (x);
      tree attr = NULL_TREE;

      if (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO)
	{
	  /* Prefer "io", fall back to "io_low" if "io" has no argument.  */
	  attr = lookup_attribute ("io", DECL_ATTRIBUTES (decl));
	  if (!attr || !TREE_VALUE (attr))
	    attr = lookup_attribute ("io_low", DECL_ATTRIBUTES (decl));
	  gcc_assert (attr);
	}
      if (!attr || !TREE_VALUE (attr))
	attr = lookup_attribute ("address", DECL_ATTRIBUTES (decl));
      gcc_assert (attr && TREE_VALUE (attr) && TREE_VALUE (TREE_VALUE (attr)));
      return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr))));
    }
  return x;
}
9854
9855
/* AVR attributes.  */
static const struct attribute_spec avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  /* Data placement.  */
  { "progmem",   0, 0, false, false, false, false,
    avr_handle_progmem_attribute, NULL },
  /* Interrupt service routines.  */
  { "signal",    0, 0, true,  false, false, false,
    avr_handle_fndecl_attribute, NULL },
  { "interrupt", 0, 0, true,  false, false, false,
    avr_handle_fndecl_attribute, NULL },
  { "no_gccisr", 0, 0, true,  false, false, false,
    avr_handle_fndecl_attribute, NULL },
  /* Prologue / epilogue control.  */
  { "naked",     0, 0, false, true,  true,  false,
    avr_handle_fntype_attribute, NULL },
  { "OS_task",   0, 0, false, true,  true,  false,
    avr_handle_fntype_attribute, NULL },
  { "OS_main",   0, 0, false, true,  true,  false,
    avr_handle_fntype_attribute, NULL },
  /* Fixed addresses for variables.  */
  { "io",        0, 1, true, false, false,  false,
    avr_handle_addr_attribute, NULL },
  { "io_low",    0, 1, true, false, false,  false,
    avr_handle_addr_attribute, NULL },
  { "address",   1, 1, true, false, false,  false,
    avr_handle_addr_attribute, NULL },
  /* Reduced Tiny absolute data addressing.  */
  { "absdata",   0, 0, true, false, false,  false,
    avr_handle_absdata_attribute, NULL },
  { NULL,        0, 0, false, false, false, false, NULL, NULL }
};
9885
9886
9887 /* Return true if we support address space AS for the architecture in effect
9888 and false, otherwise. If LOC is not UNKNOWN_LOCATION then also issue
9889 a respective error. */
9890
9891 bool
avr_addr_space_supported_p(addr_space_t as,location_t loc)9892 avr_addr_space_supported_p (addr_space_t as, location_t loc)
9893 {
9894 if (AVR_TINY)
9895 {
9896 if (loc != UNKNOWN_LOCATION)
9897 error_at (loc, "address spaces are not supported for reduced "
9898 "Tiny devices");
9899 return false;
9900 }
9901 else if (avr_addrspace[as].segment >= avr_n_flash)
9902 {
9903 if (loc != UNKNOWN_LOCATION)
9904 error_at (loc, "address space %qs not supported for devices with "
9905 "flash size up to %d KiB", avr_addrspace[as].name,
9906 64 * avr_n_flash);
9907 return false;
9908 }
9909
9910 return true;
9911 }
9912
9913
9914 /* Implement `TARGET_ADDR_SPACE_DIAGNOSE_USAGE'. */
9915
9916 static void
avr_addr_space_diagnose_usage(addr_space_t as,location_t loc)9917 avr_addr_space_diagnose_usage (addr_space_t as, location_t loc)
9918 {
9919 (void) avr_addr_space_supported_p (as, loc);
9920 }
9921
9922
9923 /* Look if DECL shall be placed in program memory space by
9924 means of attribute `progmem' or some address-space qualifier.
9925 Return non-zero if DECL is data that must end up in Flash and
9926 zero if the data lives in RAM (.bss, .data, .rodata, ...).
9927
9928 Return 2 if DECL is located in 24-bit flash address-space
9929 Return 1 if DECL is located in 16-bit flash address-space
9930 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
9931 Return 0 otherwise */
9932
9933 int
avr_progmem_p(tree decl,tree attributes)9934 avr_progmem_p (tree decl, tree attributes)
9935 {
9936 tree a;
9937
9938 if (TREE_CODE (decl) != VAR_DECL)
9939 return 0;
9940
9941 if (avr_decl_memx_p (decl))
9942 return 2;
9943
9944 if (avr_decl_flash_p (decl))
9945 return 1;
9946
9947 if (NULL_TREE
9948 != lookup_attribute ("progmem", attributes))
9949 return -1;
9950
9951 a = decl;
9952
9953 do
9954 a = TREE_TYPE(a);
9955 while (TREE_CODE (a) == ARRAY_TYPE);
9956
9957 if (a == error_mark_node)
9958 return 0;
9959
9960 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
9961 return -1;
9962
9963 return 0;
9964 }
9965
9966
9967 /* Return true if DECL has attribute `absdata' set. This function should
9968 only be used for AVR_TINY. */
9969
9970 static bool
avr_decl_absdata_p(tree decl,tree attributes)9971 avr_decl_absdata_p (tree decl, tree attributes)
9972 {
9973 return (TREE_CODE (decl) == VAR_DECL
9974 && NULL_TREE != lookup_attribute ("absdata", attributes));
9975 }
9976
9977
9978 /* Scan type TYP for pointer references to address space ASn.
9979 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
9980 the AS are also declared to be CONST.
9981 Otherwise, return the respective address space, i.e. a value != 0. */
9982
9983 static addr_space_t
avr_nonconst_pointer_addrspace(tree typ)9984 avr_nonconst_pointer_addrspace (tree typ)
9985 {
9986 while (ARRAY_TYPE == TREE_CODE (typ))
9987 typ = TREE_TYPE (typ);
9988
9989 if (POINTER_TYPE_P (typ))
9990 {
9991 addr_space_t as;
9992 tree target = TREE_TYPE (typ);
9993
9994 /* Pointer to function: Test the function's return type. */
9995
9996 if (FUNCTION_TYPE == TREE_CODE (target))
9997 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
9998
9999 /* "Ordinary" pointers... */
10000
10001 while (TREE_CODE (target) == ARRAY_TYPE)
10002 target = TREE_TYPE (target);
10003
10004 /* Pointers to non-generic address space must be const. */
10005
10006 as = TYPE_ADDR_SPACE (target);
10007
10008 if (!ADDR_SPACE_GENERIC_P (as)
10009 && !TYPE_READONLY (target)
10010 && avr_addr_space_supported_p (as))
10011 {
10012 return as;
10013 }
10014
10015 /* Scan pointer's target type. */
10016
10017 return avr_nonconst_pointer_addrspace (target);
10018 }
10019
10020 return ADDR_SPACE_GENERIC;
10021 }
10022
10023
10024 /* Sanity check NODE so that all pointers targeting non-generic address spaces
10025 go along with CONST qualifier. Writing to these address spaces should
10026 be detected and complained about as early as possible. */
10027
10028 static bool
avr_pgm_check_var_decl(tree node)10029 avr_pgm_check_var_decl (tree node)
10030 {
10031 const char *reason = NULL;
10032
10033 addr_space_t as = ADDR_SPACE_GENERIC;
10034
10035 gcc_assert (as == 0);
10036
10037 if (avr_log.progmem)
10038 avr_edump ("%?: %t\n", node);
10039
10040 switch (TREE_CODE (node))
10041 {
10042 default:
10043 break;
10044
10045 case VAR_DECL:
10046 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
10047 reason = _("variable");
10048 break;
10049
10050 case PARM_DECL:
10051 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
10052 reason = _("function parameter");
10053 break;
10054
10055 case FIELD_DECL:
10056 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
10057 reason = _("structure field");
10058 break;
10059
10060 case FUNCTION_DECL:
10061 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
10062 as)
10063 reason = _("return type of function");
10064 break;
10065
10066 case POINTER_TYPE:
10067 if (as = avr_nonconst_pointer_addrspace (node), as)
10068 reason = _("pointer");
10069 break;
10070 }
10071
10072 if (reason)
10073 {
10074 if (TYPE_P (node))
10075 error ("pointer targeting address space %qs must be const in %qT",
10076 avr_addrspace[as].name, node);
10077 else
10078 error ("pointer targeting address space %qs must be const"
10079 " in %s %q+D",
10080 avr_addrspace[as].name, reason, node);
10081 }
10082
10083 return reason == NULL;
10084 }
10085
10086
10087 /* Implement `TARGET_INSERT_ATTRIBUTES'. */
10088
static void
avr_insert_attributes (tree node, tree *attributes)
{
  avr_pgm_check_var_decl (node);

  /* With -mmain-is-OS_task, implicitly tag "main" with OS_task unless
     the user already did so.  */
  if (TARGET_MAIN_IS_OS_TASK
      && TREE_CODE (node) == FUNCTION_DECL
      && MAIN_NAME_P (DECL_NAME (node))
      // FIXME: We'd like to also test `flag_hosted' which is only
      // available in the C-ish fronts, hence no such test for now.
      // Instead, we test the return type of "main" which is not exactly
      // the same but good enough.
      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (node)))
      && NULL == lookup_attribute ("OS_task", *attributes))
    {
      *attributes = tree_cons (get_identifier ("OS_task"),
			       NULL, *attributes);
    }

  /* Add the section attribute if the variable is in progmem.  */

  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      addr_space_t as;
      tree node0 = node;

      /* For C++, we have to peel arrays in order to get correct
	 determination of readonlyness.  */

      do
	node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)
	return;

      as = TYPE_ADDR_SPACE (TREE_TYPE (node));

      /* Data placed in flash must be const: complain otherwise, naming
	 whatever mechanism (progmem attribute or address space) put it
	 there.  */
      if (!TYPE_READONLY (node0)
	  && !TREE_READONLY (node))
	{
	  const char *reason = "__attribute__((progmem))";

	  if (!ADDR_SPACE_GENERIC_P (as))
	    reason = avr_addrspace[as].name;

	  if (avr_log.progmem)
	    avr_edump ("\n%?: %t\n%t\n", node, node0);

	  error ("variable %q+D must be const in order to be put into"
		 " read-only section by means of %qs", node, reason);
	}
    }
}
10145
10146
10147 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
10148 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
10149 /* Track need of __do_clear_bss. */
10150
void
avr_asm_output_aligned_decl_common (FILE * stream,
				    tree decl,
				    const char *name,
				    unsigned HOST_WIDE_INT size,
				    unsigned int align, bool local_p)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  /* Variables with attribute io / io_low / address get no storage:
     emit an absolute symbol definition (NAME = <address>) instead.  */
  if (mem != NULL_RTX && MEM_P (mem)
      && SYMBOL_REF_P ((symbol = XEXP (mem, 0)))
      && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
    {
      if (!local_p)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      if (SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS)
	{
	  assemble_name (stream, name);
	  fprintf (stream, " = %ld\n",
		   (long) INTVAL (avr_eval_addr_attrib (symbol)));
	}
      else if (local_p)
	/* An io symbol without an address cannot be emitted as a
	   static (local) definition.  */
	error_at (DECL_SOURCE_LOCATION (decl),
		  "static IO declaration for %q+D needs an address", decl);
      return;
    }

  /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
     There is no need to trigger __do_clear_bss code for them.  */

  if (!STR_PREFIX_P (name, "__gnu_lto"))
    avr_need_clear_bss_p = true;

  if (local_p)
    ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
  else
    ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
}
10194
10195 void
avr_asm_asm_output_aligned_bss(FILE * file,tree decl,const char * name,unsigned HOST_WIDE_INT size,int align,void (* default_func)(FILE *,tree,const char *,unsigned HOST_WIDE_INT,int))10196 avr_asm_asm_output_aligned_bss (FILE *file, tree decl, const char *name,
10197 unsigned HOST_WIDE_INT size, int align,
10198 void (*default_func)
10199 (FILE *, tree, const char *,
10200 unsigned HOST_WIDE_INT, int))
10201 {
10202 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
10203 rtx symbol;
10204
10205 if (mem != NULL_RTX && MEM_P (mem)
10206 && SYMBOL_REF_P ((symbol = XEXP (mem, 0)))
10207 && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
10208 {
10209 if (!(SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS))
10210 error_at (DECL_SOURCE_LOCATION (decl),
10211 "IO definition for %q+D needs an address", decl);
10212 avr_asm_output_aligned_decl_common (file, decl, name, size, align, false);
10213 }
10214 else
10215 default_func (file, decl, name, size, align);
10216 }
10217
10218
10219 /* Unnamed section callback for data_section
10220 to track need of __do_copy_data. */
10221
10222 static void
avr_output_data_section_asm_op(const void * data)10223 avr_output_data_section_asm_op (const void *data)
10224 {
10225 avr_need_copy_data_p = true;
10226
10227 /* Dispatch to default. */
10228 output_section_asm_op (data);
10229 }
10230
10231
10232 /* Unnamed section callback for bss_section
10233 to track need of __do_clear_bss. */
10234
10235 static void
avr_output_bss_section_asm_op(const void * data)10236 avr_output_bss_section_asm_op (const void *data)
10237 {
10238 avr_need_clear_bss_p = true;
10239
10240 /* Dispatch to default. */
10241 output_section_asm_op (data);
10242 }
10243
10244
10245 /* Unnamed section callback for progmem*.data sections. */
10246
10247 static void
avr_output_progmem_section_asm_op(const void * data)10248 avr_output_progmem_section_asm_op (const void *data)
10249 {
10250 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
10251 (const char*) data);
10252 }
10253
10254
/* Implement `TARGET_ASM_INIT_SECTIONS'.  */

static void
avr_asm_init_sections (void)
{
  /* Override section callbacks to keep track of `avr_need_clear_bss_p'
     resp. `avr_need_copy_data_p'.  If flash is not mapped to RAM then
     we have also to track .rodata because it is located in RAM then.  */

  /* Mind the #if / if interplay below: when the linker supports
     .rodata-in-flash (AVRXMEGA3), the readonly_data_section callback is
     only overridden for devices whose flash is not visible in the RAM
     address space (flash_pm_offset == 0).  Without that linker support
     it is overridden unconditionally.  The data_section assignment that
     follows is NOT governed by the `if'.  */
#if defined HAVE_LD_AVR_AVRXMEGA3_RODATA_IN_FLASH
  if (avr_arch->flash_pm_offset == 0)
#endif
    readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;

  data_section->unnamed.callback = avr_output_data_section_asm_op;
  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
}
10271
10272
10273 /* Implement `TARGET_ASM_NAMED_SECTION'. */
10274 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
10275
10276 static void
avr_asm_named_section(const char * name,unsigned int flags,tree decl)10277 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
10278 {
10279 if (flags & AVR_SECTION_PROGMEM)
10280 {
10281 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
10282 const char *old_prefix = ".rodata";
10283 const char *new_prefix = avr_addrspace[as].section_name;
10284
10285 if (STR_PREFIX_P (name, old_prefix))
10286 {
10287 const char *sname = ACONCAT ((new_prefix,
10288 name + strlen (old_prefix), NULL));
10289 default_elf_asm_named_section (sname, flags, decl);
10290 return;
10291 }
10292
10293 default_elf_asm_named_section (new_prefix, flags, decl);
10294 return;
10295 }
10296
10297 if (!avr_need_copy_data_p)
10298 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
10299 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
10300
10301 if (!avr_need_copy_data_p
10302 #if defined HAVE_LD_AVR_AVRXMEGA3_RODATA_IN_FLASH
10303 && avr_arch->flash_pm_offset == 0
10304 #endif
10305 )
10306 avr_need_copy_data_p = (STR_PREFIX_P (name, ".rodata")
10307 || STR_PREFIX_P (name, ".gnu.linkonce.r"));
10308
10309 if (!avr_need_clear_bss_p)
10310 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
10311
10312 default_elf_asm_named_section (name, flags, decl);
10313 }
10314
10315
10316 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
10317
10318 static unsigned int
avr_section_type_flags(tree decl,const char * name,int reloc)10319 avr_section_type_flags (tree decl, const char *name, int reloc)
10320 {
10321 unsigned int flags = default_section_type_flags (decl, name, reloc);
10322
10323 if (STR_PREFIX_P (name, ".noinit"))
10324 {
10325 if (decl && TREE_CODE (decl) == VAR_DECL
10326 && DECL_INITIAL (decl) == NULL_TREE)
10327 flags |= SECTION_BSS; /* @nobits */
10328 else
10329 warning (0, "only uninitialized variables can be placed in the "
10330 ".noinit section");
10331 }
10332
10333 if (decl && DECL_P (decl)
10334 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
10335 {
10336 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
10337
10338 /* Attribute progmem puts data in generic address space.
10339 Set section flags as if it was in __flash to get the right
10340 section prefix in the remainder. */
10341
10342 if (ADDR_SPACE_GENERIC_P (as))
10343 as = ADDR_SPACE_FLASH;
10344
10345 flags |= as * SECTION_MACH_DEP;
10346 flags &= ~SECTION_WRITE;
10347 flags &= ~SECTION_BSS;
10348 }
10349
10350 return flags;
10351 }
10352
10353
10354 /* A helper for the next function. NODE is a decl that is associated with
10355 a symbol. Return TRUE if the respective object may be accessed by LDS.
10356 There might still be other reasons for why LDS is not appropriate.
10357 This function is only appropriate for AVR_TINY. */
10358
10359 static bool
avr_decl_maybe_lds_p(tree node)10360 avr_decl_maybe_lds_p (tree node)
10361 {
10362 if (!node
10363 || TREE_CODE (node) != VAR_DECL
10364 || DECL_SECTION_NAME (node) != NULL)
10365 return false;
10366
10367 /* Don't use LDS for objects that go to .rodata. The current default
10368 linker description file still locates .rodata in RAM, but this is not
10369 a must. A better linker script would just keep .rodata in flash and
10370 add an offset of 0x4000 to the VMA. Hence avoid LDS for such data. */
10371
10372 if (TREE_READONLY (node))
10373 return false;
10374
10375 // C++ requires peeling arrays.
10376
10377 do
10378 node = TREE_TYPE (node);
10379 while (ARRAY_TYPE == TREE_CODE (node));
10380
10381 return (node != error_mark_node
10382 && !TYPE_READONLY (node));
10383 }
10384
10385
/* Implement `TARGET_ENCODE_SECTION_INFO'.  */

static void
avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
{
  /* Attribute record ("io", "io_low" or "address") that supplies an
     address for DECL, if any.  Also consulted by the AVR_TINY code at
     the end of this function.  */
  tree addr_attr = NULL_TREE;

  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  if (new_decl_p
      && decl && DECL_P (decl)
      && !DECL_EXTERNAL (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      if (!TREE_READONLY (decl))
        {
          // This might happen with C++ if stuff needs constructing.
          error ("variable %q+D with dynamic initialization put "
                 "into program memory area", decl);
        }
      else if (NULL_TREE == DECL_INITIAL (decl))
        {
          // Don't warn for (implicit) aliases like in PR80462.
          tree asmname = DECL_ASSEMBLER_NAME (decl);
          varpool_node *node = varpool_node::get_for_asmname (asmname);
          bool alias_p = node && node->alias;

          if (!alias_p)
            warning (OPT_Wuninitialized, "uninitialized variable %q+D put "
                     "into program memory area", decl);
        }
    }

  default_encode_section_info (decl, rtl, new_decl_p);

  /* Tag the symbol with its address space and with io / address
     attribute information.  */

  if (decl && DECL_P (decl)
      && TREE_CODE (decl) != FUNCTION_DECL
      && MEM_P (rtl)
      && SYMBOL_REF_P (XEXP (rtl, 0)))
    {
      rtx sym = XEXP (rtl, 0);
      tree type = TREE_TYPE (decl);
      tree attr = DECL_ATTRIBUTES (decl);
      /* Bail out for erroneous declarations; note this also skips the
         AVR_TINY handling further below.  */
      if (type == error_mark_node)
        return;

      addr_space_t as = TYPE_ADDR_SPACE (type);

      /* PSTR strings are in generic space but located in flash:
         patch address space.  */

      if (!AVR_TINY && avr_progmem_p (decl, attr) == -1)
        as = ADDR_SPACE_FLASH;

      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);

      tree io_low_attr = lookup_attribute ("io_low", attr);
      tree io_attr = lookup_attribute ("io", attr);

      /* NOTE(review): when "io_low" carries an argument, ADDR_ATTR is
         set to IO_ATTR rather than IO_LOW_ATTR, so the io_low address is
         only seen when a plain "io" attribute is present as well.  Looks
         like a typo -- confirm against upstream before relying on it.  */
      if (io_low_attr
          && TREE_VALUE (io_low_attr) && TREE_VALUE (TREE_VALUE (io_low_attr)))
        addr_attr = io_attr;
      else if (io_attr
               && TREE_VALUE (io_attr) && TREE_VALUE (TREE_VALUE (io_attr)))
        addr_attr = io_attr;
      else
        addr_attr = lookup_attribute ("address", attr);

      /* "io_low", or "io" with an address that passes
         low_io_address_operand, marks the symbol SYMBOL_FLAG_IO_LOW.  */
      if (io_low_attr
          || (io_attr && addr_attr
              && low_io_address_operand
                  (GEN_INT (TREE_INT_CST_LOW
                            (TREE_VALUE (TREE_VALUE (addr_attr)))), QImode)))
        SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO_LOW;
      if (io_attr || io_low_attr)
        SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO;
      /* If we have an (io) address attribute specification, but the variable
         is external, treat the address as only a tentative definition
         to be used to determine if an io port is in the lower range, but
         don't use the exact value for constant propagation.  */
      if (addr_attr && !DECL_EXTERNAL (decl))
        SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_ADDRESS;
    }

  /* AVR_TINY-specific tagging: progmem and absdata symbols.  */

  if (AVR_TINY
      && decl
      && VAR_DECL == TREE_CODE (decl)
      && MEM_P (rtl)
      && SYMBOL_REF_P (XEXP (rtl, 0)))
    {
      rtx sym = XEXP (rtl, 0);
      bool progmem_p = avr_progmem_p (decl, DECL_ATTRIBUTES (decl)) == -1;

      if (progmem_p)
        {
          // Tag symbols for addition of 0x4000 (avr_arch->flash_pm_offset).
          SYMBOL_REF_FLAGS (sym) |= AVR_SYMBOL_FLAG_TINY_PM;
        }

      if (avr_decl_absdata_p (decl, DECL_ATTRIBUTES (decl))
          || (TARGET_ABSDATA
              && !progmem_p
              && !addr_attr
              && avr_decl_maybe_lds_p (decl))
          || (addr_attr
              // If addr_attr is non-null, it has an argument.  Peek into it.
              && TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (addr_attr))) < 0xc0))
        {
          // May be accessed by LDS / STS.
          SYMBOL_REF_FLAGS (sym) |= AVR_SYMBOL_FLAG_TINY_ABSDATA;
        }

      if (progmem_p
          && avr_decl_absdata_p (decl, DECL_ATTRIBUTES (decl)))
        {
          error ("%q+D has incompatible attributes %qs and %qs",
                 decl, "progmem", "absdata");
        }
    }
}
10507
10508
/* Implement `TARGET_ASM_SELECT_SECTION' */

static section *
avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
{
  /* Start from the generic ELF choice and only divert progmem data.  */
  section * sect = default_elf_select_section (decl, reloc, align);

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* __progmem__ goes in generic space but shall be allocated to
         .progmem.data */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      /* A named section with a ".rodata" prefix is rewritten to the
         section prefix of the target address space, keeping any
         suffix (e.g. ".rodata.foo" -> ".progmem.data.foo").  */
      if (sect->common.flags & SECTION_NAMED)
        {
          const char * name = sect->named.name;
          const char * old_prefix = ".rodata";
          const char * new_prefix = avr_addrspace[as].section_name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *sname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              /* Drop SECTION_DECLARED so the renamed section gets its
                 directive emitted again.  */
              return get_section (sname,
                                  sect->common.flags & ~SECTION_DECLARED,
                                  sect->named.decl);
            }
        }

      /* Otherwise use the unnamed progmem section for this address
         space, creating it lazily on first use.  */
      if (!progmem_section[as])
        {
          progmem_section[as]
            = get_unnamed_section (0, avr_output_progmem_section_asm_op,
                                   avr_addrspace[as].section_name);
        }

      return progmem_section[as];
    }

  return sect;
}
10555
/* Implement `TARGET_ASM_FILE_START'.  */
/* Outputs some text at the start of each assembler file.  */

static void
avr_file_start (void)
{
  /* Difference between an SFR's memory address and its I/O address;
     subtracted below so the emitted symbols are plain I/O addresses
     usable with IN / OUT.  */
  int sfr_offset = avr_arch->sfr_offset;

  if (avr_arch->asm_only)
    error ("architecture %qs supported for assembler only", avr_mmcu);

  default_file_start ();

  /* Print I/O addresses of some SFRs used with IN and OUT.  Each symbol
     is only emitted when the device actually has that register.  */

  if (AVR_HAVE_SPH)
    fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);

  fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
  fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
  if (AVR_HAVE_RAMPZ)
    fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
  if (AVR_HAVE_RAMPY)
    fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
  if (AVR_HAVE_RAMPX)
    fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
  if (AVR_HAVE_RAMPD)
    fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
  if (AVR_XMEGA || AVR_TINY)
    fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
  /* Register numbers of the fixed temporary and zero registers.  */
  fprintf (asm_out_file, "__tmp_reg__ = %d\n", AVR_TMP_REGNO);
  fprintf (asm_out_file, "__zero_reg__ = %d\n", AVR_ZERO_REGNO);
}
10589
10590
10591 /* Implement `TARGET_ASM_FILE_END'. */
10592 /* Outputs to the stdio stream FILE some
10593 appropriate text to go at the end of an assembler file. */
10594
10595 static void
avr_file_end(void)10596 avr_file_end (void)
10597 {
10598 /* Output these only if there is anything in the
10599 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
10600 input section(s) - some code size can be saved by not
10601 linking in the initialization code from libgcc if resp.
10602 sections are empty, see PR18145. */
10603
10604 if (avr_need_copy_data_p)
10605 fputs (".global __do_copy_data\n", asm_out_file);
10606
10607 if (avr_need_clear_bss_p)
10608 fputs (".global __do_clear_bss\n", asm_out_file);
10609 }
10610
10611
/* Worker function for `ADJUST_REG_ALLOC_ORDER'.  */
/* Choose the order in which to allocate hard registers for
   pseudo-registers local to a basic block.

   Store the desired register order in the array `reg_alloc_order'.
   Element 0 should be the register to allocate first; element 1, the
   next register; and so on.

   All tables below list 36 entries: hard registers 0..31 plus the
   fake registers 32..35.  */

void
avr_adjust_reg_alloc_order (void)
{
  /* Default allocation order.  */
  static const int order_0[] =
    {
      24, 25,
      18, 19, 20, 21, 22, 23,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  /* Default order for AVR_TINY devices, which only implement r16..r31;
     the low registers come last.  */
  static const int tiny_order_0[] = {
    20, 21,
    22, 23,
    24, 25,
    30, 31,
    26, 27,
    28, 29,
    19, 18,
    16, 17,
    32, 33, 34, 35,
    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
  };
  /* Order selected by -morder1.  */
  static const int order_1[] =
    {
      18, 19, 20, 21, 22, 23, 24, 25,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  /* -morder1 variant for AVR_TINY.  */
  static const int tiny_order_1[] = {
    22, 23,
    24, 25,
    30, 31,
    26, 27,
    28, 29,
    21, 20, 19, 18,
    16, 17,
    32, 33, 34, 35,
    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
  };
  /* Order selected by -morder2.  */
  static const int order_2[] =
    {
      25, 24, 23, 22, 21, 20, 19, 18,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      1, 0,
      32, 33, 34, 35
    };

  /* Select specific register allocation order.
     Tiny Core (ATtiny4/5/9/10/20/40) devices have only 16 registers,
     so different allocation order should be used.
     NOTE(review): for TARGET_ORDER_2 on AVR_TINY the default table
     tiny_order_0 is reused -- there is no tiny_order_2.  Presumably
     intentional; confirm before "fixing".  */

  const int *order = (TARGET_ORDER_1 ? (AVR_TINY ? tiny_order_1 : order_1)
                      : TARGET_ORDER_2 ? (AVR_TINY ? tiny_order_0 : order_2)
                      : (AVR_TINY ? tiny_order_0 : order_0));

  /* All tables have the same length, so order_0 can size the copy.  */
  for (size_t i = 0; i < ARRAY_SIZE (order_0); ++i)
    reg_alloc_order[i] = order[i];
}
10686
10687
10688 /* Implement `TARGET_REGISTER_MOVE_COST' */
10689
10690 static int
avr_register_move_cost(machine_mode mode ATTRIBUTE_UNUSED,reg_class_t from,reg_class_t to)10691 avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
10692 reg_class_t from, reg_class_t to)
10693 {
10694 return (from == STACK_REG ? 6
10695 : to == STACK_REG ? 12
10696 : 2);
10697 }
10698
10699
10700 /* Implement `TARGET_MEMORY_MOVE_COST' */
10701
10702 static int
avr_memory_move_cost(machine_mode mode,reg_class_t rclass ATTRIBUTE_UNUSED,bool in ATTRIBUTE_UNUSED)10703 avr_memory_move_cost (machine_mode mode,
10704 reg_class_t rclass ATTRIBUTE_UNUSED,
10705 bool in ATTRIBUTE_UNUSED)
10706 {
10707 return (mode == QImode ? 2
10708 : mode == HImode ? 4
10709 : mode == SImode ? 8
10710 : mode == SFmode ? 8
10711 : 16);
10712 }
10713
10714
10715 /* Cost for mul highpart. X is a LSHIFTRT, i.e. the outer TRUNCATE is
10716 already stripped off. */
10717
10718 static int
avr_mul_highpart_cost(rtx x,int)10719 avr_mul_highpart_cost (rtx x, int)
10720 {
10721 if (AVR_HAVE_MUL
10722 && LSHIFTRT == GET_CODE (x)
10723 && MULT == GET_CODE (XEXP (x, 0))
10724 && CONST_INT_P (XEXP (x, 1)))
10725 {
10726 // This is the wider mode.
10727 machine_mode mode = GET_MODE (x);
10728
10729 // The middle-end might still have PR81444, i.e. it is calling the cost
10730 // functions with strange modes. Fix this now by also considering
10731 // PSImode (should actually be SImode instead).
10732 if (HImode == mode || PSImode == mode || SImode == mode)
10733 {
10734 return COSTS_N_INSNS (2);
10735 }
10736 }
10737
10738 return 10000;
10739 }
10740
10741
10742 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
10743 cost of an RTX operand given its context. X is the rtx of the
10744 operand, MODE is its mode, and OUTER is the rtx_code of this
10745 operand's parent operator. */
10746
10747 static int
avr_operand_rtx_cost(rtx x,machine_mode mode,enum rtx_code outer,int opno,bool speed)10748 avr_operand_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer,
10749 int opno, bool speed)
10750 {
10751 enum rtx_code code = GET_CODE (x);
10752 int total;
10753
10754 switch (code)
10755 {
10756 case REG:
10757 case SUBREG:
10758 return 0;
10759
10760 case CONST_INT:
10761 case CONST_FIXED:
10762 case CONST_DOUBLE:
10763 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
10764
10765 default:
10766 break;
10767 }
10768
10769 total = 0;
10770 avr_rtx_costs (x, mode, outer, opno, &total, speed);
10771 return total;
10772 }
10773
10774 /* Worker function for AVR backend's rtx_cost function.
10775 X is rtx expression whose cost is to be calculated.
10776 Return true if the complete cost has been computed.
10777 Return false if subexpressions should be scanned.
10778 In either case, *TOTAL contains the cost result. */
10779
10780 static bool
avr_rtx_costs_1(rtx x,machine_mode mode,int outer_code,int opno ATTRIBUTE_UNUSED,int * total,bool speed)10781 avr_rtx_costs_1 (rtx x, machine_mode mode, int outer_code,
10782 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
10783 {
10784 enum rtx_code code = GET_CODE (x);
10785 HOST_WIDE_INT val;
10786
10787 switch (code)
10788 {
10789 case CONST_INT:
10790 case CONST_FIXED:
10791 case CONST_DOUBLE:
10792 case SYMBOL_REF:
10793 case CONST:
10794 case LABEL_REF:
10795 /* Immediate constants are as cheap as registers. */
10796 *total = 0;
10797 return true;
10798
10799 case MEM:
10800 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10801 return true;
10802
10803 case NEG:
10804 switch (mode)
10805 {
10806 case E_QImode:
10807 case E_SFmode:
10808 *total = COSTS_N_INSNS (1);
10809 break;
10810
10811 case E_HImode:
10812 case E_PSImode:
10813 case E_SImode:
10814 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
10815 break;
10816
10817 default:
10818 return false;
10819 }
10820 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10821 return true;
10822
10823 case ABS:
10824 switch (mode)
10825 {
10826 case E_QImode:
10827 case E_SFmode:
10828 *total = COSTS_N_INSNS (1);
10829 break;
10830
10831 default:
10832 return false;
10833 }
10834 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10835 return true;
10836
10837 case NOT:
10838 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10839 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10840 return true;
10841
10842 case ZERO_EXTEND:
10843 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
10844 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
10845 *total += avr_operand_rtx_cost (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
10846 code, 0, speed);
10847 return true;
10848
10849 case SIGN_EXTEND:
10850 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
10851 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
10852 *total += avr_operand_rtx_cost (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
10853 code, 0, speed);
10854 return true;
10855
10856 case PLUS:
10857 switch (mode)
10858 {
10859 case E_QImode:
10860 if (AVR_HAVE_MUL
10861 && MULT == GET_CODE (XEXP (x, 0))
10862 && register_operand (XEXP (x, 1), QImode))
10863 {
10864 /* multiply-add */
10865 *total = COSTS_N_INSNS (speed ? 4 : 3);
10866 /* multiply-add with constant: will be split and load constant. */
10867 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10868 *total = COSTS_N_INSNS (1) + *total;
10869 return true;
10870 }
10871 *total = COSTS_N_INSNS (1);
10872 if (!CONST_INT_P (XEXP (x, 1)))
10873 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10874 break;
10875
10876 case E_HImode:
10877 if (AVR_HAVE_MUL
10878 && (MULT == GET_CODE (XEXP (x, 0))
10879 || ASHIFT == GET_CODE (XEXP (x, 0)))
10880 && register_operand (XEXP (x, 1), HImode)
10881 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
10882 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
10883 {
10884 /* multiply-add */
10885 *total = COSTS_N_INSNS (speed ? 5 : 4);
10886 /* multiply-add with constant: will be split and load constant. */
10887 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10888 *total = COSTS_N_INSNS (1) + *total;
10889 return true;
10890 }
10891 if (!CONST_INT_P (XEXP (x, 1)))
10892 {
10893 *total = COSTS_N_INSNS (2);
10894 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10895 speed);
10896 }
10897 else if (IN_RANGE (INTVAL (XEXP (x, 1)), -63, 63))
10898 *total = COSTS_N_INSNS (1);
10899 else
10900 *total = COSTS_N_INSNS (2);
10901 break;
10902
10903 case E_PSImode:
10904 if (!CONST_INT_P (XEXP (x, 1)))
10905 {
10906 *total = COSTS_N_INSNS (3);
10907 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10908 speed);
10909 }
10910 else if (IN_RANGE (INTVAL (XEXP (x, 1)), -63, 63))
10911 *total = COSTS_N_INSNS (2);
10912 else
10913 *total = COSTS_N_INSNS (3);
10914 break;
10915
10916 case E_SImode:
10917 if (!CONST_INT_P (XEXP (x, 1)))
10918 {
10919 *total = COSTS_N_INSNS (4);
10920 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10921 speed);
10922 }
10923 else if (IN_RANGE (INTVAL (XEXP (x, 1)), -63, 63))
10924 *total = COSTS_N_INSNS (1);
10925 else
10926 *total = COSTS_N_INSNS (4);
10927 break;
10928
10929 default:
10930 return false;
10931 }
10932 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10933 return true;
10934
10935 case MINUS:
10936 if (AVR_HAVE_MUL
10937 && QImode == mode
10938 && register_operand (XEXP (x, 0), QImode)
10939 && MULT == GET_CODE (XEXP (x, 1)))
10940 {
10941 /* multiply-sub */
10942 *total = COSTS_N_INSNS (speed ? 4 : 3);
10943 /* multiply-sub with constant: will be split and load constant. */
10944 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
10945 *total = COSTS_N_INSNS (1) + *total;
10946 return true;
10947 }
10948 if (AVR_HAVE_MUL
10949 && HImode == mode
10950 && register_operand (XEXP (x, 0), HImode)
10951 && (MULT == GET_CODE (XEXP (x, 1))
10952 || ASHIFT == GET_CODE (XEXP (x, 1)))
10953 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
10954 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
10955 {
10956 /* multiply-sub */
10957 *total = COSTS_N_INSNS (speed ? 5 : 4);
10958 /* multiply-sub with constant: will be split and load constant. */
10959 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
10960 *total = COSTS_N_INSNS (1) + *total;
10961 return true;
10962 }
10963 /* FALLTHRU */
10964 case AND:
10965 case IOR:
10966 if (IOR == code
10967 && HImode == mode
10968 && ASHIFT == GET_CODE (XEXP (x, 0)))
10969 {
10970 *total = COSTS_N_INSNS (2);
10971 // Just a rough estimate. If we see no sign- or zero-extend,
10972 // then increase the cost a little bit.
10973 if (REG_P (XEXP (XEXP (x, 0), 0)))
10974 *total += COSTS_N_INSNS (1);
10975 if (REG_P (XEXP (x, 1)))
10976 *total += COSTS_N_INSNS (1);
10977 return true;
10978 }
10979 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10980 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10981 if (!CONST_INT_P (XEXP (x, 1)))
10982 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10983 return true;
10984
10985 case XOR:
10986 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10987 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10988 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10989 return true;
10990
10991 case MULT:
10992 switch (mode)
10993 {
10994 case E_QImode:
10995 if (AVR_HAVE_MUL)
10996 *total = COSTS_N_INSNS (!speed ? 3 : 4);
10997 else if (!speed)
10998 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10999 else
11000 return false;
11001 break;
11002
11003 case E_HImode:
11004 if (AVR_HAVE_MUL)
11005 {
11006 rtx op0 = XEXP (x, 0);
11007 rtx op1 = XEXP (x, 1);
11008 enum rtx_code code0 = GET_CODE (op0);
11009 enum rtx_code code1 = GET_CODE (op1);
11010 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
11011 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
11012
11013 if (ex0
11014 && (u8_operand (op1, HImode)
11015 || s8_operand (op1, HImode)))
11016 {
11017 *total = COSTS_N_INSNS (!speed ? 4 : 6);
11018 return true;
11019 }
11020 if (ex0
11021 && register_operand (op1, HImode))
11022 {
11023 *total = COSTS_N_INSNS (!speed ? 5 : 8);
11024 return true;
11025 }
11026 else if (ex0 || ex1)
11027 {
11028 *total = COSTS_N_INSNS (!speed ? 3 : 5);
11029 return true;
11030 }
11031 else if (register_operand (op0, HImode)
11032 && (u8_operand (op1, HImode)
11033 || s8_operand (op1, HImode)))
11034 {
11035 *total = COSTS_N_INSNS (!speed ? 6 : 9);
11036 return true;
11037 }
11038 else
11039 *total = COSTS_N_INSNS (!speed ? 7 : 10);
11040 }
11041 else if (!speed)
11042 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
11043 else
11044 return false;
11045 break;
11046
11047 case E_PSImode:
11048 if (!speed)
11049 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
11050 else
11051 *total = 10;
11052 break;
11053
11054 case E_SImode:
11055 case E_DImode:
11056 if (AVR_HAVE_MUL)
11057 {
11058 if (!speed)
11059 {
11060 /* Add some additional costs besides CALL like moves etc. */
11061
11062 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
11063 }
11064 else
11065 {
11066 /* Just a rough estimate. Even with -O2 we don't want bulky
11067 code expanded inline. */
11068
11069 *total = COSTS_N_INSNS (25);
11070 }
11071 }
11072 else
11073 {
11074 if (speed)
11075 *total = COSTS_N_INSNS (300);
11076 else
11077 /* Add some additional costs besides CALL like moves etc. */
11078 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
11079 }
11080
11081 if (mode == DImode)
11082 *total *= 2;
11083
11084 return true;
11085
11086 default:
11087 return false;
11088 }
11089 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
11090 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
11091 return true;
11092
11093 case DIV:
11094 case MOD:
11095 case UDIV:
11096 case UMOD:
11097 if (!speed)
11098 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
11099 else
11100 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
11101 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
11102 /* For div/mod with const-int divisor we have at least the cost of
11103 loading the divisor. */
11104 if (CONST_INT_P (XEXP (x, 1)))
11105 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
      /* Add some overall penalty for clobbering and moving around registers */
11107 *total += COSTS_N_INSNS (2);
11108 return true;
11109
11110 case ROTATE:
11111 switch (mode)
11112 {
11113 case E_QImode:
11114 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
11115 *total = COSTS_N_INSNS (1);
11116
11117 break;
11118
11119 case E_HImode:
11120 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
11121 *total = COSTS_N_INSNS (3);
11122
11123 break;
11124
11125 case E_SImode:
11126 if (CONST_INT_P (XEXP (x, 1)))
11127 switch (INTVAL (XEXP (x, 1)))
11128 {
11129 case 8:
11130 case 24:
11131 *total = COSTS_N_INSNS (5);
11132 break;
11133 case 16:
11134 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
11135 break;
11136 }
11137 break;
11138
11139 default:
11140 return false;
11141 }
11142 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
11143 return true;
11144
11145 case ASHIFT:
11146 switch (mode)
11147 {
11148 case E_QImode:
11149 if (!CONST_INT_P (XEXP (x, 1)))
11150 {
11151 *total = COSTS_N_INSNS (!speed ? 4 : 17);
11152 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11153 speed);
11154 }
11155 else
11156 {
11157 val = INTVAL (XEXP (x, 1));
11158 if (val == 7)
11159 *total = COSTS_N_INSNS (3);
11160 else if (val >= 0 && val <= 7)
11161 *total = COSTS_N_INSNS (val);
11162 else
11163 *total = COSTS_N_INSNS (1);
11164 }
11165 break;
11166
11167 case E_HImode:
11168 if (AVR_HAVE_MUL)
11169 {
11170 if (const_2_to_7_operand (XEXP (x, 1), HImode)
11171 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
11172 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
11173 {
11174 *total = COSTS_N_INSNS (!speed ? 4 : 6);
11175 return true;
11176 }
11177 }
11178
11179 if (const1_rtx == (XEXP (x, 1))
11180 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
11181 {
11182 *total = COSTS_N_INSNS (2);
11183 return true;
11184 }
11185
11186 if (!CONST_INT_P (XEXP (x, 1)))
11187 {
11188 *total = COSTS_N_INSNS (!speed ? 5 : 41);
11189 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11190 speed);
11191 }
11192 else
11193 switch (INTVAL (XEXP (x, 1)))
11194 {
11195 case 0:
11196 *total = 0;
11197 break;
11198 case 1:
11199 case 8:
11200 *total = COSTS_N_INSNS (2);
11201 break;
11202 case 9:
11203 *total = COSTS_N_INSNS (3);
11204 break;
11205 case 2:
11206 case 3:
11207 case 10:
11208 case 15:
11209 *total = COSTS_N_INSNS (4);
11210 break;
11211 case 7:
11212 case 11:
11213 case 12:
11214 *total = COSTS_N_INSNS (5);
11215 break;
11216 case 4:
11217 *total = COSTS_N_INSNS (!speed ? 5 : 8);
11218 break;
11219 case 6:
11220 *total = COSTS_N_INSNS (!speed ? 5 : 9);
11221 break;
11222 case 5:
11223 *total = COSTS_N_INSNS (!speed ? 5 : 10);
11224 break;
11225 default:
11226 *total = COSTS_N_INSNS (!speed ? 5 : 41);
11227 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11228 speed);
11229 }
11230 break;
11231
11232 case E_PSImode:
11233 if (!CONST_INT_P (XEXP (x, 1)))
11234 {
11235 *total = COSTS_N_INSNS (!speed ? 6 : 73);
11236 }
11237 else
11238 switch (INTVAL (XEXP (x, 1)))
11239 {
11240 case 0:
11241 *total = 0;
11242 break;
11243 case 1:
11244 case 8:
11245 case 16:
11246 *total = COSTS_N_INSNS (3);
11247 break;
11248 case 23:
11249 *total = COSTS_N_INSNS (5);
11250 break;
11251 default:
11252 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
11253 break;
11254 }
11255 break;
11256
11257 case E_SImode:
11258 if (!CONST_INT_P (XEXP (x, 1)))
11259 {
11260 *total = COSTS_N_INSNS (!speed ? 7 : 113);
11261 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11262 speed);
11263 }
11264 else
11265 switch (INTVAL (XEXP (x, 1)))
11266 {
11267 case 0:
11268 *total = 0;
11269 break;
11270 case 24:
11271 *total = COSTS_N_INSNS (3);
11272 break;
11273 case 1:
11274 case 8:
11275 case 16:
11276 *total = COSTS_N_INSNS (4);
11277 break;
11278 case 31:
11279 *total = COSTS_N_INSNS (6);
11280 break;
11281 case 2:
11282 *total = COSTS_N_INSNS (!speed ? 7 : 8);
11283 break;
11284 default:
11285 *total = COSTS_N_INSNS (!speed ? 7 : 113);
11286 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11287 speed);
11288 }
11289 break;
11290
11291 default:
11292 return false;
11293 }
11294 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
11295 return true;
11296
11297 case ASHIFTRT:
11298 switch (mode)
11299 {
11300 case E_QImode:
11301 if (!CONST_INT_P (XEXP (x, 1)))
11302 {
11303 *total = COSTS_N_INSNS (!speed ? 4 : 17);
11304 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11305 speed);
11306 }
11307 else
11308 {
11309 val = INTVAL (XEXP (x, 1));
11310 if (val == 6)
11311 *total = COSTS_N_INSNS (4);
11312 else if (val == 7)
11313 *total = COSTS_N_INSNS (2);
11314 else if (val >= 0 && val <= 7)
11315 *total = COSTS_N_INSNS (val);
11316 else
11317 *total = COSTS_N_INSNS (1);
11318 }
11319 break;
11320
11321 case E_HImode:
11322 if (!CONST_INT_P (XEXP (x, 1)))
11323 {
11324 *total = COSTS_N_INSNS (!speed ? 5 : 41);
11325 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11326 speed);
11327 }
11328 else
11329 switch (INTVAL (XEXP (x, 1)))
11330 {
11331 case 0:
11332 *total = 0;
11333 break;
11334 case 1:
11335 *total = COSTS_N_INSNS (2);
11336 break;
11337 case 15:
11338 *total = COSTS_N_INSNS (3);
11339 break;
11340 case 2:
11341 case 7:
11342 case 8:
11343 case 9:
11344 *total = COSTS_N_INSNS (4);
11345 break;
11346 case 10:
11347 case 14:
11348 *total = COSTS_N_INSNS (5);
11349 break;
11350 case 11:
11351 *total = COSTS_N_INSNS (!speed ? 5 : 6);
11352 break;
11353 case 12:
11354 *total = COSTS_N_INSNS (!speed ? 5 : 7);
11355 break;
11356 case 6:
11357 case 13:
11358 *total = COSTS_N_INSNS (!speed ? 5 : 8);
11359 break;
11360 default:
11361 *total = COSTS_N_INSNS (!speed ? 5 : 41);
11362 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11363 speed);
11364 }
11365 break;
11366
11367 case E_PSImode:
11368 if (!CONST_INT_P (XEXP (x, 1)))
11369 {
11370 *total = COSTS_N_INSNS (!speed ? 6 : 73);
11371 }
11372 else
11373 switch (INTVAL (XEXP (x, 1)))
11374 {
11375 case 0:
11376 *total = 0;
11377 break;
11378 case 1:
11379 *total = COSTS_N_INSNS (3);
11380 break;
11381 case 16:
11382 case 8:
11383 *total = COSTS_N_INSNS (5);
11384 break;
11385 case 23:
11386 *total = COSTS_N_INSNS (4);
11387 break;
11388 default:
11389 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
11390 break;
11391 }
11392 break;
11393
11394 case E_SImode:
11395 if (!CONST_INT_P (XEXP (x, 1)))
11396 {
11397 *total = COSTS_N_INSNS (!speed ? 7 : 113);
11398 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11399 speed);
11400 }
11401 else
11402 switch (INTVAL (XEXP (x, 1)))
11403 {
11404 case 0:
11405 *total = 0;
11406 break;
11407 case 1:
11408 *total = COSTS_N_INSNS (4);
11409 break;
11410 case 8:
11411 case 16:
11412 case 24:
11413 *total = COSTS_N_INSNS (6);
11414 break;
11415 case 2:
11416 *total = COSTS_N_INSNS (!speed ? 7 : 8);
11417 break;
11418 case 31:
11419 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
11420 break;
11421 default:
11422 *total = COSTS_N_INSNS (!speed ? 7 : 113);
11423 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11424 speed);
11425 }
11426 break;
11427
11428 default:
11429 return false;
11430 }
11431 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
11432 return true;
11433
11434 case LSHIFTRT:
11435 if (outer_code == TRUNCATE)
11436 {
11437 *total = avr_mul_highpart_cost (x, speed);
11438 return true;
11439 }
11440
11441 switch (mode)
11442 {
11443 case E_QImode:
11444 if (!CONST_INT_P (XEXP (x, 1)))
11445 {
11446 *total = COSTS_N_INSNS (!speed ? 4 : 17);
11447 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11448 speed);
11449 }
11450 else
11451 {
11452 val = INTVAL (XEXP (x, 1));
11453 if (val == 7)
11454 *total = COSTS_N_INSNS (3);
11455 else if (val >= 0 && val <= 7)
11456 *total = COSTS_N_INSNS (val);
11457 else
11458 *total = COSTS_N_INSNS (1);
11459 }
11460 break;
11461
11462 case E_HImode:
11463 if (!CONST_INT_P (XEXP (x, 1)))
11464 {
11465 *total = COSTS_N_INSNS (!speed ? 5 : 41);
11466 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11467 speed);
11468 }
11469 else
11470 switch (INTVAL (XEXP (x, 1)))
11471 {
11472 case 0:
11473 *total = 0;
11474 break;
11475 case 1:
11476 case 8:
11477 *total = COSTS_N_INSNS (2);
11478 break;
11479 case 9:
11480 *total = COSTS_N_INSNS (3);
11481 break;
11482 case 2:
11483 case 10:
11484 case 15:
11485 *total = COSTS_N_INSNS (4);
11486 break;
11487 case 7:
11488 case 11:
11489 *total = COSTS_N_INSNS (5);
11490 break;
11491 case 3:
11492 case 12:
11493 case 13:
11494 case 14:
11495 *total = COSTS_N_INSNS (!speed ? 5 : 6);
11496 break;
11497 case 4:
11498 *total = COSTS_N_INSNS (!speed ? 5 : 7);
11499 break;
11500 case 5:
11501 case 6:
11502 *total = COSTS_N_INSNS (!speed ? 5 : 9);
11503 break;
11504 default:
11505 *total = COSTS_N_INSNS (!speed ? 5 : 41);
11506 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11507 speed);
11508 }
11509 break;
11510
11511 case E_PSImode:
11512 if (!CONST_INT_P (XEXP (x, 1)))
11513 {
11514 *total = COSTS_N_INSNS (!speed ? 6 : 73);
11515 }
11516 else
11517 switch (INTVAL (XEXP (x, 1)))
11518 {
11519 case 0:
11520 *total = 0;
11521 break;
11522 case 1:
11523 case 8:
11524 case 16:
11525 *total = COSTS_N_INSNS (3);
11526 break;
11527 case 23:
11528 *total = COSTS_N_INSNS (5);
11529 break;
11530 default:
11531 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
11532 break;
11533 }
11534 break;
11535
11536 case E_SImode:
11537 if (!CONST_INT_P (XEXP (x, 1)))
11538 {
11539 *total = COSTS_N_INSNS (!speed ? 7 : 113);
11540 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11541 speed);
11542 }
11543 else
11544 switch (INTVAL (XEXP (x, 1)))
11545 {
11546 case 0:
11547 *total = 0;
11548 break;
11549 case 1:
11550 *total = COSTS_N_INSNS (4);
11551 break;
11552 case 2:
11553 *total = COSTS_N_INSNS (!speed ? 7 : 8);
11554 break;
11555 case 8:
11556 case 16:
11557 case 24:
11558 *total = COSTS_N_INSNS (4);
11559 break;
11560 case 31:
11561 *total = COSTS_N_INSNS (6);
11562 break;
11563 default:
11564 *total = COSTS_N_INSNS (!speed ? 7 : 113);
11565 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
11566 speed);
11567 }
11568 break;
11569
11570 default:
11571 return false;
11572 }
11573 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
11574 return true;
11575
11576 case COMPARE:
11577 switch (GET_MODE (XEXP (x, 0)))
11578 {
11579 case E_QImode:
11580 *total = COSTS_N_INSNS (1);
11581 if (!CONST_INT_P (XEXP (x, 1)))
11582 *total += avr_operand_rtx_cost (XEXP (x, 1), QImode, code,
11583 1, speed);
11584 break;
11585
11586 case E_HImode:
11587 *total = COSTS_N_INSNS (2);
11588 if (!CONST_INT_P (XEXP (x, 1)))
11589 *total += avr_operand_rtx_cost (XEXP (x, 1), HImode, code,
11590 1, speed);
11591 else if (INTVAL (XEXP (x, 1)) != 0)
11592 *total += COSTS_N_INSNS (1);
11593 break;
11594
11595 case E_PSImode:
11596 *total = COSTS_N_INSNS (3);
11597 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
11598 *total += COSTS_N_INSNS (2);
11599 break;
11600
11601 case E_SImode:
11602 *total = COSTS_N_INSNS (4);
11603 if (!CONST_INT_P (XEXP (x, 1)))
11604 *total += avr_operand_rtx_cost (XEXP (x, 1), SImode, code,
11605 1, speed);
11606 else if (INTVAL (XEXP (x, 1)) != 0)
11607 *total += COSTS_N_INSNS (3);
11608 break;
11609
11610 default:
11611 return false;
11612 }
11613 *total += avr_operand_rtx_cost (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
11614 code, 0, speed);
11615 return true;
11616
11617 case TRUNCATE:
11618 if (LSHIFTRT == GET_CODE (XEXP (x, 0)))
11619 {
11620 *total = avr_mul_highpart_cost (XEXP (x, 0), speed);
11621 return true;
11622 }
11623 break;
11624
11625 default:
11626 break;
11627 }
11628 return false;
11629 }
11630
11631
11632 /* Implement `TARGET_RTX_COSTS'. */
11633
11634 static bool
avr_rtx_costs(rtx x,machine_mode mode,int outer_code,int opno,int * total,bool speed)11635 avr_rtx_costs (rtx x, machine_mode mode, int outer_code,
11636 int opno, int *total, bool speed)
11637 {
11638 bool done = avr_rtx_costs_1 (x, mode, outer_code, opno, total, speed);
11639
11640 if (avr_log.rtx_costs)
11641 {
11642 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
11643 done, speed ? "speed" : "size", *total, outer_code, x);
11644 }
11645
11646 return done;
11647 }
11648
11649
11650 /* Implement `TARGET_ADDRESS_COST'. */
11651
11652 static int
avr_address_cost(rtx x,machine_mode mode ATTRIBUTE_UNUSED,addr_space_t as ATTRIBUTE_UNUSED,bool speed ATTRIBUTE_UNUSED)11653 avr_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
11654 addr_space_t as ATTRIBUTE_UNUSED,
11655 bool speed ATTRIBUTE_UNUSED)
11656 {
11657 int cost = 4;
11658
11659 if (GET_CODE (x) == PLUS
11660 && CONST_INT_P (XEXP (x, 1))
11661 && (REG_P (XEXP (x, 0))
11662 || SUBREG_P (XEXP (x, 0))))
11663 {
11664 if (INTVAL (XEXP (x, 1)) > MAX_LD_OFFSET(mode))
11665 cost = 18;
11666 }
11667 else if (CONSTANT_ADDRESS_P (x))
11668 {
11669 if (io_address_operand (x, QImode))
11670 cost = 2;
11671
11672 if (AVR_TINY
11673 && avr_address_tiny_absdata_p (x, QImode))
11674 cost = 2;
11675 }
11676
11677 if (avr_log.address_cost)
11678 avr_edump ("\n%?: %d = %r\n", cost, x);
11679
11680 return cost;
11681 }
11682
11683 /* Test for extra memory constraint 'Q'.
11684 It's a memory address based on Y or Z pointer with valid displacement. */
11685
11686 int
extra_constraint_Q(rtx x)11687 extra_constraint_Q (rtx x)
11688 {
11689 int ok = 0;
11690 rtx plus = XEXP (x, 0);
11691
11692 if (GET_CODE (plus) == PLUS
11693 && REG_P (XEXP (plus, 0))
11694 && CONST_INT_P (XEXP (plus, 1))
11695 && (INTVAL (XEXP (plus, 1))
11696 <= MAX_LD_OFFSET (GET_MODE (x))))
11697 {
11698 rtx xx = XEXP (plus, 0);
11699 int regno = REGNO (xx);
11700
11701 ok = (/* allocate pseudos */
11702 regno >= FIRST_PSEUDO_REGISTER
11703 /* strictly check */
11704 || regno == REG_Z || regno == REG_Y
11705 /* XXX frame & arg pointer checks */
11706 || xx == frame_pointer_rtx
11707 || xx == arg_pointer_rtx);
11708
11709 if (avr_log.constraints)
11710 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
11711 ok, reload_completed, reload_in_progress, x);
11712 }
11713
11714 return ok;
11715 }
11716
11717 /* Convert condition code CONDITION to the valid AVR condition code. */
11718
11719 RTX_CODE
avr_normalize_condition(RTX_CODE condition)11720 avr_normalize_condition (RTX_CODE condition)
11721 {
11722 switch (condition)
11723 {
11724 case GT:
11725 return GE;
11726 case GTU:
11727 return GEU;
11728 case LE:
11729 return LT;
11730 case LEU:
11731 return LTU;
11732 default:
11733 gcc_unreachable ();
11734 }
11735 }
11736
11737 /* Helper function for `avr_reorg'. */
11738
11739 static rtx
avr_compare_pattern(rtx_insn * insn)11740 avr_compare_pattern (rtx_insn *insn)
11741 {
11742 rtx pattern = single_set (insn);
11743
11744 if (pattern
11745 && NONJUMP_INSN_P (insn)
11746 && SET_DEST (pattern) == cc0_rtx
11747 && GET_CODE (SET_SRC (pattern)) == COMPARE)
11748 {
11749 machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
11750 machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
11751
11752 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
11753 They must not be swapped, thus skip them. */
11754
11755 if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
11756 && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
11757 return pattern;
11758 }
11759
11760 return NULL_RTX;
11761 }
11762
11763 /* Helper function for `avr_reorg'. */
11764
11765 /* Expansion of switch/case decision trees leads to code like
11766
11767 cc0 = compare (Reg, Num)
11768 if (cc0 == 0)
11769 goto L1
11770
11771 cc0 = compare (Reg, Num)
11772 if (cc0 > 0)
11773 goto L2
11774
11775 The second comparison is superfluous and can be deleted.
11776 The second jump condition can be transformed from a
11777 "difficult" one to a "simple" one because "cc0 > 0" and
11778 "cc0 >= 0" will have the same effect here.
11779
   This function relies on the way switch/case is being expanded
11781 as binary decision tree. For example code see PR 49903.
11782
11783 Return TRUE if optimization performed.
11784 Return FALSE if nothing changed.
11785
11786 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
11787
11788 We don't want to do this in text peephole because it is
11789 tedious to work out jump offsets there and the second comparison
   might have been transformed by `avr_reorg'.
11791
11792 RTL peephole won't do because peephole2 does not scan across
11793 basic blocks. */
11794
static bool
avr_reorg_remove_redundant_compare (rtx_insn *insn1)
{
  rtx comp1, ifelse1, xcond1;
  rtx_insn *branch1;
  rtx comp2, ifelse2, xcond2;
  rtx_insn *branch2, *insn2;
  enum rtx_code code;
  rtx_insn *jump;
  rtx target, cond;

  /* Look out for: compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both comparisons must be identical and both branches must be
     plain (set (pc) (if_then_else ...)) conditional jumps.  */

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  /* The first branch must test cc0 == 0 (EQ), the comparison must be
     reg-against-constant, and both branches must fall through on the
     "else" arm (pc) and jump to a plain label otherwise.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with easy

         if (x == VAL)   goto L1;
         if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT:  case LTU:
    case GE:  case GEU:
      break;

    case LE:  case LEU:
    case GT:  case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  /* Re-emit the first branch with its original EQ condition...  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  /* ...and the second branch with the (possibly normalized) CODE.  */

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
11934
11935
11936 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
11937 /* Optimize conditional jumps. */
11938
static void
avr_reorg (void)
{
  rtx_insn *insn = get_insns();

  /* Walk all real insns, looking for cc0 comparisons that can be
     simplified or whose operands can be canonicalized in place.  */

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
    {
      rtx pattern = avr_compare_pattern (insn);

      if (!pattern)
	continue;

      if (optimize
	  && avr_reorg_remove_redundant_compare (insn))
	{
	  continue;
	}

      if (compare_diff_p (insn))
	{
	  /* Now we work under compare insn with difficult branch.  */

	  /* NEXT is assumed to be the conditional jump consuming cc0;
	     its pattern's IF_THEN_ELSE condition is at XEXP (src, 0).  */

	  rtx_insn *next = next_real_insn (insn);
	  rtx pat = PATTERN (next);

	  pattern = SET_SRC (pattern);

	  if (true_regnum (XEXP (pattern, 0)) >= 0
	      && true_regnum (XEXP (pattern, 1)) >= 0)
	    {
	      /* Both operands are registers: swap them and reverse the
		 branch condition so the pair matches a simpler pattern.  */
	      rtx x = XEXP (pattern, 0);
	      rtx src = SET_SRC (pat);
	      rtx t = XEXP (src, 0);
	      PUT_CODE (t, swap_condition (GET_CODE (t)));
	      XEXP (pattern, 0) = XEXP (pattern, 1);
	      XEXP (pattern, 1) = x;
	      INSN_CODE (next) = -1;
	    }
	  else if (true_regnum (XEXP (pattern, 0)) >= 0
		   && XEXP (pattern, 1) == const0_rtx)
	    {
	      /* This is a tst insn, we can reverse it.  */
	      rtx src = SET_SRC (pat);
	      rtx t = XEXP (src, 0);

	      PUT_CODE (t, swap_condition (GET_CODE (t)));
	      XEXP (pattern, 1) = XEXP (pattern, 0);
	      XEXP (pattern, 0) = const0_rtx;
	      INSN_CODE (next) = -1;
	      INSN_CODE (insn) = -1;
	    }
	  else if (true_regnum (XEXP (pattern, 0)) >= 0
		   && CONST_INT_P (XEXP (pattern, 1)))
	    {
	      /* Comparison against a constant: if bumping the constant
		 by one allows a non-strict condition (cf. the helper),
		 rewrite e.g. "x > VAL" as "x >= VAL+1".  */
	      rtx x = XEXP (pattern, 1);
	      rtx src = SET_SRC (pat);
	      rtx t = XEXP (src, 0);
	      machine_mode mode = GET_MODE (XEXP (pattern, 0));

	      if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
		{
		  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
		  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
		  INSN_CODE (next) = -1;
		  INSN_CODE (insn) = -1;
		}
	    }
	}
    }
}
12009
/* Returns register number for function return value.
   R25:R24 is the start of the register range used to return values.  */

static inline unsigned int
avr_ret_register (void)
{
  const unsigned int ret_regno = 24;
  return ret_regno;
}
12017
12018
12019 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
12020
12021 static bool
avr_function_value_regno_p(const unsigned int regno)12022 avr_function_value_regno_p (const unsigned int regno)
12023 {
12024 return (regno == avr_ret_register ());
12025 }
12026
12027
12028 /* Implement `TARGET_LIBCALL_VALUE'. */
12029 /* Create an RTX representing the place where a
12030 library function returns a value of mode MODE. */
12031
12032 static rtx
avr_libcall_value(machine_mode mode,const_rtx func ATTRIBUTE_UNUSED)12033 avr_libcall_value (machine_mode mode,
12034 const_rtx func ATTRIBUTE_UNUSED)
12035 {
12036 int offs = GET_MODE_SIZE (mode);
12037
12038 if (offs <= 4)
12039 offs = (offs + 1) & ~1;
12040
12041 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
12042 }
12043
12044
12045 /* Implement `TARGET_FUNCTION_VALUE'. */
12046 /* Create an RTX representing the place where a
12047 function returns a value of data type VALTYPE. */
12048
12049 static rtx
avr_function_value(const_tree type,const_tree fn_decl_or_type ATTRIBUTE_UNUSED,bool outgoing ATTRIBUTE_UNUSED)12050 avr_function_value (const_tree type,
12051 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
12052 bool outgoing ATTRIBUTE_UNUSED)
12053 {
12054 unsigned int offs;
12055
12056 if (TYPE_MODE (type) != BLKmode)
12057 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
12058
12059 offs = int_size_in_bytes (type);
12060 if (offs < 2)
12061 offs = 2;
12062 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
12063 offs = GET_MODE_SIZE (SImode);
12064 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
12065 offs = GET_MODE_SIZE (DImode);
12066
12067 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
12068 }
12069
12070 int
test_hard_reg_class(enum reg_class rclass,rtx x)12071 test_hard_reg_class (enum reg_class rclass, rtx x)
12072 {
12073 int regno = true_regnum (x);
12074 if (regno < 0)
12075 return 0;
12076
12077 if (TEST_HARD_REG_CLASS (rclass, regno))
12078 return 1;
12079
12080 return 0;
12081 }
12082
12083
12084 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
12085 and thus is suitable to be skipped by CPSE, SBRC, etc. */
12086
12087 static bool
avr_2word_insn_p(rtx_insn * insn)12088 avr_2word_insn_p (rtx_insn *insn)
12089 {
12090 if (TARGET_SKIP_BUG || !insn || get_attr_length (insn) != 2)
12091 {
12092 return false;
12093 }
12094
12095 switch (INSN_CODE (insn))
12096 {
12097 default:
12098 return false;
12099
12100 case CODE_FOR_movqi_insn:
12101 case CODE_FOR_movuqq_insn:
12102 case CODE_FOR_movqq_insn:
12103 {
12104 rtx set = single_set (insn);
12105 rtx src = SET_SRC (set);
12106 rtx dest = SET_DEST (set);
12107
12108 /* Factor out LDS and STS from movqi_insn. */
12109
12110 if (MEM_P (dest)
12111 && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
12112 {
12113 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
12114 }
12115 else if (REG_P (dest)
12116 && MEM_P (src))
12117 {
12118 return CONSTANT_ADDRESS_P (XEXP (src, 0));
12119 }
12120
12121 return false;
12122 }
12123
12124 case CODE_FOR_call_insn:
12125 case CODE_FOR_call_value_insn:
12126 return true;
12127 }
12128 }
12129
12130
12131 int
jump_over_one_insn_p(rtx_insn * insn,rtx dest)12132 jump_over_one_insn_p (rtx_insn *insn, rtx dest)
12133 {
12134 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
12135 ? XEXP (dest, 0)
12136 : dest);
12137 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
12138 int dest_addr = INSN_ADDRESSES (uid);
12139 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
12140
12141 return (jump_offset == 1
12142 || (jump_offset == 2
12143 && avr_2word_insn_p (next_active_insn (insn))));
12144 }
12145
12146
12147 /* Implement TARGET_HARD_REGNO_MODE_OK. On the enhanced core, anything
12148 larger than 1 byte must start in even numbered register for "movw" to
12149 work (this way we don't have to check for odd registers everywhere). */
12150
12151 static bool
avr_hard_regno_mode_ok(unsigned int regno,machine_mode mode)12152 avr_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
12153 {
12154 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
12155 Disallowing QI et al. in these regs might lead to code like
12156 (set (subreg:QI (reg:HI 28) n) ...)
12157 which will result in wrong code because reload does not
12158 handle SUBREGs of hard regsisters like this.
12159 This could be fixed in reload. However, it appears
12160 that fixing reload is not wanted by reload people. */
12161
12162 /* Any GENERAL_REGS register can hold 8-bit values. */
12163
12164 if (GET_MODE_SIZE (mode) == 1)
12165 return true;
12166
12167 /* FIXME: Ideally, the following test is not needed.
12168 However, it turned out that it can reduce the number
12169 of spill fails. AVR and it's poor endowment with
12170 address registers is extreme stress test for reload. */
12171
12172 if (GET_MODE_SIZE (mode) >= 4
12173 && regno >= REG_X)
12174 return false;
12175
12176 /* All modes larger than 8 bits should start in an even register. */
12177
12178 return !(regno & 1);
12179 }
12180
12181
12182 /* Implement TARGET_HARD_REGNO_CALL_PART_CLOBBERED. */
12183
12184 static bool
avr_hard_regno_call_part_clobbered(unsigned regno,machine_mode mode)12185 avr_hard_regno_call_part_clobbered (unsigned regno, machine_mode mode)
12186 {
12187 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
12188 represent valid hard registers like, e.g. HI:29. Returning TRUE
12189 for such registers can lead to performance degradation as mentioned
12190 in PR53595. Thus, report invalid hard registers as FALSE. */
12191
12192 if (!avr_hard_regno_mode_ok (regno, mode))
12193 return 0;
12194
12195 /* Return true if any of the following boundaries is crossed:
12196 17/18 or 19/20 (if AVR_TINY), 27/28 and 29/30. */
12197
12198 return ((regno <= LAST_CALLEE_SAVED_REG
12199 && regno + GET_MODE_SIZE (mode) > 1 + LAST_CALLEE_SAVED_REG)
12200 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
12201 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
12202 }
12203
12204
12205 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
12206
12207 enum reg_class
avr_mode_code_base_reg_class(machine_mode mode ATTRIBUTE_UNUSED,addr_space_t as,RTX_CODE outer_code,RTX_CODE index_code ATTRIBUTE_UNUSED)12208 avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
12209 addr_space_t as, RTX_CODE outer_code,
12210 RTX_CODE index_code ATTRIBUTE_UNUSED)
12211 {
12212 if (!ADDR_SPACE_GENERIC_P (as))
12213 {
12214 return POINTER_Z_REGS;
12215 }
12216
12217 if (!avr_strict_X)
12218 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
12219
12220 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
12221 }
12222
12223
12224 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
12225
12226 bool
avr_regno_mode_code_ok_for_base_p(int regno,machine_mode mode ATTRIBUTE_UNUSED,addr_space_t as ATTRIBUTE_UNUSED,RTX_CODE outer_code,RTX_CODE index_code ATTRIBUTE_UNUSED)12227 avr_regno_mode_code_ok_for_base_p (int regno,
12228 machine_mode mode ATTRIBUTE_UNUSED,
12229 addr_space_t as ATTRIBUTE_UNUSED,
12230 RTX_CODE outer_code,
12231 RTX_CODE index_code ATTRIBUTE_UNUSED)
12232 {
12233 bool ok = false;
12234
12235 if (!ADDR_SPACE_GENERIC_P (as))
12236 {
12237 if (regno < FIRST_PSEUDO_REGISTER
12238 && regno == REG_Z)
12239 {
12240 return true;
12241 }
12242
12243 if (reg_renumber)
12244 {
12245 regno = reg_renumber[regno];
12246
12247 if (regno == REG_Z)
12248 {
12249 return true;
12250 }
12251 }
12252
12253 return false;
12254 }
12255
12256 if (regno < FIRST_PSEUDO_REGISTER
12257 && (regno == REG_X
12258 || regno == REG_Y
12259 || regno == REG_Z
12260 || regno == ARG_POINTER_REGNUM))
12261 {
12262 ok = true;
12263 }
12264 else if (reg_renumber)
12265 {
12266 regno = reg_renumber[regno];
12267
12268 if (regno == REG_X
12269 || regno == REG_Y
12270 || regno == REG_Z
12271 || regno == ARG_POINTER_REGNUM)
12272 {
12273 ok = true;
12274 }
12275 }
12276
12277 if (avr_strict_X
12278 && PLUS == outer_code
12279 && regno == REG_X)
12280 {
12281 ok = false;
12282 }
12283
12284 return ok;
12285 }
12286
12287
12288 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
12289 /* Set 32-bit register OP[0] to compile-time constant OP[1].
12290 CLOBBER_REG is a QI clobber register or NULL_RTX.
12291 LEN == NULL: output instructions.
12292 LEN != NULL: set *LEN to the length of the instruction sequence
12293 (in words) printed with LEN = NULL.
   If CLEAR_P is true, OP[0] had been cleared to Zero already.
12295 If CLEAR_P is false, nothing is known about OP[0].
12296
12297 The effect on cc0 is as follows:
12298
12299 Load 0 to any register except ZERO_REG : NONE
12300 Load ld register with any value : NONE
12301 Anything else: : CLOBBER */
12302
static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
  rtx src = op[1];
  rtx dest = op[0];
  rtx xval, xdest[4];
  int ival[4];
  /* Value currently held in CLOBBER_REG; initialized to an impossible
     byte value so the first comparison against it always fails.  */
  int clobber_val = 1234;
  bool cooked_clobber_p = false;
  /* Whether the T flag has already been SET for the BLD trick below.  */
  bool set_p = false;
  machine_mode mode = GET_MODE (dest);
  int n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
              && CONSTANT_P (src));

  if (len)
    *len = 0;

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
    {
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
    }

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
          || !avr_popcount_each_byte (src, n_bytes,
                                      (1 << 0) | (1 << 1) | (1 << 8))))
    {
      /* We have no clobber register but need one.  Cook one up.
         That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
    }

  /* Now start filling DEST from LSB to MSB.  */

  for (int n = 0; n < n_bytes; n++)
    {
      int ldreg_p;
      bool done_byte = false;
      rtx xop[3];

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      if (!CONST_INT_P (src)
          && !CONST_FIXED_P (src)
          && !CONST_DOUBLE_P (src))
        {
          /* Symbolic constant: emit LDI with the lo8/hi8/hlo8/hhi8
             modifier for byte N; column [1] is the direct-LDI variant
             for LD_REGS, column [0] goes through the clobber reg.  */

          static const char* const asm_code[][2] =
            {
              { "ldi %2,lo8(%1)"  CR_TAB "mov %0,%2", "ldi %0,lo8(%1)"  },
              { "ldi %2,hi8(%1)"  CR_TAB "mov %0,%2", "ldi %0,hi8(%1)"  },
              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
            };

          xop[0] = xdest[n];
          xop[1] = src;
          xop[2] = clobber_reg;

          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);

          continue;
        }

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

      if (n == 2
          && n_bytes >= 4
          && AVR_HAVE_MOVW)
        {
          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

          if (INTVAL (lo16) == INTVAL (hi16))
            {
              if (INTVAL (lo16) != 0 || !clear_p)
                avr_asm_len ("movw %C0,%A0", &op[0], len, 1);

              /* Both words are done now, so leave the loop.  */
              break;
            }
        }

      /* Don't use CLR so that cc0 is set as expected.  */

      if (ival[n] == 0)
        {
          if (!clear_p)
            avr_asm_len (ldreg_p ? "ldi %0,0"
                         : AVR_ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
                         : "mov %0,__zero_reg__",
                         &xdest[n], len, 1);
          continue;
        }

      /* The clobber reg already holds exactly this byte and is the
         target register itself: nothing to do.  */

      if (clobber_val == ival[n]
          && REGNO (clobber_reg) == REGNO (xdest[n]))
        {
          continue;
        }

      /* LD_REGS can use LDI to move a constant value */

      if (ldreg_p)
        {
          xop[0] = xdest[n];
          xop[1] = xval;
          avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
          continue;
        }

      /* Try to reuse value already loaded in some lower byte. */

      for (int j = 0; j < n; j++)
        if (ival[j] == ival[n])
          {
            xop[0] = xdest[n];
            xop[1] = xdest[j];

            avr_asm_len ("mov %0,%1", xop, len, 1);
            done_byte = true;
            break;
          }

      if (done_byte)
        continue;

      /* Need no clobber reg for -1: Use CLR/DEC */

      if (ival[n] == -1)
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("dec %0", &xdest[n], len, 1);
          continue;
        }
      else if (ival[n] == 1)
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("inc %0", &xdest[n], len, 1);
          continue;
        }

      /* Use T flag or INC to manage powers of 2 if we have
         no clobber reg.  */

      if (NULL_RTX == clobber_reg
          && single_one_operand (xval, QImode))
        {
          xop[0] = xdest[n];
          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

          gcc_assert (constm1_rtx != xop[1]);

          /* SET once; subsequent bytes reuse the T flag.  */

          if (!set_p)
            {
              set_p = true;
              avr_asm_len ("set", xop, len, 1);
            }

          if (!clear_p)
            avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len ("bld %0,%1", xop, len, 1);
          continue;
        }

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[0] = xdest[n];
      xop[1] = xval;
      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
                   "mov %0,%2", xop, len, 2);
    }

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
    {
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
    }
}
12513
12514
12515 /* Reload the constant OP[1] into the HI register OP[0].
12516 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
12517 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
12518 need a clobber reg or have to cook one up.
12519
12520 PLEN == NULL: Output instructions.
12521 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
12522 by the insns printed.
12523
12524 Return "". */
12525
const char*
output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
{
  /* A HI constant is just a 2-byte case of the generic worker; no
     pre-clearing of the destination is requested (clear_p = false).  */
  output_reload_in_const (op, clobber_reg, plen, false);
  return "";
}
12532
12533
12534 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
12535 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
12536 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
12537 need a clobber reg or have to cook one up.
12538
12539 LEN == NULL: Output instructions.
12540
12541 LEN != NULL: Output nothing. Set *LEN to number of words occupied
12542 by the insns printed.
12543
12544 Return "". */
12545
const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
  /* The pre-clear trick below only pays off with MOVW and only for a
     destination outside LD_REGS (LD_REGS can use LDI directly).  */
  if (AVR_HAVE_MOVW
      && !test_hard_reg_class (LD_REGS, op[0])
      && (CONST_INT_P (op[1])
          || CONST_FIXED_P (op[1])
          || CONST_DOUBLE_P (op[1])))
    {
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

             CLR R2   CLR R3   MOVW R4,R2   INC R2

         is shorter than

             CLR R2   INC R2   CLR R3   CLR R4   CLR R5

         We find it too tedious to work that out in the print function.
         Instead, we call the print function twice to get the lengths of
         both methods and use the shortest one.  */

      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      if (len_noclr - len_clr == 4)
        {
          /* Default needs 4 CLR instructions: clear register beforehand.  */

          avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
                       "mov %B0,__zero_reg__" CR_TAB
                       "movw %C0,%A0", &op[0], len, 3);

          /* Now emit the constant on top of the cleared register.  */
          output_reload_in_const (op, clobber_reg, len, true);

          /* Account for the 3 clearing instructions emitted above.  */
          if (len)
            *len += 3;

          return "";
        }
    }

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
12594
/* Reload a 24-bit (PSImode) compile time constant OP[1] into register OP[0].
   CLOBBER_REG is a QI clobber reg or NULL_RTX, with the same semantics as
   in output_reload_inhi above.

   LEN == NULL: Output instructions.
   LEN != NULL: Output nothing.  Set *LEN to number of words occupied
   by the insns printed.

   Return "".  */

const char*
avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
{
  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
12601
12602
12603 /* Worker function for `ASM_OUTPUT_ADDR_VEC'. */
12604 /* Emit jump tables out-of-line so that branches crossing the table
12605 get shorter offsets. If we have JUMP + CALL, then put the tables
12606 in a dedicated non-.text section so that CALLs get better chance to
12607 be relaxed to RCALLs.
12608
12609 We emit the tables by hand because `function_rodata_section' does not
12610 work as expected, cf. PR71151, and we do *NOT* want the table to be
12611 in .rodata, hence setting JUMP_TABLES_IN_TEXT_SECTION = 0 is of limited
12612 use; and setting it to 1 attributes table lengths to branch offsets...
   Moreover, final.c keeps switching section before each table entry
12614 which we find too fragile as to rely on section caching. */
12615
void
avr_output_addr_vec (rtx_insn *labl, rtx table)
{
  FILE *stream = asm_out_file;

  app_disable();

  // Switch to appropriate (sub)section.

  if (DECL_SECTION_NAME (current_function_decl)
      && symtab_node::get (current_function_decl)
      && ! symtab_node::get (current_function_decl)->implicit_section)
    {
      // .subsection will emit the code after the function and in the
      // section as chosen by the user.

      switch_to_section (current_function_section ());
      fprintf (stream, "\t.subsection\t1\n");
    }
  else
    {
      // Since PR63223 there is no restriction where to put the table; it
      // may even reside above 128 KiB.  We put it in a section as high as
      // possible and avoid progmem in order not to waste flash <= 64 KiB.

      const char *sec_name = ".jumptables.gcc";

      // The table belongs to its host function, therefore use fine
      // grained sections so that, if that function is removed by
      // --gc-sections, the child table(s) may also be removed.

      tree asm_name = DECL_ASSEMBLER_NAME (current_function_decl);
      const char *fname = IDENTIFIER_POINTER (asm_name);
      fname = targetm.strip_name_encoding (fname);
      sec_name = ACONCAT ((sec_name, ".", fname, NULL));

      // With JMP/CALL the table holds data words (gs() addresses), hence
      // "a"; otherwise it holds RJMP code and needs "ax".
      fprintf (stream, "\t.section\t%s,\"%s\",@progbits\n", sec_name,
               AVR_HAVE_JMP_CALL ? "a" : "ax");
    }

  // Output the label that precedes the table.

  ASM_OUTPUT_ALIGN (stream, 1);
  targetm.asm_out.internal_label (stream, "L", CODE_LABEL_NUMBER (labl));

  // Output the table's content.

  int vlen = XVECLEN (table, 0);

  for (int idx = 0; idx < vlen; idx++)
    {
      int value = CODE_LABEL_NUMBER (XEXP (XVECEXP (table, 0, idx), 0));

      if (AVR_HAVE_JMP_CALL)
        fprintf (stream, "\t.word gs(.L%d)\n", value);
      else
        fprintf (stream, "\trjmp .L%d\n", value);
    }

  // Switch back to original section.  As we clobbered the section above,
  // forget the current section before switching back.

  in_section = NULL;
  switch_to_section (current_function_section ());
}
12681
12682
12683 /* Implement `TARGET_CONDITIONAL_REGISTER_USAGE'. */
12684
12685 static void
avr_conditional_register_usage(void)12686 avr_conditional_register_usage (void)
12687 {
12688 if (AVR_TINY)
12689 {
12690 const int tiny_reg_alloc_order[] = {
12691 24, 25,
12692 22, 23,
12693 30, 31,
12694 26, 27,
12695 28, 29,
12696 21, 20, 19, 18,
12697 16, 17,
12698 32, 33, 34, 35,
12699 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
12700 };
12701
12702 /* Set R0-R17 as fixed registers. Reset R0-R17 in call used register list
12703 - R0-R15 are not available in Tiny Core devices
12704 - R16 and R17 are fixed registers. */
12705
12706 for (size_t i = 0; i <= 17; i++)
12707 {
12708 fixed_regs[i] = 1;
12709 call_used_regs[i] = 1;
12710 }
12711
12712 /* Set R18 to R21 as callee saved registers
12713 - R18, R19, R20 and R21 are the callee saved registers in
12714 Tiny Core devices */
12715
12716 for (size_t i = 18; i <= LAST_CALLEE_SAVED_REG; i++)
12717 {
12718 call_used_regs[i] = 0;
12719 }
12720
12721 /* Update register allocation order for Tiny Core devices */
12722
12723 for (size_t i = 0; i < ARRAY_SIZE (tiny_reg_alloc_order); i++)
12724 {
12725 reg_alloc_order[i] = tiny_reg_alloc_order[i];
12726 }
12727
12728 CLEAR_HARD_REG_SET (reg_class_contents[(int) ADDW_REGS]);
12729 CLEAR_HARD_REG_SET (reg_class_contents[(int) NO_LD_REGS]);
12730 }
12731 }
12732
12733 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
12734 /* Returns true if SCRATCH are safe to be allocated as a scratch
12735 registers (for a define_peephole2) in the current function. */
12736
12737 static bool
avr_hard_regno_scratch_ok(unsigned int regno)12738 avr_hard_regno_scratch_ok (unsigned int regno)
12739 {
12740 /* Interrupt functions can only use registers that have already been saved
12741 by the prologue, even if they would normally be call-clobbered. */
12742
12743 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
12744 && !df_regs_ever_live_p (regno))
12745 return false;
12746
12747 /* Don't allow hard registers that might be part of the frame pointer.
12748 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
12749 and don't care for a frame pointer that spans more than one register. */
12750
12751 if ((!reload_completed || frame_pointer_needed)
12752 && (regno == REG_Y || regno == REG_Y + 1))
12753 {
12754 return false;
12755 }
12756
12757 return true;
12758 }
12759
12760
12761 /* Worker function for `HARD_REGNO_RENAME_OK'. */
12762 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
12763
12764 int
avr_hard_regno_rename_ok(unsigned int old_reg,unsigned int new_reg)12765 avr_hard_regno_rename_ok (unsigned int old_reg,
12766 unsigned int new_reg)
12767 {
12768 /* Interrupt functions can only use registers that have already been
12769 saved by the prologue, even if they would normally be
12770 call-clobbered. */
12771
12772 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
12773 && !df_regs_ever_live_p (new_reg))
12774 return 0;
12775
12776 /* Don't allow hard registers that might be part of the frame pointer.
12777 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
12778 and don't care for a frame pointer that spans more than one register. */
12779
12780 if ((!reload_completed || frame_pointer_needed)
12781 && (old_reg == REG_Y || old_reg == REG_Y + 1
12782 || new_reg == REG_Y || new_reg == REG_Y + 1))
12783 {
12784 return 0;
12785 }
12786
12787 return 1;
12788 }
12789
12790 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
12791 or memory location in the I/O space (QImode only).
12792
12793 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
12794 Operand 1: register operand to test, or CONST_INT memory address.
12795 Operand 2: bit number.
12796 Operand 3: label to jump to if the test is true. */
12797
const char*
avr_out_sbxx_branch (rtx_insn *insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  bool long_jump = get_attr_length (insn) >= 4;
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* GE/LT are mapped onto EQ/NE of the tested bit; presumably these
     arise from sign-bit comparisons — NOTE(review): confirm against
     the insn patterns using this function.  */
  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  /* For a long jump (or when skipping exactly one insn) we invert the
     condition and let the skip instruction jump over the branch.  */
  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:
    case CONST:
    case SYMBOL_REF:

      if (low_io_address_operand (operands[1], QImode))
        {
          /* Low I/O address: SBIS/SBIC can test the bit directly.  */
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          /* Other I/O address: read into __tmp_reg__ and use SBRS/SBRC.  */
          gcc_assert (io_address_operand (operands[1], QImode));
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      /* Bit of a general register: SBRS/SBRC.  %T1%T2 prints the byte
         of operand 1 holding bit operand 2, and the bit position.  */
      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    } /* switch */

  /* Skipped over by the skip insn above: either RJMP over JMP plus the
     JMP itself, the plain RJMP, or nothing at all.  */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}
12860
12861 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
12862
12863 static void
avr_asm_out_ctor(rtx symbol,int priority)12864 avr_asm_out_ctor (rtx symbol, int priority)
12865 {
12866 fputs ("\t.global __do_global_ctors\n", asm_out_file);
12867 default_ctor_section_asm_out_constructor (symbol, priority);
12868 }
12869
12870
12871 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
12872
12873 static void
avr_asm_out_dtor(rtx symbol,int priority)12874 avr_asm_out_dtor (rtx symbol, int priority)
12875 {
12876 fputs ("\t.global __do_global_dtors\n", asm_out_file);
12877 default_dtor_section_asm_out_destructor (symbol, priority);
12878 }
12879
12880
12881 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
12882
12883 static bool
avr_return_in_memory(const_tree type,const_tree fntype ATTRIBUTE_UNUSED)12884 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
12885 {
12886 HOST_WIDE_INT size = int_size_in_bytes (type);
12887 HOST_WIDE_INT ret_size_limit = AVR_TINY ? 4 : 8;
12888
12889 /* In avr, there are 8 return registers. But, for Tiny Core
12890 (ATtiny4/5/9/10/20/40) devices, only 4 registers are available.
12891 Return true if size is unknown or greater than the limit. */
12892
12893 if (size == -1 || size > ret_size_limit)
12894 {
12895 return true;
12896 }
12897 else
12898 {
12899 return false;
12900 }
12901 }
12902
12903
12904 /* Implement `CASE_VALUES_THRESHOLD'. */
12905 /* Supply the default for --param case-values-threshold=0 */
12906
static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  const unsigned int jump_table_break_even = 7;

  return jump_table_break_even;
}
12918
12919
12920 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
12921
12922 static scalar_int_mode
avr_addr_space_address_mode(addr_space_t as)12923 avr_addr_space_address_mode (addr_space_t as)
12924 {
12925 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
12926 }
12927
12928
12929 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
12930
static scalar_int_mode
avr_addr_space_pointer_mode (addr_space_t as)
{
  /* Pointer mode and address mode coincide on AVR.  */
  return avr_addr_space_address_mode (as);
}
12936
12937
12938 /* Helper for following function. */
12939
12940 static bool
avr_reg_ok_for_pgm_addr(rtx reg,bool strict)12941 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
12942 {
12943 gcc_assert (REG_P (reg));
12944
12945 if (strict)
12946 {
12947 return REGNO (reg) == REG_Z;
12948 }
12949
12950 /* Avoid combine to propagate hard regs. */
12951
12952 if (can_create_pseudo_p()
12953 && REGNO (reg) < REG_Z)
12954 {
12955 return false;
12956 }
12957
12958 return true;
12959 }
12960
12961
12962 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
12963
static bool
avr_addr_space_legitimate_address_p (machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      /* Flash is addressed via a plain register or a post-incremented
         register; anything else is rejected.  */

      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      /* Before register allocation, any pseudo register will do.  */
      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      /* A (lo_sum hi lo) address: the low part must be Z.  */
      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  /* Optional debug dump, controlled by -mlog=legitimate_address_p.  */

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
13042
13043
13044 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
13045
13046 static rtx
avr_addr_space_legitimize_address(rtx x,rtx old_x,machine_mode mode,addr_space_t as)13047 avr_addr_space_legitimize_address (rtx x, rtx old_x,
13048 machine_mode mode, addr_space_t as)
13049 {
13050 if (ADDR_SPACE_GENERIC_P (as))
13051 return avr_legitimize_address (x, old_x, mode);
13052
13053 if (avr_log.legitimize_address)
13054 {
13055 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
13056 }
13057
13058 return old_x;
13059 }
13060
13061
13062 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
13063
static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip CONST / PLUS wrappers to get at a possible SYMBOL_REF.  */

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF_P (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  Flash spaces contribute
         their segment number as the high byte.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      /* Segment 0 is a plain zero-extension; otherwise extend with the
         constant MSB.  */

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      /* Take the low HImode part of the PSImode value.  */

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Same pointer width on both sides: no conversion needed.  */

  return src;
}
13129
13130
13131 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
13132
static bool
avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
                         addr_space_t superset ATTRIBUTE_UNUSED)
{
  /* Allow any kind of pointer mess; avr_addr_space_convert above takes
     care of the actual representation change.  */

  return true;
}
13141
13142
13143 /* Implement `TARGET_CONVERT_TO_TYPE'. */
13144
static tree
avr_convert_to_type (tree type, tree expr)
{
  /* Print a diagnostic for pointer conversion that changes the address
     space of the pointer target to a non-enclosing address space,
     provided -Waddr-space-convert is on.

     FIXME: Filter out cases where the target object is known to
     be located in the right memory, like in

     (const __flash*) PSTR ("text")

     Also try to distinguish between explicit casts requested by
     the user and implicit casts like

     void f (const __flash char*);

     void g (const char *p)
     {
     f ((const __flash*) p);
     }

     under the assumption that an explicit cast means that the user
     knows what he is doing, e.g. interface with PSTR or old style
     code with progmem and pgm_read_xxx.
  */

  if (avr_warn_addr_space_convert
      && expr != error_mark_node
      && POINTER_TYPE_P (type)
      && POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));

      if (avr_log.progmem)
        avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);

      /* Converting into __memx is always safe as it encloses everything;
         warn only on a change into some other space.  */

      if (as_new != ADDR_SPACE_MEMX
          && as_new != as_old)
        {
          location_t loc = EXPR_LOCATION (expr);
          const char *name_old = avr_addrspace[as_old].name;
          const char *name_new = avr_addrspace[as_new].name;

          warning (OPT_Waddr_space_convert,
                   "conversion from address space %qs to address space %qs",
                   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
                   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);

          return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
        }
    }

  /* NULL_TREE tells the middle-end to use the default conversion.  */

  return NULL_TREE;
}
13201
13202
13203 /* Implement `TARGET_LEGITIMATE_COMBINED_INSN'. */
13204
13205 /* PR78883: Filter out paradoxical SUBREGs of MEM which are not handled
13206 properly by following passes. As INSN_SCHEDULING is off and hence
13207 general_operand accepts such expressions, ditch them now. */
13208
static bool
avr_legitimate_combined_insn (rtx_insn *insn)
{
  subrtx_iterator::array_type array;

  /* Reject the combined insn if its pattern contains a paradoxical
     SUBREG of a MEM, i.e. a SUBREG wider than the MEM it wraps.  */

  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
    {
      const_rtx op = *iter;

      if (SUBREG_P (op)
          && MEM_P (SUBREG_REG (op))
          && (GET_MODE_SIZE (GET_MODE (op))
              > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op)))))
        {
          return false;
        }
    }

  return true;
}
13229
13230
13231 /* PR63633: The middle-end might come up with hard regs as input operands.
13232
13233 RMASK is a bit mask representing a subset of hard registers R0...R31:
13234 Rn is an element of that set iff bit n of RMASK is set.
13235 OPMASK describes a subset of OP[]: If bit n of OPMASK is 1 then
13236 OP[n] has to be fixed; otherwise OP[n] is left alone.
13237
13238 For each element of OPMASK which is a hard register overlapping RMASK,
13239 replace OP[n] with a newly created pseudo register
13240
13241 HREG == 0: Also emit a move insn that copies the contents of that
13242 hard register into the new pseudo.
13243
13244 HREG != 0: Also set HREG[n] to the hard register. */
13245
static void
avr_fix_operands (rtx *op, rtx *hreg, unsigned opmask, unsigned rmask)
{
  /* Walk OP[] and OPMASK in lock-step; bit 0 of OPMASK corresponds
     to the current *OP.  */

  for (; opmask; opmask >>= 1, op++)
    {
      rtx reg = *op;

      /* Default: this operand needed no fixing.  */
      if (hreg)
        *hreg = NULL_RTX;

      if ((opmask & 1)
          && REG_P (reg)
          && REGNO (reg) < FIRST_PSEUDO_REGISTER
          // This hard-reg overlaps other prohibited hard regs?
          && (rmask & regmask (GET_MODE (reg), REGNO (reg))))
        {
          /* Replace the hard reg with a fresh pseudo; either copy the
             hard reg into it now (HREG == NULL) or record the hard reg
             for the caller to copy back later.  */
          *op = gen_reg_rtx (GET_MODE (reg));
          if (hreg == NULL)
            emit_move_insn (*op, reg);
          else
            *hreg = reg;
        }

      if (hreg)
        hreg++;
    }
}
13273
13274
/* Fix input operands only:  replace hard regs in OP[] (as selected by
   OPMASK and RMASK, see avr_fix_operands) with fresh pseudos and emit
   moves that copy the hard regs into them.  */

void
avr_fix_inputs (rtx *op, unsigned opmask, unsigned rmask)
{
  avr_fix_operands (op, NULL, opmask, rmask);
}
13280
13281
13282 /* Helper for the function below: If bit n of MASK is set and
13283 HREG[n] != NULL, then emit a move insn to copy OP[n] to HREG[n].
13284 Otherwise do nothing for that n. Return TRUE. */
13285
13286 static bool
avr_move_fixed_operands(rtx * op,rtx * hreg,unsigned mask)13287 avr_move_fixed_operands (rtx *op, rtx *hreg, unsigned mask)
13288 {
13289 for (; mask; mask >>= 1, op++, hreg++)
13290 if ((mask & 1)
13291 && *hreg)
13292 emit_move_insn (*hreg, *op);
13293
13294 return true;
13295 }
13296
13297
13298 /* PR63633: The middle-end might come up with hard regs as output operands.
13299
13300 GEN is a sequence generating function like gen_mulsi3 with 3 operands OP[].
13301 RMASK is a bit mask representing a subset of hard registers R0...R31:
13302 Rn is an element of that set iff bit n of RMASK is set.
13303 OPMASK describes a subset of OP[]: If bit n of OPMASK is 1 then
13304 OP[n] has to be fixed; otherwise OP[n] is left alone.
13305
13306 Emit the insn sequence as generated by GEN() with all elements of OPMASK
13307 which are hard registers overlapping RMASK replaced by newly created
13308 pseudo registers. After the sequence has been emitted, emit insns that
13309 move the contents of respective pseudos to their hard regs. */
13310
bool
avr_emit3_fix_outputs (rtx (*gen)(rtx,rtx,rtx), rtx *op,
                       unsigned opmask, unsigned rmask)
{
  const int n = 3;
  rtx hreg[n];

  /* It is legitimate for GEN to call this function, and in order not to
     get self-recursive we use the following static kludge.  This is the
     only way not to duplicate all expanders and to avoid ugly and
     hard-to-maintain C-code instead of the much more appreciated RTL
     representation as supplied by define_expand.  */
  static bool lock = false;

  gcc_assert (opmask < (1u << n));

  if (lock)
    return false;

  /* Replace offending hard-reg operands with pseudos; HREG[] records
     which hard regs must be restored after the expansion.  */
  avr_fix_operands (op, hreg, opmask, rmask);

  lock = true;
  emit_insn (gen (op[0], op[1], op[2]));
  lock = false;

  /* Copy the pseudos back into the original hard regs.  */
  return avr_move_fixed_operands (op, hreg, opmask);
}
13338
13339
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      "     "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Cannot write to flash, and only constant positive counts are
     handled here.  */

  if (avr_mem_flash_p (xop[0]))
    return false;

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit source address: must be the linearized __memx space.  */

      gcc_assert (as == ADDR_SPACE_MEMX);

      /* NOTE(review): this path uses `count < 0x100' while the path below
         uses `count <= 0x100'; presumably the movmemx loop cannot express
         a QImode count of 256 — confirm against the movmemx insns.  */

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      /* Split the 24-bit address into 16-bit low part and 8-bit high part.  */

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      if (segment
          && avr_n_flash > 1)
        {
          /* Multi-segment flash: set RAMPZ to the segment number.  */

          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          /* Single-segment device: all flash spaces collapse to __flash.  */

          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
     on its own.  Thus, we allocate the pointer registers by hand:
     Z = source address
     X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
     register(s) inside the loop leading to additional move instruction
     to/from stack which could clobber tmp_reg.  Thus, do *not* emit
     load and store as separate insns.  Instead, we perform the copy
     by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      /* __memx: the high address byte goes into R23; the loop insn
         selects the load instruction at run time.  */

      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
13445
13446
13447 /* Print assembler for movmem_qi, movmem_hi insns...
13448 $0 : Address Space
13449 $1, $2 : Loop register
13450 Z : Source address
13451 X : Destination address
13452 */
13453
const char*
avr_out_movmem (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  machine_mode loop_mode = GET_MODE (op[1]);
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  /* %0 = address space, %1 = loop counter, %2 = scratch (__tmp_reg__).  */

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment; the load instruction depends on the
     source address space.  */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      /* Without LPMX, LPM implicitly targets R0 (== __tmp_reg__) and
         does not post-increment, hence the explicit ADIW.  */

      if (AVR_HAVE_LPMX)
        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("lpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      if (AVR_HAVE_ELPMX)
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("elpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      /* HImode counter outside ADDW_REGS: decrement in two steps.  */

      avr_asm_len ("subi %A1,1" CR_TAB
                   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}
13532
13533
13534
13535 /* Helper for __builtin_avr_delay_cycles */
13536
13537 static rtx
avr_mem_clobber(void)13538 avr_mem_clobber (void)
13539 {
13540 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
13541 MEM_VOLATILE_P (mem) = 1;
13542 return mem;
13543 }
13544
/* Expand __builtin_avr_delay_cycles:  emit delay loops of decreasing
   width (SI, PSI-sized, HI, QI counters) and trailing NOPs so that the
   total comes as close as possible to the requested cycle count.
   The formulas below model each loop as

       cycles_used = (loop_count - 1) * cycles_per_iteration + overhead

   NOTE(review): the per-iteration costs (6, 5, 4, 3) and overheads
   (9, 7, 5) must match the delay_cycles_* insns in avr.md — confirm
   there before changing any constant here.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit counter loop: 6 cycles per iteration, 9 cycles overhead.  */

  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 24-bit counter loop: 5 cycles per iteration, 7 cycles overhead.  */

  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 16-bit counter loop: 4 cycles per iteration, 5 cycles overhead.  */

  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 8-bit counter loop: 3 cycles per iteration.  */

  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* Remaining cycles: 2-cycle and 1-cycle NOP insns.  */

  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT (2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT (1)));
      cycles--;
    }
}
13606
13607
13608 static void
avr_expand_nops(rtx operands0)13609 avr_expand_nops (rtx operands0)
13610 {
13611 unsigned HOST_WIDE_INT n_nops = UINTVAL (operands0) & GET_MODE_MASK (HImode);
13612
13613 while (n_nops--)
13614 {
13615 emit_insn (gen_nopv (const1_rtx));
13616 }
13617 }
13618
13619
13620 /* Compute the image of x under f, i.e. perform x --> f(x) */
13621
static int
avr_map (unsigned int f, int x)
{
  /* F encodes a map as eight 4-bit nibbles; nibble no. X holds f(X).
     Compare X as unsigned so that a negative X yields 0 instead of
     performing a shift by a negative amount, which is undefined
     behavior.  Out-of-range X (>= 8) also maps to 0.  */
  return (unsigned) x < 8 ? (f >> (4 * x)) & 0xf : 0;
}
13627
13628
13629 /* Return some metrics of map A. */
13630
/* Selectors for the MODE argument of avr_map_metric below.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };
13648
13649 static unsigned
avr_map_metric(unsigned int a,int mode)13650 avr_map_metric (unsigned int a, int mode)
13651 {
13652 unsigned metric = 0;
13653
13654 for (unsigned i = 0; i < 8; i++)
13655 {
13656 unsigned ai = avr_map (a, i);
13657
13658 if (mode == MAP_FIXED_0_7)
13659 metric += ai == i;
13660 else if (mode == MAP_NONFIXED_0_7)
13661 metric += ai < 8 && ai != i;
13662 else if (mode == MAP_MASK_FIXED_0_7)
13663 metric |= ((unsigned) (ai == i)) << i;
13664 else if (mode == MAP_PREIMAGE_0_7)
13665 metric += ai < 8;
13666 else if (mode == MAP_MASK_PREIMAGE_F)
13667 metric |= ((unsigned) (ai == 0xf)) << i;
13668 else
13669 gcc_unreachable();
13670 }
13671
13672 return metric;
13673 }
13674
13675
13676 /* Return true if IVAL has a 0xf in its hexadecimal representation
13677 and false, otherwise. Only nibbles 0..7 are taken into account.
13678 Used as constraint helper for C0f and Cxf. */
13679
13680 bool
avr_has_nibble_0xf(rtx ival)13681 avr_has_nibble_0xf (rtx ival)
13682 {
13683 unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
13684 return avr_map_metric (map, MAP_MASK_PREIMAGE_F) != 0;
13685 }
13686
13687
13688 /* We have a set of bits that are mapped by a function F.
13689 Try to decompose F by means of a second function G so that
13690
13691 F = F o G^-1 o G
13692
13693 and
13694
13695 cost (F o G^-1) + cost (G) < cost (F)
13696
13697 Example: Suppose builtin insert_bits supplies us with the map
13698 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
13699 nibble of the result, we can just as well rotate the bits before inserting
13700 them and use the map 0x7654ffff which is cheaper than the original map.
13701 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
13702
typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  unsigned int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;

/* Catalogue of candidate functions G (rotates and shifts together with
   their inverse maps and costs) tried by avr_map_decompose.  Entry 0 is
   the identity.  */

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
  };
13742
13743
13744 /* Try to decompose F as F = (F o G^-1) o G as described above.
13745 The result is a struct representing F o G^-1 and G.
13746 If result.cost < 0 then such a decomposition does not exist. */
13747
static avr_map_op_t
avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
{
  /* Whether F reads the target value at all, i.e. has any 0xf nibble.  */
  bool val_used_p = avr_map_metric (f, MAP_MASK_PREIMAGE_F) != 0;
  avr_map_op_t f_ginv = *g;
  unsigned int ginv = g->ginv;

  /* Start pessimistic: cost = -1 signals "no decomposition found".  */
  f_ginv.cost = -1;

  /* Step 1: Computing F o G^-1 */

  for (int i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is no element of the image of G: no avail (cost = -1) */

          if (x > 7)
            return f_ginv;
        }

      f_ginv.map = (f_ginv.map << 4) + x;
    }

  /* Step 2: Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
     the cost of the insertion plus the cost of the operation. */

  /* Step 2a: Compute cost of F o G^-1 */

  if (avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7) == 0)
    /* The mapping consists only of fixed points and can be folded
       to AND/OR logic in the remainder.  Reasonable cost is 3. */
    f_ginv.cost = 2 + (val_used_p && !val_const_p);
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]: Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map, SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      /* PLEN-mode call: only computes the length, prints nothing.  */
      avr_out_insert_bits (xop, &f_ginv.cost);

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b: Add cost of G */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}
13813
13814
13815 /* Insert bits from XOP[1] into XOP[0] according to MAP.
13816 XOP[0] and XOP[1] don't overlap.
13817 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
13818 If FIXP_P = false: Just move the bit if its position in the destination
13819 is different to its source position. */
13820
static void
avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
{
  /* T-flag contains this bit of the source, i.e. of XOP[1] */
  int t_bit_src = -1;

  /* We order the operations according to the requested source bit b.
     Caching the bit currently held in T (t_bit_src) avoids emitting a
     redundant BST when several destination bits read the same source
     bit consecutively.  */

  for (int b = 0; b < 8; b++)
    for (int bit_dest = 0; bit_dest < 8; bit_dest++)
      {
        int bit_src = avr_map (map, bit_dest);

        if (b != bit_src
            || bit_src >= 8
            /* Same position: No need to copy as requested by FIXP_P. */
            || (bit_dest == bit_src && !fixp_p))
          continue;

        if (t_bit_src != bit_src)
          {
            /* Source bit is not yet in T: Store it to T. */

            t_bit_src = bit_src;

            xop[3] = GEN_INT (bit_src);
            avr_asm_len ("bst %T1%T3", xop, plen, 1);
          }

        /* Load destination bit with T. */

        xop[3] = GEN_INT (bit_dest);
        avr_asm_len ("bld %T0%T3", xop, plen, 1);
      }
}
13856
13857
13858 /* PLEN == 0: Print assembler code for `insert_bits'.
13859 PLEN != 0: Compute code length in bytes.
13860
13861 OP[0]: Result
13862 OP[1]: The mapping composed of nibbles. If nibble no. N is
13863 0: Bit N of result is copied from bit OP[2].0
13864 ... ...
13865 7: Bit N of result is copied from bit OP[2].7
13866 0xf: Bit N of result is copied from bit OP[3].N
13867 OP[2]: Bits to be inserted
13868 OP[3]: Target value */
13869
const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  /* Map with nibble semantics as described in the header comment.  */
  unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  /* Rearrange operands for avr_move_bits: 0 = dest, 1 = src bits,
     2 = target value; xop[3] is scratch for bit numbers.  */
  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
      fixp_p = false;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      /* If skipping fixed points saves more than 3 words, seed the
         fixed bits via the EOR/ANDI/EOR masking idiom instead.  */

      if (fixp_p && n_fix - n_nofix > 3)
        {
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1" CR_TAB
                       "andi %0,%3" CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
13944
13945
13946 /* IDs for all the AVR builtins. */
13947
/* One enumerator per entry of builtins.def, plus a final count.  */

enum avr_builtin_id
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)  \
    AVR_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    AVR_BUILTIN_COUNT
  };

struct GTY(()) avr_builtin_description
{
  /* Insn code that expands the built-in, or CODE_FOR_nothing.  */
  enum insn_code icode;

  /* Number of arguments the built-in takes.  */
  int n_args;

  /* The built-in's function declaration; set by avr_init_builtins.  */
  tree fndecl;
};


/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
   that a built-in's ID can be used to access the built-in by means of
   avr_bdesc[ID] */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)         \
    { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
  };
13978
13979
13980 /* Implement `TARGET_BUILTIN_DECL'. */
13981
13982 static tree
avr_builtin_decl(unsigned id,bool initialize_p ATTRIBUTE_UNUSED)13983 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
13984 {
13985 if (id < AVR_BUILTIN_COUNT)
13986 return avr_bdesc[id].fndecl;
13987
13988 return error_mark_node;
13989 }
13990
13991
13992 static void
avr_init_builtin_int24(void)13993 avr_init_builtin_int24 (void)
13994 {
13995 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
13996 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
13997
13998 lang_hooks.types.register_builtin_type (int24_type, "__int24");
13999 lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
14000 }
14001
14002
14003 /* Implement `TARGET_INIT_BUILTINS' */
14004 /* Set up all builtin functions for this target. */
14005
static void
avr_init_builtins (void)
{
  /* Function type nodes for the scalar built-ins.  */

  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  /* Pointer-to-const in the __memx address space, for __builtin_avr_flash_segment.  */

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            TYPE_QUAL_CONST
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,
                                NULL);

  /* Integer type of the same precision / signedness as fixed-point type T.  */

#define ITYP(T)                                                         \
  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))

  /* The following macros build the function types for one fract (r)
     and one accum (k) flavor each, given the prefix FX.  */

#define FX_FTYPE_FX(fx)                                                 \
  tree fx##r_ftype_##fx##r                                              \
    = build_function_type_list (node_##fx##r, node_##fx##r, NULL);      \
  tree fx##k_ftype_##fx##k                                              \
    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)

#define FX_FTYPE_FX_INT(fx)                                             \
  tree fx##r_ftype_##fx##r_int                                          \
    = build_function_type_list (node_##fx##r, node_##fx##r,             \
                                integer_type_node, NULL);               \
  tree fx##k_ftype_##fx##k_int                                          \
    = build_function_type_list (node_##fx##k, node_##fx##k,             \
                                integer_type_node, NULL)

#define INT_FTYPE_FX(fx)                                                \
  tree int_ftype_##fx##r                                                \
    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
  tree int_ftype_##fx##k                                                \
    = build_function_type_list (integer_type_node, node_##fx##k, NULL)

#define INTX_FTYPE_FX(fx)                                               \
  tree int##fx##r_ftype_##fx##r                                         \
    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
  tree int##fx##k_ftype_##fx##k                                         \
    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)

#define FX_FTYPE_INTX(fx)                                               \
  tree fx##r_ftype_int##fx##r                                           \
    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
  tree fx##k_ftype_int##fx##k                                           \
    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)

  /* Abbreviations for the fixed-point type nodes used by the macros
     above: h = short, n = plain, l = long, ll = long long; a leading
     u marks the unsigned variant.  */

  tree node_hr = short_fract_type_node;
  tree node_nr = fract_type_node;
  tree node_lr = long_fract_type_node;
  tree node_llr = long_long_fract_type_node;

  tree node_uhr = unsigned_short_fract_type_node;
  tree node_unr = unsigned_fract_type_node;
  tree node_ulr = unsigned_long_fract_type_node;
  tree node_ullr = unsigned_long_long_fract_type_node;

  tree node_hk = short_accum_type_node;
  tree node_nk = accum_type_node;
  tree node_lk = long_accum_type_node;
  tree node_llk = long_long_accum_type_node;

  tree node_uhk = unsigned_short_accum_type_node;
  tree node_unk = unsigned_accum_type_node;
  tree node_ulk = unsigned_long_accum_type_node;
  tree node_ullk = unsigned_long_long_accum_type_node;


  /* For absfx builtins.  */

  FX_FTYPE_FX (h);
  FX_FTYPE_FX (n);
  FX_FTYPE_FX (l);
  FX_FTYPE_FX (ll);

  /* For roundfx builtins.  */

  FX_FTYPE_FX_INT (h);
  FX_FTYPE_FX_INT (n);
  FX_FTYPE_FX_INT (l);
  FX_FTYPE_FX_INT (ll);

  FX_FTYPE_FX_INT (uh);
  FX_FTYPE_FX_INT (un);
  FX_FTYPE_FX_INT (ul);
  FX_FTYPE_FX_INT (ull);

  /* For countlsfx builtins.  */

  INT_FTYPE_FX (h);
  INT_FTYPE_FX (n);
  INT_FTYPE_FX (l);
  INT_FTYPE_FX (ll);

  INT_FTYPE_FX (uh);
  INT_FTYPE_FX (un);
  INT_FTYPE_FX (ul);
  INT_FTYPE_FX (ull);

  /* For bitsfx builtins.  */

  INTX_FTYPE_FX (h);
  INTX_FTYPE_FX (n);
  INTX_FTYPE_FX (l);
  INTX_FTYPE_FX (ll);

  INTX_FTYPE_FX (uh);
  INTX_FTYPE_FX (un);
  INTX_FTYPE_FX (ul);
  INTX_FTYPE_FX (ull);

  /* For fxbits builtins.  */

  FX_FTYPE_INTX (h);
  FX_FTYPE_INTX (n);
  FX_FTYPE_INTX (l);
  FX_FTYPE_INTX (ll);

  FX_FTYPE_INTX (uh);
  FX_FTYPE_INTX (un);
  FX_FTYPE_INTX (ul);
  FX_FTYPE_INTX (ull);


  /* Register every built-in from builtins.def under its lower-cased
     __builtin_avr_ name and record its declaration in avr_bdesc[].  */

#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
  {                                                                     \
    int id = AVR_BUILTIN_ ## NAME;                                      \
    const char *Name = "__builtin_avr_" #NAME;                          \
    char *name = (char*) alloca (1 + strlen (Name));                    \
                                                                        \
    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
    avr_bdesc[id].fndecl                                                \
      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
                              BUILT_IN_MD, LIBNAME, NULL_TREE);         \
  }
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}
14183
14184
14185 /* Subroutine of avr_expand_builtin to expand vanilla builtins
14186 with non-void result and 1 ... 3 arguments. */
14187
static rtx
avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, xop[3];
  int n_args = call_expr_nargs (exp);
  machine_mode tmode = insn_data[icode].operand[0].mode;

  gcc_assert (n_args >= 1 && n_args <= 3);

  /* (Re-)allocate TARGET when it is absent or unsuitable as operand 0
     of the insn.  */

  if (target == NULL_RTX
      || GET_MODE (target) != tmode
      || !insn_data[icode].operand[0].predicate (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  /* Expand each call argument and legitimize it for operand N + 1.  */

  for (int n = 0; n < n_args; n++)
    {
      tree arg = CALL_EXPR_ARG (exp, n);
      rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      machine_mode opmode = GET_MODE (op);
      machine_mode mode = insn_data[icode].operand[n + 1].mode;

      /* Narrow an SImode (or mode-less constant) argument when the
         insn wants HImode input.  */

      if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
        {
          opmode = HImode;
          op = gen_lowpart (HImode, op);
        }

      /* In case the insn wants input operands in modes different from
         the result, abort.  */

      gcc_assert (opmode == mode || opmode == VOIDmode);

      if (!insn_data[icode].operand[n + 1].predicate (op, mode))
        op = copy_to_mode_reg (mode, op);

      xop[n] = op;
    }

  switch (n_args)
    {
    case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
    case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
    case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;

    default:
      gcc_unreachable();
    }

  /* The generator may refuse the operands; then nothing is emitted.  */

  if (pat == NULL_RTX)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
14245
14246
14247 /* Implement `TARGET_EXPAND_BUILTIN'. */
14248 /* Expand an expression EXP that calls a built-in function,
14249 with result going to TARGET if that's convenient
14250 (and in mode MODE if that's convenient).
14251 SUBTARGET may be used as the target for computing one of EXP's operands.
14252 IGNORE is nonzero if the value is to be ignored. */
14253
static rtx
avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  const struct avr_builtin_description *d = &avr_bdesc[id];
  tree arg0;
  rtx op0;

  gcc_assert (id < AVR_BUILTIN_COUNT);

  /* Built-ins that need special treatment; all other cases break out
     of the switch and are expanded generically below.  */

  switch (id)
    {
    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT (1)));
      return 0;

    case AVR_BUILTIN_DELAY_CYCLES:
      {
        /* The cycle count must be a compile-time constant.  */

        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_delay_cycles (op0);

        return NULL_RTX;
      }

    case AVR_BUILTIN_NOPS:
      {
        /* Likewise: the NOP count must be a compile-time constant.  */

        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_nops (op0);

        return NULL_RTX;
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        /* Only the map (first argument) must be constant; the insn
           itself is emitted by the generic expansion below.  */

        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          {
            error ("%s expects a compile time long integer constant"
                   " as first argument", bname);
            return target;
          }

        break;
      }

    case AVR_BUILTIN_ROUNDHR: case AVR_BUILTIN_ROUNDUHR:
    case AVR_BUILTIN_ROUNDR:  case AVR_BUILTIN_ROUNDUR:
    case AVR_BUILTIN_ROUNDLR: case AVR_BUILTIN_ROUNDULR:
    case AVR_BUILTIN_ROUNDLLR: case AVR_BUILTIN_ROUNDULLR:

    case AVR_BUILTIN_ROUNDHK: case AVR_BUILTIN_ROUNDUHK:
    case AVR_BUILTIN_ROUNDK:  case AVR_BUILTIN_ROUNDUK:
    case AVR_BUILTIN_ROUNDLK: case AVR_BUILTIN_ROUNDULK:
    case AVR_BUILTIN_ROUNDLLK: case AVR_BUILTIN_ROUNDULLK:

      /* Warn about odd rounding.  Rounding points >= FBIT will have
         no effect.  */

      if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
        break;

      int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));

      if (rbit >= (int) GET_MODE_FBIT (mode))
        {
          warning (OPT_Wextra, "rounding to %d bits has no effect for "
                   "fixed-point value with %d fractional bits",
                   rbit, GET_MODE_FBIT (mode));

          /* Rounding is a no-op here: just return the first argument.  */

          return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
                              EXPAND_NORMAL);
        }
      else if (rbit <= - (int) GET_MODE_IBIT (mode))
        {
          warning (0, "rounding result will always be 0");
          return CONST0_RTX (mode);
        }

      /* The rounding points RP satisfies now:  -IBIT < RP < FBIT.

         TR 18037 only specifies results for RP > 0.  However, the
         remaining cases of -IBIT < RP <= 0 can easily be supported
         without any additional overhead.  */

      break; /* round */
    }

  /* No fold found and no insn:  Call support function from libgcc.  */

  if (d->icode == CODE_FOR_nothing
      && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
    {
      return expand_call (exp, target, ignore);
    }

  /* No special treatment needed: vanilla expand.  */

  gcc_assert (d->icode != CODE_FOR_nothing);
  gcc_assert (d->n_args == call_expr_nargs (exp));

  if (d->n_args == 0)
    {
      emit_insn ((GEN_FCN (d->icode)) (target));
      return NULL_RTX;
    }

  return avr_default_expand_builtin (d->icode, exp, target);
}
14379
14380
14381 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
14382
static tree
avr_fold_absfx (tree tval)
{
  if (FIXED_CST != TREE_CODE (tval))
    return NULL_TREE;

  /* Our fixed-points have no padding:  Use double_int payload directly.  */

  FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
  unsigned int bits = GET_MODE_BITSIZE (fval.mode);
  double_int ival = fval.data.sext (bits);

  /* Non-negative values are their own absolute value.  */

  if (!ival.is_negative())
    return tval;

  /* ISO/IEC TR 18037, 7.18a.6.2:  The absfx functions are saturating.
     Negating the most negative value would overflow, so saturate it to
     the maximum instead.  */

  fval.data = (ival == double_int::min_value (bits, false).sext (bits))
    ? double_int::max_value (bits, false)
    : -ival;

  return build_fixed (TREE_TYPE (tval), fval);
}
14406
14407
14408 /* Implement `TARGET_FOLD_BUILTIN'. */
14409
static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        /* Nibble swap is a rotate by 4.  */
        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_ABSHR:
    case AVR_BUILTIN_ABSR:
    case AVR_BUILTIN_ABSLR:
    case AVR_BUILTIN_ABSLLR:

    case AVR_BUILTIN_ABSHK:
    case AVR_BUILTIN_ABSK:
    case AVR_BUILTIN_ABSLK:
    case AVR_BUILTIN_ABSLLK:
      /* GCC is not good with folding ABS for fixed-point.  Do it by hand.  */

      return avr_fold_absfx (arg[0]);

    case AVR_BUILTIN_BITSHR: case AVR_BUILTIN_HRBITS:
    case AVR_BUILTIN_BITSHK: case AVR_BUILTIN_HKBITS:
    case AVR_BUILTIN_BITSUHR: case AVR_BUILTIN_UHRBITS:
    case AVR_BUILTIN_BITSUHK: case AVR_BUILTIN_UHKBITS:

    case AVR_BUILTIN_BITSR: case AVR_BUILTIN_RBITS:
    case AVR_BUILTIN_BITSK: case AVR_BUILTIN_KBITS:
    case AVR_BUILTIN_BITSUR: case AVR_BUILTIN_URBITS:
    case AVR_BUILTIN_BITSUK: case AVR_BUILTIN_UKBITS:

    case AVR_BUILTIN_BITSLR: case AVR_BUILTIN_LRBITS:
    case AVR_BUILTIN_BITSLK: case AVR_BUILTIN_LKBITS:
    case AVR_BUILTIN_BITSULR: case AVR_BUILTIN_ULRBITS:
    case AVR_BUILTIN_BITSULK: case AVR_BUILTIN_ULKBITS:

    case AVR_BUILTIN_BITSLLR: case AVR_BUILTIN_LLRBITS:
    case AVR_BUILTIN_BITSLLK: case AVR_BUILTIN_LLKBITS:
    case AVR_BUILTIN_BITSULLR: case AVR_BUILTIN_ULLRBITS:
    case AVR_BUILTIN_BITSULLK: case AVR_BUILTIN_ULLKBITS:

      /* The bitsfx / fxbits functions just reinterpret the bit pattern;
         fold them to a VIEW_CONVERT_EXPR of same precision.  */

      gcc_assert (TYPE_PRECISION (val_type)
                  == TYPE_PRECISION (TREE_TYPE (arg[0])));

      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        unsigned int map;
        bool changed = false;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        tmap = wide_int_to_tree (map_type, wi::to_wide (arg[0]));
        map = TREE_INT_CST_LOW (tmap);

        if (TREE_CODE (tval) != INTEGER_CST
            && avr_map_metric (map, MAP_MASK_PREIMAGE_F) == 0)
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && avr_map_metric (map, MAP_PREIMAGE_0_7) == 0)
          {
            /* Similar for the bits to be inserted. If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            /* Derive the IOR / AND masks bit by bit from the map.  */

            for (size_t i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    if (bits & (1 << mi))     mask_ior |= (1 << i);
                    else                      mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (avr_map_metric (map, MAP_NONFIXED_0_7) == 0)
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            /* (BITS ^ VAL) & MASK ^ VAL selects BITS where MASK is 1
               and VAL elsewhere.  */

            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try to decomposing map to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %x\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        /* Search the candidate table for the cheapest decomposition.  */

        for (size_t i = 0; i < ARRAY_SIZE (avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = wide_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
14600
14601
14602
/* Initialize the GCC target structure.  */

/* Each `#undef` / `#define` pair below overrides a default from
   target-def.h (included last, see the top of this file) with the
   AVR-specific hook implementation, culminating in the definition
   of `targetm` at the bottom.  */

/* Assembler output: pseudo-ops for 16- and 32-bit integer data.
   The "aligned" and "unaligned" variants deliberately map to the
   same directives.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

/* Hooks run at the prologue/epilogue boundaries of each function.  */
#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Function and libcall return-value conventions.  */
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

/* Machine-specific attribute handling.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

/* Section selection and naming.  */
#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

#undef TARGET_ASM_FINAL_POSTSCAN_INSN
#define TARGET_ASM_FINAL_POSTSCAN_INSN avr_asm_final_postscan_insn

/* Cost models used by register allocation and RTL optimizers.  */
#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg

/* Argument passing.  */
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

/* All arguments are treated as named (no special anonymous-arg
   handling): use the always-true hook.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage

/* Hard-register / mode interactions.  */
#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK avr_hard_regno_mode_ok
#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_HARD_REGNO_CALL_PART_CLOBBERED
#define TARGET_HARD_REGNO_CALL_PART_CLOBBERED \
avr_hard_regno_call_part_clobbered

#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

/* Frame layout and register elimination.  */
#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Target-specific builtins: registration, expansion and folding.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

/* Fixed-point types are unconditionally supported on this target.  */
#undef TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

/* Use the old reload pass, not LRA.  */
#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

/* Named address spaces (flash/__memx etc. — see the avr_addr_space_*
   implementations earlier in this file).  */
#undef TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
avr_addr_space_legitimate_address_p

#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef TARGET_ADDR_SPACE_DIAGNOSE_USAGE
#define TARGET_ADDR_SPACE_DIAGNOSE_USAGE avr_addr_space_diagnose_usage

#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

/* Operand printing for assembler output templates.  */
#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

#undef TARGET_USE_BY_PIECES_INFRASTRUCTURE_P
#define TARGET_USE_BY_PIECES_INFRASTRUCTURE_P \
avr_use_by_pieces_infrastructure_p

#undef TARGET_LEGITIMATE_COMBINED_INSN
#define TARGET_LEGITIMATE_COMBINED_INSN avr_legitimate_combined_insn

#undef TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET avr_starting_frame_offset

/* The one and only target hook vector, built from the macros above
   plus the target-def.h defaults for everything not overridden.  */
struct gcc_target targetm = TARGET_INITIALIZER;


/* Garbage-collector root tables for this file, generated by gengtype
   (standard GCC "gt-*.h" convention).  */
#include "gt-avr.h"
14788