/* Auxiliary functions for pipeline descriptions pattern of Andes
   NDS32 cpu for GNU compiler
   Copyright (C) 2012-2020 Free Software Foundation, Inc.
   Contributed by Andes Technology Corporation.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* ------------------------------------------------------------------------ */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"		/* For GEN_FCN.  */
#include "recog.h"
#include "tm-constrs.h"
#include "insn-attr.h"


namespace nds32 {

/* Get the rtx in the PATTERN field of an insn.  If INSN is not an insn,
   the function returns it unchanged.  */
rtx
extract_pattern_from_insn (rtx insn)
{
  if (INSN_P (insn))
    return PATTERN (insn);

  return insn;
}
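
/* For example, extract_pattern_from_insn applied to an insn whose body
   is (set (reg) (reg)) yields that SET rtx, while a bare SET rtx is
   returned unchanged, so callers may pass either form.  */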

/* Get the number of elements in a parallel rtx.  */
size_t
parallel_elements (rtx parallel_rtx)
{
  parallel_rtx = extract_pattern_from_insn (parallel_rtx);
  gcc_assert (GET_CODE (parallel_rtx) == PARALLEL);

  return XVECLEN (parallel_rtx, 0);
}

/* Extract an rtx from a parallel rtx with index NTH.  If NTH is negative,
   the function counts from the end, so an NTH of -1 yields the last
   element.  Out-of-range indexes yield NULL_RTX.  */
rtx
parallel_element (rtx parallel_rtx, int nth)
{
  parallel_rtx = extract_pattern_from_insn (parallel_rtx);
  gcc_assert (GET_CODE (parallel_rtx) == PARALLEL);

  int len = parallel_elements (parallel_rtx);

  if (nth >= 0)
    {
      if (nth >= len)
        return NULL_RTX;

      return XVECEXP (parallel_rtx, 0, nth);
    }
  else
    {
      if (len + nth < 0)
        return NULL_RTX;

      return XVECEXP (parallel_rtx, 0, len + nth);
    }
}
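
/* For example, for a PARALLEL with three elements,
   parallel_element (x, -1) returns the same rtx as
   parallel_element (x, 2), and any index outside [-3, 2]
   returns NULL_RTX.  */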

/* Functions to determine whether INSN is a single-word, double-word
   or partial-word load/store insn.  */

bool
load_single_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_LOAD)
    return false;

  if (INSN_CODE (insn) == CODE_FOR_move_di
      || INSN_CODE (insn) == CODE_FOR_move_df)
    return false;

  return true;
}

bool
store_single_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_STORE)
    return false;

  if (INSN_CODE (insn) == CODE_FOR_move_di
      || INSN_CODE (insn) == CODE_FOR_move_df)
    return false;

  return true;
}

bool
load_double_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_LOAD)
    return false;

  if (INSN_CODE (insn) != CODE_FOR_move_di
      && INSN_CODE (insn) != CODE_FOR_move_df)
    return false;

  return true;
}

bool
store_double_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_STORE)
    return false;

  if (INSN_CODE (insn) != CODE_FOR_move_di
      && INSN_CODE (insn) != CODE_FOR_move_df)
    return false;

  return true;
}

bool
store_offset_reg_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_STORE)
    return false;

  rtx offset_rtx = extract_offset_rtx (insn);

  if (offset_rtx == NULL_RTX)
    return false;

  if (REG_P (offset_rtx))
    return true;

  return false;
}
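
/* store_offset_reg_p is true, for example, for a store to
   (mem (plus (reg $r1) (reg $r2))), whose offset operand is the
   register $r2.  */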

/* Determine if INSN is a post-update insn.  */
bool
post_update_insn_p (rtx_insn *insn)
{
  return find_post_update_rtx (insn) != -1;
}

/* Check if the address of MEM_RTX consists of a base register and an
   immediate offset.  */
bool
immed_offset_p (rtx mem_rtx)
{
  gcc_assert (MEM_P (mem_rtx));

  rtx addr_rtx = XEXP (mem_rtx, 0);

  /* (mem (reg)) is equivalent to (mem (plus (reg) (const_int 0))).  */
  if (REG_P (addr_rtx))
    return true;

  /* (mem (plus (reg) (const_int)))  */
  if (GET_CODE (addr_rtx) == PLUS
      && GET_CODE (XEXP (addr_rtx, 1)) == CONST_INT)
    return true;

  return false;
}

/* Find the post-update rtx in INSN.  If INSN is a load/store multiple
   insn, the function returns the index of the post-update element inside
   its PARALLEL.  If INSN is a single load/store insn, the function
   returns 0.  If INSN is not a post-update insn, the function
   returns -1.  */
int
find_post_update_rtx (rtx_insn *insn)
{
  rtx mem_rtx;
  int i, len;

  switch (get_attr_type (insn))
    {
    case TYPE_LOAD_MULTIPLE:
    case TYPE_STORE_MULTIPLE:
      /* Find a pattern in a parallel rtx:
         (set (reg) (plus (reg) (const_int)))  */
      len = parallel_elements (insn);
      for (i = 0; i < len; ++i)
        {
          rtx curr_insn = parallel_element (insn, i);

          if (GET_CODE (curr_insn) == SET
              && REG_P (SET_DEST (curr_insn))
              && GET_CODE (SET_SRC (curr_insn)) == PLUS)
            return i;
        }
      return -1;

    case TYPE_LOAD:
    case TYPE_FLOAD:
    case TYPE_STORE:
    case TYPE_FSTORE:
      mem_rtx = extract_mem_rtx (insn);
      /* (mem (post_inc (reg)))  */
      switch (GET_CODE (XEXP (mem_rtx, 0)))
        {
        case POST_INC:
        case POST_DEC:
        case POST_MODIFY:
          return 0;

        default:
          return -1;
        }

    default:
      gcc_unreachable ();
    }
}
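
/* For example, in a store-multiple of the form

     (parallel [(set (reg:SI $r0) (plus (reg:SI $r0) (const_int 8)))
                (set (mem (reg:SI $r0)) (reg:SI $r1))
                (set (mem (plus (reg:SI $r0) (const_int 4))) (reg:SI $r2))])

   find_post_update_rtx returns 0, the index of the base-update SET.  */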

/* Extract the MEM rtx from a load/store insn.  */
rtx
extract_mem_rtx (rtx_insn *insn)
{
  rtx body = PATTERN (insn);

  switch (get_attr_type (insn))
    {
    case TYPE_LOAD:
    case TYPE_FLOAD:
      if (MEM_P (SET_SRC (body)))
        return SET_SRC (body);

      /* unaligned address: (unspec [(mem)])  */
      if (GET_CODE (SET_SRC (body)) == UNSPEC)
        {
          gcc_assert (MEM_P (XVECEXP (SET_SRC (body), 0, 0)));
          return XVECEXP (SET_SRC (body), 0, 0);
        }

      /* (sign_extend (mem))  */
      gcc_assert (MEM_P (XEXP (SET_SRC (body), 0)));
      return XEXP (SET_SRC (body), 0);

    case TYPE_STORE:
    case TYPE_FSTORE:
      if (MEM_P (SET_DEST (body)))
        return SET_DEST (body);

      /* unaligned address: (unspec [(mem)])  */
      if (GET_CODE (SET_DEST (body)) == UNSPEC)
        {
          gcc_assert (MEM_P (XVECEXP (SET_DEST (body), 0, 0)));
          return XVECEXP (SET_DEST (body), 0, 0);
        }

      /* (sign_extend (mem))  */
      gcc_assert (MEM_P (XEXP (SET_DEST (body), 0)));
      return XEXP (SET_DEST (body), 0);

    default:
      gcc_unreachable ();
    }
}
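
/* For example, for a sign-extending load

     (set (reg:SI $r0) (sign_extend:SI (mem:HI (reg:SI $r1))))

   extract_mem_rtx returns the inner (mem:HI ...) rtx.  */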

/* Extract the base register from load/store insns.  The function returns
   NULL_RTX if the address does not contain any register.  */
rtx
extract_base_reg (rtx_insn *insn)
{
  int post_update_rtx_index;
  rtx mem_rtx;
  rtx plus_rtx;

  /* Find the MEM rtx.  If the insn updates its base register, that
     register is returned directly.  */
  switch (get_attr_type (insn))
    {
    case TYPE_LOAD_MULTIPLE:
      post_update_rtx_index = find_post_update_rtx (insn);

      if (post_update_rtx_index != -1)
        return SET_DEST (parallel_element (insn, post_update_rtx_index));

      mem_rtx = SET_SRC (parallel_element (insn, 0));
      break;

    case TYPE_STORE_MULTIPLE:
      post_update_rtx_index = find_post_update_rtx (insn);

      if (post_update_rtx_index != -1)
        return SET_DEST (parallel_element (insn, post_update_rtx_index));

      mem_rtx = SET_DEST (parallel_element (insn, 0));
      break;

    case TYPE_LOAD:
    case TYPE_FLOAD:
    case TYPE_STORE:
    case TYPE_FSTORE:
      mem_rtx = extract_mem_rtx (insn);
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (mem_rtx));

  /* (mem (reg))  */
  if (REG_P (XEXP (mem_rtx, 0)))
    return XEXP (mem_rtx, 0);

  /* (mem (lo_sum (reg) (symbol_ref)))  */
  if (GET_CODE (XEXP (mem_rtx, 0)) == LO_SUM)
    return XEXP (XEXP (mem_rtx, 0), 0);

  plus_rtx = XEXP (mem_rtx, 0);

  if (GET_CODE (plus_rtx) == SYMBOL_REF
      || GET_CODE (plus_rtx) == CONST)
    return NULL_RTX;

  /* (mem (plus (reg) (const_int))) or
     (mem (plus (mult (reg) (const_int 4)) (reg))) or
     (mem (post_inc (reg))) or
     (mem (post_dec (reg))) or
     (mem (post_modify (reg) (plus (reg) (reg))))  */
  gcc_assert (GET_CODE (plus_rtx) == PLUS
              || GET_CODE (plus_rtx) == POST_INC
              || GET_CODE (plus_rtx) == POST_DEC
              || GET_CODE (plus_rtx) == POST_MODIFY);

  if (REG_P (XEXP (plus_rtx, 0)))
    return XEXP (plus_rtx, 0);

  gcc_assert (REG_P (XEXP (plus_rtx, 1)));
  return XEXP (plus_rtx, 1);
}
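
/* For example, for a scaled-index address
   (mem (plus (mult (reg $r1) (const_int 4)) (reg $r2))), the first
   operand of the PLUS is a MULT rather than a register, so the base
   returned is (reg $r2).  */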

/* Extract the offset rtx from load/store insns.  The function returns
   NULL_RTX if the offset is absent.  */
rtx
extract_offset_rtx (rtx_insn *insn)
{
  rtx mem_rtx;
  rtx plus_rtx;
  rtx offset_rtx;

  /* Find the MEM rtx.  Load/store multiple insns have no offset
     operand, so return NULL_RTX for them.  */
  switch (get_attr_type (insn))
    {
    case TYPE_LOAD_MULTIPLE:
    case TYPE_STORE_MULTIPLE:
      return NULL_RTX;

    case TYPE_LOAD:
    case TYPE_FLOAD:
    case TYPE_STORE:
    case TYPE_FSTORE:
      mem_rtx = extract_mem_rtx (insn);
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (mem_rtx));

  /* (mem (reg))  */
  if (REG_P (XEXP (mem_rtx, 0)))
    return NULL_RTX;

  plus_rtx = XEXP (mem_rtx, 0);

  switch (GET_CODE (plus_rtx))
    {
    case SYMBOL_REF:
    case CONST:
    case POST_INC:
    case POST_DEC:
      return NULL_RTX;

    case PLUS:
      /* (mem (plus (reg) (const_int))) or
         (mem (plus (mult (reg) (const_int 4)) (reg)))  */
      if (REG_P (XEXP (plus_rtx, 0)))
        offset_rtx = XEXP (plus_rtx, 1);
      else
        {
          gcc_assert (REG_P (XEXP (plus_rtx, 1)));
          offset_rtx = XEXP (plus_rtx, 0);
        }

      if (ARITHMETIC_P (offset_rtx))
        {
          gcc_assert (GET_CODE (offset_rtx) == MULT);
          gcc_assert (REG_P (XEXP (offset_rtx, 0)));
          offset_rtx = XEXP (offset_rtx, 0);
        }
      break;

    case LO_SUM:
      /* (mem (lo_sum (reg) (symbol_ref)))  */
      offset_rtx = XEXP (plus_rtx, 1);
      break;

    case POST_MODIFY:
      /* (mem (post_modify (reg) (plus (reg) (reg / const_int))))  */
      gcc_assert (REG_P (XEXP (plus_rtx, 0)));
      plus_rtx = XEXP (plus_rtx, 1);
      gcc_assert (GET_CODE (plus_rtx) == PLUS);
      offset_rtx = XEXP (plus_rtx, 0);
      break;

    default:
      gcc_unreachable ();
    }

  return offset_rtx;
}
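
/* For example, for the scaled-index address
   (mem (plus (mult (reg $r1) (const_int 4)) (reg $r2))), the MULT is
   taken as the offset operand and the index register (reg $r1) is
   returned.  */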

/* Extract the register of the shift operand from an ALU_SHIFT rtx.  */
rtx
extract_shift_reg (rtx alu_shift_rtx)
{
  alu_shift_rtx = extract_pattern_from_insn (alu_shift_rtx);

  rtx alu_rtx = SET_SRC (alu_shift_rtx);
  rtx shift_rtx;

  /* Various forms of ALU_SHIFT can be made by the combiner.
     See the difference between add_slli and sub_slli in nds32.md.  */
  if (REG_P (XEXP (alu_rtx, 0)))
    shift_rtx = XEXP (alu_rtx, 1);
  else
    shift_rtx = XEXP (alu_rtx, 0);

  return XEXP (shift_rtx, 0);
}
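
/* For example, an add_slli-style source
   (plus (ashift (reg $r1) (const_int 2)) (reg $r2)) puts the shifted
   operand first, while a sub_slli-style source
   (minus (reg $r2) (ashift (reg $r1) (const_int 2))) puts it second;
   in both cases the function returns (reg $r1).  */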

/* Check if INSN is a movd44 insn.  */
bool
movd44_insn_p (rtx_insn *insn)
{
  if (get_attr_type (insn) == TYPE_ALU
      && (INSN_CODE (insn) == CODE_FOR_move_di
          || INSN_CODE (insn) == CODE_FOR_move_df))
    {
      rtx body = PATTERN (insn);
      gcc_assert (GET_CODE (body) == SET);

      rtx src = SET_SRC (body);
      rtx dest = SET_DEST (body);

      if ((REG_P (src) || GET_CODE (src) == SUBREG)
          && (REG_P (dest) || GET_CODE (dest) == SUBREG))
        return true;

      return false;
    }

  return false;
}

/* Extract the second result (odd reg) of a movd44 insn.  */
rtx
extract_movd44_odd_reg (rtx_insn *insn)
{
  gcc_assert (movd44_insn_p (insn));

  rtx def_reg = SET_DEST (PATTERN (insn));
  machine_mode mode;

  gcc_assert (REG_P (def_reg) || GET_CODE (def_reg) == SUBREG);
  switch (GET_MODE (def_reg))
    {
    case E_DImode:
      mode = SImode;
      break;

    case E_DFmode:
      mode = SFmode;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_highpart (mode, def_reg);
}
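
/* For example, a DImode destination yields its SImode high part via
   gen_highpart, which per the comment above corresponds to the odd
   register of the even/odd pair written by movd44.  */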

/* Extract the rtx representing non-accumulation operands of a MAC insn.  */
rtx
extract_mac_non_acc_rtx (rtx_insn *insn)
{
  rtx exp = SET_SRC (PATTERN (insn));

  switch (get_attr_type (insn))
    {
    case TYPE_MAC:
    case TYPE_DMAC:
      if (REG_P (XEXP (exp, 0)))
        return XEXP (exp, 1);
      else
        return XEXP (exp, 0);

    default:
      gcc_unreachable ();
    }
}
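
/* For example, for a multiply-accumulate source
   (plus (mult (reg $r1) (reg $r2)) (reg $acc)) the function returns
   the (mult ...) sub-rtx; when the accumulator register appears as
   the first operand instead, the second operand is returned.  */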

/* Check if the DIV insn needs two write ports.  */
bool
divmod_p (rtx_insn *insn)
{
  gcc_assert (get_attr_type (insn) == TYPE_DIV);

  if (INSN_CODE (insn) == CODE_FOR_divmodsi4
      || INSN_CODE (insn) == CODE_FOR_udivmodsi4)
    return true;

  return false;
}
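
/* divmodsi4 and udivmodsi4 produce both a quotient and a remainder,
   roughly

     (parallel [(set (reg $q) (div (reg $ra) (reg $rb)))
                (set (reg $r) (mod (reg $ra) (reg $rb)))])

   so two destination registers must be written back, which is why
   these two insn codes need two write ports.  */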

/* Extract the rtx representing the branch target to help recognize
   data hazards.  */
rtx
extract_branch_target_rtx (rtx_insn *insn)
{
  gcc_assert (CALL_P (insn) || JUMP_P (insn));

  rtx body = PATTERN (insn);

  if (GET_CODE (body) == SET)
    {
      /* RTXs in IF_THEN_ELSE are branch conditions.  */
      if (GET_CODE (SET_SRC (body)) == IF_THEN_ELSE)
        return NULL_RTX;

      return SET_SRC (body);
    }

  if (GET_CODE (body) == CALL)
    return XEXP (body, 0);

  if (GET_CODE (body) == PARALLEL)
    {
      rtx first_rtx = parallel_element (body, 0);

      if (GET_CODE (first_rtx) == SET)
        return SET_SRC (first_rtx);

      if (GET_CODE (first_rtx) == CALL)
        return XEXP (first_rtx, 0);
    }

  /* Handle special cases of bltzal, bgezal and jralnez.  */
  if (GET_CODE (body) == COND_EXEC)
    {
      rtx addr_rtx = XEXP (body, 1);

      if (GET_CODE (addr_rtx) == SET)
        return SET_SRC (addr_rtx);

      if (GET_CODE (addr_rtx) == PARALLEL)
        {
          rtx first_rtx = parallel_element (addr_rtx, 0);

          if (GET_CODE (first_rtx) == SET)
            {
              rtx call_rtx = SET_SRC (first_rtx);
              gcc_assert (GET_CODE (call_rtx) == CALL);

              return XEXP (call_rtx, 0);
            }

          if (GET_CODE (first_rtx) == CALL)
            return XEXP (first_rtx, 0);
        }
    }

  gcc_unreachable ();
}
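
/* For example, an indirect jump (set (pc) (reg $r1)) yields (reg $r1),
   whereas a conditional branch (set (pc) (if_then_else ...)) yields
   NULL_RTX, since its label target cannot create a register data
   hazard.  */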

/* Extract the rtx representing the branch condition to help recognize
   data hazards.  */
rtx
extract_branch_condition_rtx (rtx_insn *insn)
{
  gcc_assert (CALL_P (insn) || JUMP_P (insn));

  rtx body = PATTERN (insn);

  if (GET_CODE (body) == SET)
    {
      rtx if_then_else_rtx = SET_SRC (body);

      if (GET_CODE (if_then_else_rtx) == IF_THEN_ELSE)
        return XEXP (if_then_else_rtx, 0);

      return NULL_RTX;
    }

  if (GET_CODE (body) == COND_EXEC)
    return XEXP (body, 0);

  return NULL_RTX;
}
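
/* For example, for a conditional branch

     (set (pc) (if_then_else (eq (reg $r1) (const_int 0))
                             (label_ref ...) (pc)))

   the function returns the (eq ...) comparison rtx.  */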

} // namespace nds32