1 /* Compute different info about registers.
2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* This file contains regscan pass of the compiler and passes for
22 dealing with info about modes of pseudo-registers inside
23 subregisters. It also defines some tables of information about the
24 hardware registers, function init_reg_sets to initialize the
25 tables, and other auxiliary functions to deal with info about
26 registers and their classes. */
27
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "backend.h"
32 #include "target.h"
33 #include "rtl.h"
34 #include "tree.h"
35 #include "df.h"
36 #include "memmodel.h"
37 #include "tm_p.h"
38 #include "insn-config.h"
39 #include "regs.h"
40 #include "ira.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "reload.h"
44 #include "output.h"
45 #include "tree-pass.h"
46
/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Used to cache the results of simplifiable_subregs.  SHAPE is the input
   parameter and SIMPLIFIABLE_REGS is the result.  */
struct simplifiable_subreg
{
  /* Construct a cache entry for the given shape; the constructor body is
     defined elsewhere in this file.  */
  simplifiable_subreg (const subreg_shape &);

  /* The subreg shape that keys this cache entry.  */
  subreg_shape shape;
  /* Hard registers for which a subreg of SHAPE is simplifiable
     (the cached result of simplifiable_subregs).  */
  HARD_REG_SET simplifiable_regs;
};
60
/* Default instances of the per-target register structures; with
   SWITCHABLE_TARGET, the this_target_* pointers select the active set.  */
struct target_hard_regs default_target_hard_regs;
struct target_regs default_target_regs;
#if SWITCHABLE_TARGET
struct target_hard_regs *this_target_hard_regs = &default_target_hard_regs;
struct target_regs *this_target_regs = &default_target_regs;
#endif

/* Data for initializing fixed_regs.  */
static const char initial_fixed_regs[] = FIXED_REGISTERS;

/* Data for initializing call_used_regs.  */
static const char initial_call_used_regs[] = CALL_USED_REGISTERS;

#ifdef CALL_REALLY_USED_REGISTERS
/* Data for initializing call_really_used_regs.  */
static const char initial_call_really_used_regs[] = CALL_REALLY_USED_REGISTERS;
#endif

/* Accessor for the "really used by calls" property of hard register X.
   Falls back to call_used_regs when the target does not provide
   CALL_REALLY_USED_REGISTERS.  */
#ifdef CALL_REALLY_USED_REGISTERS
#define CALL_REALLY_USED_REGNO_P(X)  call_really_used_regs[X]
#else
#define CALL_REALLY_USED_REGNO_P(X)  call_used_regs[X]
#endif

/* Indexed by hard register number, contains 1 for registers
   that are being used for global register decls.
   These must be exempt from ordinary flow analysis
   and are also considered fixed.  */
char global_regs[FIRST_PSEUDO_REGISTER];

/* Declaration for the global register.  */
tree global_regs_decl[FIRST_PSEUDO_REGISTER];

/* Same information as REGS_INVALIDATED_BY_CALL but in regset form to be used
   in dataflow more conveniently.  */
regset regs_invalidated_by_call_regset;

/* Same information as FIXED_REG_SET but in regset form.  */
regset fixed_reg_set_regset;

/* The bitmap_obstack is used to hold some static variables that
   should not be reset after each function is compiled.  */
static bitmap_obstack persistent_obstack;

/* Used to initialize reg_alloc_order.  */
#ifdef REG_ALLOC_ORDER
static int initial_reg_alloc_order[FIRST_PSEUDO_REGISTER] = REG_ALLOC_ORDER;
#endif

/* The same information, but as an array of unsigned ints.  We copy from
   these unsigned ints to the table above.  We do this so the tm.h files
   do not have to be aware of the wordsize for machines with <= 64 regs.
   Note that we hard-code 32 here, not HOST_BITS_PER_INT.  */
#define N_REG_INTS  \
  ((FIRST_PSEUDO_REGISTER + (32 - 1)) / 32)

/* Note: the type is "unsigned"; the identifier is int_reg_class_contents.  */
static const unsigned int_reg_class_contents[N_REG_CLASSES][N_REG_INTS]
  = REG_CLASS_CONTENTS;

/* Array containing all of the register names.  */
static const char *const initial_reg_names[] = REGISTER_NAMES;

/* Array containing all of the register class names.  */
const char * reg_class_names[] = REG_CLASS_NAMES;

/* No more global register variables may be declared; true once
   reginfo has been initialized.  */
static int no_global_reg_vars = 0;
129
/* Given a register bitmap FROM, turn on the bits in HARD_REG_SET *TO that
   correspond to the hard registers, if any, set in that map.  This
   could be done far more efficiently by having all sorts of special-cases
   with moving single words, but probably isn't worth the trouble.  */
void
reg_set_to_hard_reg_set (HARD_REG_SET *to, const_bitmap from)
{
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      /* Bitmap bits are visited in increasing index order, so the first
	 pseudo-register index means no further hard registers follow.  */
      if (i >= FIRST_PSEUDO_REGISTER)
	return;
      SET_HARD_REG_BIT (*to, i);
    }
}
147
/* Function called only once per target_globals to initialize the
   target_hard_regs structure.  Once this is done, various switches
   may override.  */
void
init_reg_sets (void)
{
  int i, j;

  /* First copy the register information from the initial int form into
     the regsets.  */

  for (i = 0; i < N_REG_CLASSES; i++)
    {
      CLEAR_HARD_REG_SET (reg_class_contents[i]);

      /* Note that we hard-code 32 here, not HOST_BITS_PER_INT.  */
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	if (int_reg_class_contents[i][j / 32]
	    & ((unsigned) 1 << (j % 32)))
	  SET_HARD_REG_BIT (reg_class_contents[i], j);
    }

  /* Sanity check: make sure the target macros FIXED_REGISTERS and
     CALL_USED_REGISTERS had the right number of initializers.  */
  gcc_assert (sizeof fixed_regs == sizeof initial_fixed_regs);
  gcc_assert (sizeof call_used_regs == sizeof initial_call_used_regs);
#ifdef CALL_REALLY_USED_REGISTERS
  gcc_assert (sizeof call_really_used_regs
	      == sizeof initial_call_really_used_regs);
#endif
#ifdef REG_ALLOC_ORDER
  gcc_assert (sizeof reg_alloc_order == sizeof initial_reg_alloc_order);
#endif
  gcc_assert (sizeof reg_names == sizeof initial_reg_names);

  /* Seed the live tables from the compile-time initializers.  */
  memcpy (fixed_regs, initial_fixed_regs, sizeof fixed_regs);
  memcpy (call_used_regs, initial_call_used_regs, sizeof call_used_regs);
#ifdef CALL_REALLY_USED_REGISTERS
  memcpy (call_really_used_regs, initial_call_really_used_regs,
	  sizeof call_really_used_regs);
#endif
#ifdef REG_ALLOC_ORDER
  memcpy (reg_alloc_order, initial_reg_alloc_order, sizeof reg_alloc_order);
#endif
  memcpy (reg_names, initial_reg_names, sizeof reg_names);

  /* Start with every register accessible and usable as an operand;
     init_reg_sets_1 and the target may later restrict these.  */
  SET_HARD_REG_SET (accessible_reg_set);
  SET_HARD_REG_SET (operand_reg_set);
}
197
198 /* We need to save copies of some of the register information which
199 can be munged by command-line switches so we can restore it during
200 subsequent back-end reinitialization. */
201 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
202 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
203 #ifdef CALL_REALLY_USED_REGISTERS
204 static char saved_call_really_used_regs[FIRST_PSEUDO_REGISTER];
205 #endif
206 static const char *saved_reg_names[FIRST_PSEUDO_REGISTER];
207 static HARD_REG_SET saved_accessible_reg_set;
208 static HARD_REG_SET saved_operand_reg_set;
209
/* Save the register information into the saved_* copies below, so that
   command-line switches can be undone by restore_register_info.  */
void
save_register_info (void)
{
  /* Sanity check: make sure the target macros FIXED_REGISTERS and
     CALL_USED_REGISTERS had the right number of initializers.  */
  gcc_assert (sizeof fixed_regs == sizeof saved_fixed_regs);
  gcc_assert (sizeof call_used_regs == sizeof saved_call_used_regs);
  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

  /* Likewise for call_really_used_regs.  */
#ifdef CALL_REALLY_USED_REGISTERS
  gcc_assert (sizeof call_really_used_regs
	      == sizeof saved_call_really_used_regs);
  memcpy (saved_call_really_used_regs, call_really_used_regs,
	  sizeof call_really_used_regs);
#endif

  /* And similarly for reg_names.  */
  gcc_assert (sizeof reg_names == sizeof saved_reg_names);
  memcpy (saved_reg_names, reg_names, sizeof reg_names);
  COPY_HARD_REG_SET (saved_accessible_reg_set, accessible_reg_set);
  COPY_HARD_REG_SET (saved_operand_reg_set, operand_reg_set);
}
235
/* Restore the register information from the copies made by
   save_register_info.  Inverse of save_register_info.  */
static void
restore_register_info (void)
{
  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);

#ifdef CALL_REALLY_USED_REGISTERS
  memcpy (call_really_used_regs, saved_call_really_used_regs,
	  sizeof call_really_used_regs);
#endif

  memcpy (reg_names, saved_reg_names, sizeof reg_names);
  COPY_HARD_REG_SET (accessible_reg_set, saved_accessible_reg_set);
  COPY_HARD_REG_SET (operand_reg_set, saved_operand_reg_set);
}
252
253 /* After switches have been processed, which perhaps alter
254 `fixed_regs' and `call_used_regs', convert them to HARD_REG_SETs. */
255 static void
init_reg_sets_1(void)256 init_reg_sets_1 (void)
257 {
258 unsigned int i, j;
259 unsigned int /* machine_mode */ m;
260
261 restore_register_info ();
262
263 #ifdef REG_ALLOC_ORDER
264 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
265 inv_reg_alloc_order[reg_alloc_order[i]] = i;
266 #endif
267
268 /* Let the target tweak things if necessary. */
269
270 targetm.conditional_register_usage ();
271
272 /* Compute number of hard regs in each class. */
273
274 memset (reg_class_size, 0, sizeof reg_class_size);
275 for (i = 0; i < N_REG_CLASSES; i++)
276 {
277 bool any_nonfixed = false;
278 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
279 if (TEST_HARD_REG_BIT (reg_class_contents[i], j))
280 {
281 reg_class_size[i]++;
282 if (!fixed_regs[j])
283 any_nonfixed = true;
284 }
285 class_only_fixed_regs[i] = !any_nonfixed;
286 }
287
288 /* Initialize the table of subunions.
289 reg_class_subunion[I][J] gets the largest-numbered reg-class
290 that is contained in the union of classes I and J. */
291
292 memset (reg_class_subunion, 0, sizeof reg_class_subunion);
293 for (i = 0; i < N_REG_CLASSES; i++)
294 {
295 for (j = 0; j < N_REG_CLASSES; j++)
296 {
297 HARD_REG_SET c;
298 int k;
299
300 COPY_HARD_REG_SET (c, reg_class_contents[i]);
301 IOR_HARD_REG_SET (c, reg_class_contents[j]);
302 for (k = 0; k < N_REG_CLASSES; k++)
303 if (hard_reg_set_subset_p (reg_class_contents[k], c)
304 && !hard_reg_set_subset_p (reg_class_contents[k],
305 reg_class_contents
306 [(int) reg_class_subunion[i][j]]))
307 reg_class_subunion[i][j] = (enum reg_class) k;
308 }
309 }
310
311 /* Initialize the table of superunions.
312 reg_class_superunion[I][J] gets the smallest-numbered reg-class
313 containing the union of classes I and J. */
314
315 memset (reg_class_superunion, 0, sizeof reg_class_superunion);
316 for (i = 0; i < N_REG_CLASSES; i++)
317 {
318 for (j = 0; j < N_REG_CLASSES; j++)
319 {
320 HARD_REG_SET c;
321 int k;
322
323 COPY_HARD_REG_SET (c, reg_class_contents[i]);
324 IOR_HARD_REG_SET (c, reg_class_contents[j]);
325 for (k = 0; k < N_REG_CLASSES; k++)
326 if (hard_reg_set_subset_p (c, reg_class_contents[k]))
327 break;
328
329 reg_class_superunion[i][j] = (enum reg_class) k;
330 }
331 }
332
333 /* Initialize the tables of subclasses and superclasses of each reg class.
334 First clear the whole table, then add the elements as they are found. */
335
336 for (i = 0; i < N_REG_CLASSES; i++)
337 {
338 for (j = 0; j < N_REG_CLASSES; j++)
339 reg_class_subclasses[i][j] = LIM_REG_CLASSES;
340 }
341
342 for (i = 0; i < N_REG_CLASSES; i++)
343 {
344 if (i == (int) NO_REGS)
345 continue;
346
347 for (j = i + 1; j < N_REG_CLASSES; j++)
348 if (hard_reg_set_subset_p (reg_class_contents[i],
349 reg_class_contents[j]))
350 {
351 /* Reg class I is a subclass of J.
352 Add J to the table of superclasses of I. */
353 enum reg_class *p;
354
355 /* Add I to the table of superclasses of J. */
356 p = ®_class_subclasses[j][0];
357 while (*p != LIM_REG_CLASSES) p++;
358 *p = (enum reg_class) i;
359 }
360 }
361
362 /* Initialize "constant" tables. */
363
364 CLEAR_HARD_REG_SET (fixed_reg_set);
365 CLEAR_HARD_REG_SET (call_used_reg_set);
366 CLEAR_HARD_REG_SET (call_fixed_reg_set);
367 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
368 if (!regs_invalidated_by_call_regset)
369 {
370 bitmap_obstack_initialize (&persistent_obstack);
371 regs_invalidated_by_call_regset = ALLOC_REG_SET (&persistent_obstack);
372 }
373 else
374 CLEAR_REG_SET (regs_invalidated_by_call_regset);
375 if (!fixed_reg_set_regset)
376 fixed_reg_set_regset = ALLOC_REG_SET (&persistent_obstack);
377 else
378 CLEAR_REG_SET (fixed_reg_set_regset);
379
380 AND_HARD_REG_SET (operand_reg_set, accessible_reg_set);
381 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
382 {
383 /* As a special exception, registers whose class is NO_REGS are
384 not accepted by `register_operand'. The reason for this change
385 is to allow the representation of special architecture artifacts
386 (such as a condition code register) without extending the rtl
387 definitions. Since registers of class NO_REGS cannot be used
388 as registers in any case where register classes are examined,
389 it is better to apply this exception in a target-independent way. */
390 if (REGNO_REG_CLASS (i) == NO_REGS)
391 CLEAR_HARD_REG_BIT (operand_reg_set, i);
392
393 /* If a register is too limited to be treated as a register operand,
394 then it should never be allocated to a pseudo. */
395 if (!TEST_HARD_REG_BIT (operand_reg_set, i))
396 {
397 fixed_regs[i] = 1;
398 call_used_regs[i] = 1;
399 }
400
401 /* call_used_regs must include fixed_regs. */
402 gcc_assert (!fixed_regs[i] || call_used_regs[i]);
403 #ifdef CALL_REALLY_USED_REGISTERS
404 /* call_used_regs must include call_really_used_regs. */
405 gcc_assert (!call_really_used_regs[i] || call_used_regs[i]);
406 #endif
407
408 if (fixed_regs[i])
409 {
410 SET_HARD_REG_BIT (fixed_reg_set, i);
411 SET_REGNO_REG_SET (fixed_reg_set_regset, i);
412 }
413
414 if (call_used_regs[i])
415 SET_HARD_REG_BIT (call_used_reg_set, i);
416
417 /* There are a couple of fixed registers that we know are safe to
418 exclude from being clobbered by calls:
419
420 The frame pointer is always preserved across calls. The arg
421 pointer is if it is fixed. The stack pointer usually is,
422 unless TARGET_RETURN_POPS_ARGS, in which case an explicit
423 CLOBBER will be present. If we are generating PIC code, the
424 PIC offset table register is preserved across calls, though the
425 target can override that. */
426
427 if (i == STACK_POINTER_REGNUM)
428 ;
429 else if (global_regs[i])
430 {
431 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
432 SET_REGNO_REG_SET (regs_invalidated_by_call_regset, i);
433 }
434 else if (i == FRAME_POINTER_REGNUM)
435 ;
436 else if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
437 && i == HARD_FRAME_POINTER_REGNUM)
438 ;
439 else if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
440 && i == ARG_POINTER_REGNUM && fixed_regs[i])
441 ;
442 else if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
443 && i == (unsigned) PIC_OFFSET_TABLE_REGNUM && fixed_regs[i])
444 ;
445 else if (CALL_REALLY_USED_REGNO_P (i))
446 {
447 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
448 SET_REGNO_REG_SET (regs_invalidated_by_call_regset, i);
449 }
450 }
451
452 COPY_HARD_REG_SET (call_fixed_reg_set, fixed_reg_set);
453 COPY_HARD_REG_SET (fixed_nonglobal_reg_set, fixed_reg_set);
454
455 /* Preserve global registers if called more than once. */
456 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
457 {
458 if (global_regs[i])
459 {
460 fixed_regs[i] = call_used_regs[i] = 1;
461 SET_HARD_REG_BIT (fixed_reg_set, i);
462 SET_HARD_REG_BIT (call_used_reg_set, i);
463 SET_HARD_REG_BIT (call_fixed_reg_set, i);
464 }
465 }
466
467 memset (have_regs_of_mode, 0, sizeof (have_regs_of_mode));
468 memset (contains_reg_of_mode, 0, sizeof (contains_reg_of_mode));
469 for (m = 0; m < (unsigned int) MAX_MACHINE_MODE; m++)
470 {
471 HARD_REG_SET ok_regs, ok_regs2;
472 CLEAR_HARD_REG_SET (ok_regs);
473 CLEAR_HARD_REG_SET (ok_regs2);
474 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
475 if (!TEST_HARD_REG_BIT (fixed_nonglobal_reg_set, j)
476 && targetm.hard_regno_mode_ok (j, (machine_mode) m))
477 {
478 SET_HARD_REG_BIT (ok_regs, j);
479 if (!fixed_regs[j])
480 SET_HARD_REG_BIT (ok_regs2, j);
481 }
482
483 for (i = 0; i < N_REG_CLASSES; i++)
484 if ((targetm.class_max_nregs ((reg_class_t) i, (machine_mode) m)
485 <= reg_class_size[i])
486 && hard_reg_set_intersect_p (ok_regs, reg_class_contents[i]))
487 {
488 contains_reg_of_mode[i][m] = 1;
489 if (hard_reg_set_intersect_p (ok_regs2, reg_class_contents[i]))
490 {
491 have_regs_of_mode[m] = 1;
492 contains_allocatable_reg_of_mode[i][m] = 1;
493 }
494 }
495 }
496 }
497
/* Compute the table of register modes.
   These values are used to record death information for individual registers
   (as opposed to a multi-register mode).
   This function might be invoked more than once, if the target has support
   for changing register usage conventions on a per-function basis.  */
void
init_reg_modes_target (void)
{
  int i, j;

  /* Cache the target's hard_regno_nregs hook for every (regno, mode)
     pair in this_target_regs.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    for (j = 0; j < MAX_MACHINE_MODE; j++)
      this_target_regs->x_hard_regno_nregs[i][j]
	= targetm.hard_regno_nregs (i, (machine_mode) j);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      /* Ask for a single-register mode valid in hard register I.  */
      reg_raw_mode[i] = choose_hard_reg_mode (i, 1, false);

      /* If we couldn't find a valid mode, just use the previous mode
	 if it is suitable, otherwise fall back on word_mode.  */
      if (reg_raw_mode[i] == VOIDmode)
	{
	  if (i > 0 && hard_regno_nregs (i, reg_raw_mode[i - 1]) == 1)
	    reg_raw_mode[i] = reg_raw_mode[i - 1];
	  else
	    reg_raw_mode[i] = word_mode;
	}
    }
}
529
/* Finish initializing the register sets and initialize the register modes.
   This function might be invoked more than once, if the target has support
   for changing register usage conventions on a per-function basis.  */
void
init_regs (void)
{
  /* This finishes what was started by init_reg_sets, but couldn't be done
     until after register usage was specified.  */
  init_reg_sets_1 ();
}
541
/* The same as the previous function plus re-initializing IRA and the
   recognizer, when the target-specific data has already been set up.  */
void
reinit_regs (void)
{
  init_regs ();
  /* caller_save needs to be re-initialized.  */
  caller_save_initialized_p = false;
  /* Only reinitialize IRA/recog once the target RTL data exists;
     otherwise the normal initialization path will handle it.  */
  if (this_target_rtl->target_specific_initialized)
    {
      ira_init ();
      recog_init ();
    }
}
555
/* Initialize some fake stack-frame MEM references (one per machine mode,
   based at the stack pointer) for use in memory_move_secondary_cost.  */
void
init_fake_stack_mems (void)
{
  int i;

  for (i = 0; i < MAX_MACHINE_MODE; i++)
    top_of_stack[i] = gen_rtx_MEM ((machine_mode) i, stack_pointer_rtx);
}
566
567
/* Compute cost of moving data from a register of class FROM to one of
   TO, using MODE.  Thin wrapper around the target hook.  */

int
register_move_cost (machine_mode mode, reg_class_t from, reg_class_t to)
{
  return targetm.register_move_cost (mode, from, to);
}
576
/* Compute cost of moving registers to/from memory in MODE for class RCLASS.
   IN is true for memory-to-register moves.  Thin wrapper around the
   target hook.  */

int
memory_move_cost (machine_mode mode, reg_class_t rclass, bool in)
{
  return targetm.memory_move_cost (mode, rclass, in);
}
584
/* Compute extra cost of moving registers to/from memory due to reloads.
   Only needed if secondary reloads are required for memory moves.
   MODE/RCLASS/IN are as for memory_move_cost.  Recurses in case the
   secondary reload register itself needs a secondary reload.  */
int
memory_move_secondary_cost (machine_mode mode, reg_class_t rclass,
			    bool in)
{
  reg_class_t altclass;
  int partial_cost = 0;
  /* We need a memory reference to feed to SECONDARY... macros.  */
  /* mem may be unused even if the SECONDARY_ macros are defined.  */
  rtx mem ATTRIBUTE_UNUSED = top_of_stack[(int) mode];

  altclass = secondary_reload_class (in ? 1 : 0, rclass, mode, mem);

  /* No secondary reload class means no extra cost.  */
  if (altclass == NO_REGS)
    return 0;

  /* Cost of the extra register-to-register move in the right direction.  */
  if (in)
    partial_cost = register_move_cost (mode, altclass, rclass);
  else
    partial_cost = register_move_cost (mode, rclass, altclass);

  if (rclass == altclass)
    /* This isn't simply a copy-to-temporary situation.  Can't guess
       what it is, so TARGET_MEMORY_MOVE_COST really ought not to be
       calling here in that case.

       I'm tempted to put in an assert here, but returning this will
       probably only give poor estimates, which is what we would've
       had before this code anyways.  */
    return partial_cost;

  /* Check if the secondary reload register will also need a
     secondary reload.  */
  return memory_move_secondary_cost (mode, altclass, in) + partial_cost;
}
621
/* Return a machine mode that is legitimate for hard reg REGNO and large
   enough to save NREGS registers.  If we can't find one, return VOIDmode.
   If CALL_SAVED is true, only consider modes that are fully call saved
   (i.e. not even partly clobbered by calls).  */
machine_mode
choose_hard_reg_mode (unsigned int regno ATTRIBUTE_UNUSED,
		      unsigned int nregs, bool call_saved)
{
  unsigned int /* machine_mode */ m;
  machine_mode found_mode = VOIDmode, mode;

  /* We first look for the largest integer mode that can be validly
     held in REGNO.  If none, we look for the largest floating-point mode.
     If we still didn't find a valid mode, try CCmode.

     The tests use maybe_gt rather than known_gt because we want (for example)
     N V4SFs to win over plain V4SF even though N might be 1.  */
  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    if (hard_regno_nregs (regno, mode) == nregs
	&& targetm.hard_regno_mode_ok (regno, mode)
	&& (!call_saved
	    || !targetm.hard_regno_call_part_clobbered (regno, mode))
	&& maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
      found_mode = mode;

  /* Next preference: the largest suitable scalar floating-point mode.  */
  FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
    if (hard_regno_nregs (regno, mode) == nregs
	&& targetm.hard_regno_mode_ok (regno, mode)
	&& (!call_saved
	    || !targetm.hard_regno_call_part_clobbered (regno, mode))
	&& maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
      found_mode = mode;

  /* Then vector float modes...  */
  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
    if (hard_regno_nregs (regno, mode) == nregs
	&& targetm.hard_regno_mode_ok (regno, mode)
	&& (!call_saved
	    || !targetm.hard_regno_call_part_clobbered (regno, mode))
	&& maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
      found_mode = mode;

  /* ...and vector integer modes.  */
  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    if (hard_regno_nregs (regno, mode) == nregs
	&& targetm.hard_regno_mode_ok (regno, mode)
	&& (!call_saved
	    || !targetm.hard_regno_call_part_clobbered (regno, mode))
	&& maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
      found_mode = mode;

  if (found_mode != VOIDmode)
    return found_mode;

  /* Iterate over all of the CCmodes; the first suitable one wins.  */
  for (m = (unsigned int) CCmode; m < (unsigned int) NUM_MACHINE_MODES; ++m)
    {
      mode = (machine_mode) m;
      if (hard_regno_nregs (regno, mode) == nregs
	  && targetm.hard_regno_mode_ok (regno, mode)
	  && (!call_saved
	      || !targetm.hard_regno_call_part_clobbered (regno, mode)))
	return mode;
    }

  /* We can't find a mode valid for this register.  */
  return VOIDmode;
}
687
/* Specify the usage characteristics of the register named NAME.
   It should be a fixed register if FIXED and a
   call-used register if CALL_USED.  Diagnoses attempts to change the
   stack or frame pointer to something less restricted.  */
void
fix_register (const char *name, int fixed, int call_used)
{
  int i;
  int reg, nregs;

  /* Decode the name and update the primary form of
     the register info.  */

  if ((reg = decode_reg_name_and_count (name, &nregs)) >= 0)
    {
      gcc_assert (nregs >= 1);
      for (i = reg; i < reg + nregs; i++)
	{
	  /* The stack pointer and the (hard) frame pointer must stay
	     fixed and call-used; reject any request that would relax
	     either property, with a message matching the request.  */
	  if ((i == STACK_POINTER_REGNUM
#ifdef HARD_FRAME_POINTER_REGNUM
	       || i == HARD_FRAME_POINTER_REGNUM
#else
	       || i == FRAME_POINTER_REGNUM
#endif
	       )
	      && (fixed == 0 || call_used == 0))
	    {
	      switch (fixed)
		{
		case 0:
		  switch (call_used)
		    {
		    case 0:
		      error ("can%'t use %qs as a call-saved register", name);
		      break;

		    case 1:
		      error ("can%'t use %qs as a call-used register", name);
		      break;

		    default:
		      gcc_unreachable ();
		    }
		  break;

		case 1:
		  switch (call_used)
		    {
		    case 1:
		      error ("can%'t use %qs as a fixed register", name);
		      break;

		    /* fixed == 1 && call_used == 0 is excluded by the
		       guard above, so only other values remain.  */
		    case 0:
		    default:
		      gcc_unreachable ();
		    }
		  break;

		default:
		  gcc_unreachable ();
		}
	    }
	  else
	    {
	      fixed_regs[i] = fixed;
	      call_used_regs[i] = call_used;
#ifdef CALL_REALLY_USED_REGISTERS
	      /* Keep the invariant that call_really_used_regs implies
		 call_used_regs; fixed registers are handled separately.  */
	      if (fixed == 0)
		call_really_used_regs[i] = call_used;
#endif
	    }
	}
    }
  else
    {
      warning (0, "unknown register name: %s", name);
    }
}
765
/* Mark register number I as global, on behalf of the user variable DECL.
   Diagnoses invalid or conflicting uses, updates the register tables,
   and reinitializes the register sets.  */
void
globalize_reg (tree decl, int i)
{
  location_t loc = DECL_SOURCE_LOCATION (decl);

#ifdef STACK_REGS
  /* Stack-like register files cannot host global register variables.  */
  if (IN_RANGE (i, FIRST_STACK_REG, LAST_STACK_REG))
    {
      error ("stack register used for global register variable");
      return;
    }
#endif

  /* Once code has been generated, a non-fixed register can no longer be
     taken over for a global variable.  */
  if (fixed_regs[i] == 0 && no_global_reg_vars)
    error_at (loc, "global register variable follows a function definition");

  if (global_regs[i])
    {
      warning_at (loc, 0,
		  "register of %qD used for multiple global register variables",
		  decl);
      inform (DECL_SOURCE_LOCATION (global_regs_decl[i]),
	      "conflicts with %qD", global_regs_decl[i]);
      return;
    }

  if (call_used_regs[i] && ! fixed_regs[i])
    warning_at (loc, 0, "call-clobbered register used for global register variable");

  global_regs[i] = 1;
  global_regs_decl[i] = decl;

  /* If we're globalizing the frame pointer, we need to set the
     appropriate regs_invalidated_by_call bit, even if it's already
     set in fixed_regs.  */
  if (i != STACK_POINTER_REGNUM)
    {
      SET_HARD_REG_BIT (regs_invalidated_by_call, i);
      SET_REGNO_REG_SET (regs_invalidated_by_call_regset, i);
    }

  /* If already fixed, nothing else to do.  */
  if (fixed_regs[i])
    return;

  /* A global register is both fixed and call-used from now on.  */
  fixed_regs[i] = call_used_regs[i] = 1;
#ifdef CALL_REALLY_USED_REGISTERS
  call_really_used_regs[i] = 1;
#endif

  SET_HARD_REG_BIT (fixed_reg_set, i);
  SET_HARD_REG_BIT (call_used_reg_set, i);
  SET_HARD_REG_BIT (call_fixed_reg_set, i);

  /* Propagate the change into IRA and the recognizer.  */
  reinit_regs ();
}
823
824
/* Structure used to record preferences of given pseudo.  */
struct reg_pref
{
  /* (enum reg_class) prefclass is the preferred class.  May be
     NO_REGS if no class is better than memory.  */
  char prefclass;

  /* altclass is a register class that we should use for allocating
     pseudo if no register in the preferred class is available.
     If no register in this class is available, memory is preferred.

     It might appear to be more general to have a bitmask of classes here,
     but since it is recommended that there be a class corresponding to the
     union of most major pair of classes, that generality is not required.  */
  char altclass;

  /* allocnoclass is a register class that IRA uses for allocating
     the pseudo.  */
  char allocnoclass;
};
845
/* Record preferences of each pseudo.  This is available after RA is
   run.  */
static struct reg_pref *reg_pref;

/* Current size of reg_info.  */
static int reg_info_size;
/* Value of max_reg_num () at the last resize_reg_info call.  */
static int max_regno_since_last_resize;
854
855 /* Return the reg_class in which pseudo reg number REGNO is best allocated.
856 This function is sometimes called before the info has been computed.
857 When that happens, just return GENERAL_REGS, which is innocuous. */
858 enum reg_class
reg_preferred_class(int regno)859 reg_preferred_class (int regno)
860 {
861 if (reg_pref == 0)
862 return GENERAL_REGS;
863
864 gcc_assert (regno < reg_info_size);
865 return (enum reg_class) reg_pref[regno].prefclass;
866 }
867
868 enum reg_class
reg_alternate_class(int regno)869 reg_alternate_class (int regno)
870 {
871 if (reg_pref == 0)
872 return ALL_REGS;
873
874 gcc_assert (regno < reg_info_size);
875 return (enum reg_class) reg_pref[regno].altclass;
876 }
877
878 /* Return the reg_class which is used by IRA for its allocation. */
879 enum reg_class
reg_allocno_class(int regno)880 reg_allocno_class (int regno)
881 {
882 if (reg_pref == 0)
883 return NO_REGS;
884
885 gcc_assert (regno < reg_info_size);
886 return (enum reg_class) reg_pref[regno].allocnoclass;
887 }
888
889
890
/* Allocate space for reg info and initialize it.  The tables are sized
   with 50% headroom over the current max_reg_num () so that a few new
   pseudos do not immediately force a resize.  */
static void
allocate_reg_info (void)
{
  int i;

  max_regno_since_last_resize = max_reg_num ();
  reg_info_size = max_regno_since_last_resize * 3 / 2 + 1;
  gcc_assert (! reg_pref && ! reg_renumber);
  reg_renumber = XNEWVEC (short, reg_info_size);
  reg_pref = XCNEWVEC (struct reg_pref, reg_info_size);
  /* -1 marks entries with no hard register assigned yet.  */
  memset (reg_renumber, -1, reg_info_size * sizeof (short));
  for (i = 0; i < reg_info_size; i++)
    {
      /* Defaults matching reg_preferred_class/reg_alternate_class.  */
      reg_pref[i].prefclass = GENERAL_REGS;
      reg_pref[i].altclass = ALL_REGS;
      reg_pref[i].allocnoclass = GENERAL_REGS;
    }
}
910
911
/* Resize reg info.  The new elements will be initialized.  Return TRUE
   if new pseudos were added since the last call.  */
bool
resize_reg_info (void)
{
  int old, i;
  bool change_p;

  if (reg_pref == NULL)
    {
      allocate_reg_info ();
      return true;
    }
  change_p = max_regno_since_last_resize != max_reg_num ();
  max_regno_since_last_resize = max_reg_num ();
  /* If the headroom from the last (re)allocation still suffices, the
     tables need not grow.  */
  if (reg_info_size >= max_reg_num ())
    return change_p;
  old = reg_info_size;
  /* Grow with 50% headroom, as in allocate_reg_info.  */
  reg_info_size = max_reg_num () * 3 / 2 + 1;
  gcc_assert (reg_pref && reg_renumber);
  reg_renumber = XRESIZEVEC (short, reg_renumber, reg_info_size);
  reg_pref = XRESIZEVEC (struct reg_pref, reg_pref, reg_info_size);
  /* NOTE(review): this memset of the new reg_pref entries is immediately
     overwritten field-by-field in the loop below, so it looks redundant —
     confirm before removing.  */
  memset (reg_pref + old, -1,
	  (reg_info_size - old) * sizeof (struct reg_pref));
  /* -1 marks entries with no hard register assigned yet.  */
  memset (reg_renumber + old, -1, (reg_info_size - old) * sizeof (short));
  for (i = old; i < reg_info_size; i++)
    {
      reg_pref[i].prefclass = GENERAL_REGS;
      reg_pref[i].altclass = ALL_REGS;
      reg_pref[i].allocnoclass = GENERAL_REGS;
    }
  return true;
}
945
946
947 /* Free up the space allocated by allocate_reg_info. */
948 void
free_reg_info(void)949 free_reg_info (void)
950 {
951 if (reg_pref)
952 {
953 free (reg_pref);
954 reg_pref = NULL;
955 }
956
957 if (reg_renumber)
958 {
959 free (reg_renumber);
960 reg_renumber = NULL;
961 }
962 }
963
/* Initialize some global data for this pass.  Worker for the reginfo
   RTL pass; always returns 1 (no TODO flags).  */
static unsigned int
reginfo_init (void)
{
  if (df)
    df_compute_regs_ever_live (true);

  /* This prevents dump_reg_info from losing if called
     before reginfo is run.  */
  reg_pref = NULL;
  reg_info_size = max_regno_since_last_resize = 0;
  /* No more global register variables may be declared.  */
  no_global_reg_vars = 1;
  return 1;
}
979
namespace {

/* Pass descriptor for the reginfo initialization pass.  */
const pass_data pass_data_reginfo_init =
{
  RTL_PASS, /* type */
  "reginfo", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* RTL pass whose only job is to run reginfo_init above.  */
class pass_reginfo_init : public rtl_opt_pass
{
public:
  pass_reginfo_init (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_reginfo_init, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return reginfo_init (); }

}; // class pass_reginfo_init

} // anon namespace
1008
1009 rtl_opt_pass *
make_pass_reginfo_init(gcc::context * ctxt)1010 make_pass_reginfo_init (gcc::context *ctxt)
1011 {
1012 return new pass_reginfo_init (ctxt);
1013 }
1014
1015
1016
1017 /* Set up preferred, alternate, and allocno classes for REGNO as
1018 PREFCLASS, ALTCLASS, and ALLOCNOCLASS. */
1019 void
setup_reg_classes(int regno,enum reg_class prefclass,enum reg_class altclass,enum reg_class allocnoclass)1020 setup_reg_classes (int regno,
1021 enum reg_class prefclass, enum reg_class altclass,
1022 enum reg_class allocnoclass)
1023 {
1024 if (reg_pref == NULL)
1025 return;
1026 gcc_assert (reg_info_size >= max_reg_num ());
1027 reg_pref[regno].prefclass = prefclass;
1028 reg_pref[regno].altclass = altclass;
1029 reg_pref[regno].allocnoclass = allocnoclass;
1030 }
1031
1032
1033 /* This is the `regscan' pass of the compiler, run just before cse and
1034 again just before loop. It finds the first and last use of each
1035 pseudo-register. */
1036
1037 static void reg_scan_mark_refs (rtx, rtx_insn *);
1038
1039 void
reg_scan(rtx_insn * f,unsigned int nregs ATTRIBUTE_UNUSED)1040 reg_scan (rtx_insn *f, unsigned int nregs ATTRIBUTE_UNUSED)
1041 {
1042 rtx_insn *insn;
1043
1044 timevar_push (TV_REG_SCAN);
1045
1046 for (insn = f; insn; insn = NEXT_INSN (insn))
1047 if (INSN_P (insn))
1048 {
1049 reg_scan_mark_refs (PATTERN (insn), insn);
1050 if (REG_NOTES (insn))
1051 reg_scan_mark_refs (REG_NOTES (insn), insn);
1052 }
1053
1054 timevar_pop (TV_REG_SCAN);
1055 }
1056
1057
/* X is the expression to scan.  INSN is the insn it appears in.
   Set REG_POINTER on pseudos that are assigned address-like values,
   and propagate REG_EXPR attributes to freshly-set registers.  */
static void
reg_scan_mark_refs (rtx x, rtx_insn *insn)
{
  enum rtx_code code;
  rtx dest;
  rtx note;

  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    /* Leaf codes: nothing inside them to record.  */
    case CONST:
    CASE_CONST_ANY:
    case CC0:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case REG:
      return;

    /* An EXPR_LIST carries an rtx in both operands; scan each.  */
    case EXPR_LIST:
      if (XEXP (x, 0))
	reg_scan_mark_refs (XEXP (x, 0), insn);
      if (XEXP (x, 1))
	reg_scan_mark_refs (XEXP (x, 1), insn);
      break;

    /* For INSN_LIST/INT_LIST only the chain pointer is an rtx.  */
    case INSN_LIST:
    case INT_LIST:
      if (XEXP (x, 1))
	reg_scan_mark_refs (XEXP (x, 1), insn);
      break;

    /* A clobbered MEM still uses the registers in its address.  */
    case CLOBBER:
      if (MEM_P (XEXP (x, 0)))
	reg_scan_mark_refs (XEXP (XEXP (x, 0), 0), insn);
      break;

    case SET:
      /* Count a set of the destination if it is a register.  Strip any
	 SUBREG/STRICT_LOW_PART/ZERO_EXTRACT wrappers to find it.  */
      for (dest = SET_DEST (x);
	   GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	   || GET_CODE (dest) == ZERO_EXTRACT;
	   dest = XEXP (dest, 0))
	;

      /* If this is setting a pseudo from another pseudo or the sum of a
	 pseudo and a constant integer and the other pseudo is known to be
	 a pointer, set the destination to be a pointer as well.

	 Likewise if it is setting the destination from an address or from a
	 value equivalent to an address or to the sum of an address and
	 something else.

	 But don't do any of this if the pseudo corresponds to a user
	 variable since it should have already been set as a pointer based
	 on the type.  */

      if (REG_P (SET_DEST (x))
	  && REGNO (SET_DEST (x)) >= FIRST_PSEUDO_REGISTER
	  /* If the destination pseudo is set more than once, then other
	     sets might not be to a pointer value (consider access to a
	     union in two threads of control in the presence of global
	     optimizations).  So only set REG_POINTER on the destination
	     pseudo if this is the only set of that pseudo.  */
	  && DF_REG_DEF_COUNT (REGNO (SET_DEST (x))) == 1
	  && ! REG_USERVAR_P (SET_DEST (x))
	  && ! REG_POINTER (SET_DEST (x))
	  && ((REG_P (SET_SRC (x))
	       && REG_POINTER (SET_SRC (x)))
	      || ((GET_CODE (SET_SRC (x)) == PLUS
		   || GET_CODE (SET_SRC (x)) == LO_SUM)
		  && CONST_INT_P (XEXP (SET_SRC (x), 1))
		  && REG_P (XEXP (SET_SRC (x), 0))
		  && REG_POINTER (XEXP (SET_SRC (x), 0)))
	      || GET_CODE (SET_SRC (x)) == CONST
	      || GET_CODE (SET_SRC (x)) == SYMBOL_REF
	      || GET_CODE (SET_SRC (x)) == LABEL_REF
	      || (GET_CODE (SET_SRC (x)) == HIGH
		  && (GET_CODE (XEXP (SET_SRC (x), 0)) == CONST
		      || GET_CODE (XEXP (SET_SRC (x), 0)) == SYMBOL_REF
		      || GET_CODE (XEXP (SET_SRC (x), 0)) == LABEL_REF))
	      || ((GET_CODE (SET_SRC (x)) == PLUS
		   || GET_CODE (SET_SRC (x)) == LO_SUM)
		  && (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST
		      || GET_CODE (XEXP (SET_SRC (x), 1)) == SYMBOL_REF
		      || GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF))
	      || ((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
		  && (GET_CODE (XEXP (note, 0)) == CONST
		      || GET_CODE (XEXP (note, 0)) == SYMBOL_REF
		      || GET_CODE (XEXP (note, 0)) == LABEL_REF))))
	REG_POINTER (SET_DEST (x)) = 1;

      /* If this is setting a register from a register or from a simple
	 conversion of a register, propagate REG_EXPR.  */
      if (REG_P (dest) && !REG_ATTRS (dest))
	set_reg_attrs_from_value (dest, SET_SRC (x));

      /* fall through */

    default:
      /* Generic walk: recurse into every rtx ('e') and rtx-vector ('E')
	 operand of X.  SET reaches here too, so its source and
	 destination are scanned by this loop.  */
      {
	const char *fmt = GET_RTX_FORMAT (code);
	int i;
	for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	  {
	    if (fmt[i] == 'e')
	      reg_scan_mark_refs (XEXP (x, i), insn);
	    else if (fmt[i] == 'E' && XVEC (x, i) != 0)
	      {
		int j;
		for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		  reg_scan_mark_refs (XVECEXP (x, i, j), insn);
	      }
	  }
      }
    }
}
1183
1184
1185 /* Return nonzero if C1 is a subset of C2, i.e., if every register in C1
1186 is also in C2. */
1187 int
reg_class_subset_p(reg_class_t c1,reg_class_t c2)1188 reg_class_subset_p (reg_class_t c1, reg_class_t c2)
1189 {
1190 return (c1 == c2
1191 || c2 == ALL_REGS
1192 || hard_reg_set_subset_p (reg_class_contents[(int) c1],
1193 reg_class_contents[(int) c2]));
1194 }
1195
1196 /* Return nonzero if there is a register that is in both C1 and C2. */
1197 int
reg_classes_intersect_p(reg_class_t c1,reg_class_t c2)1198 reg_classes_intersect_p (reg_class_t c1, reg_class_t c2)
1199 {
1200 return (c1 == c2
1201 || c1 == ALL_REGS
1202 || c2 == ALL_REGS
1203 || hard_reg_set_intersect_p (reg_class_contents[(int) c1],
1204 reg_class_contents[(int) c2]));
1205 }
1206
1207
/* Hash a cache entry by the unique id of its subreg shape.  */
inline hashval_t
simplifiable_subregs_hasher::hash (const simplifiable_subreg *value)
{
  inchash::hash h;
  h.add_hwi (value->shape.unique_id ());
  return h.end ();
}
1215
/* Return true if cache entry VALUE describes subreg shape COMPARE.  */
inline bool
simplifiable_subregs_hasher::equal (const simplifiable_subreg *value,
				    const subreg_shape *compare)
{
  return value->shape == *compare;
}
1222
simplifiable_subreg(const subreg_shape & shape_in)1223 inline simplifiable_subreg::simplifiable_subreg (const subreg_shape &shape_in)
1224 : shape (shape_in)
1225 {
1226 CLEAR_HARD_REG_SET (simplifiable_regs);
1227 }
1228
1229 /* Return the set of hard registers that are able to form the subreg
1230 described by SHAPE. */
1231
1232 const HARD_REG_SET &
simplifiable_subregs(const subreg_shape & shape)1233 simplifiable_subregs (const subreg_shape &shape)
1234 {
1235 if (!this_target_hard_regs->x_simplifiable_subregs)
1236 this_target_hard_regs->x_simplifiable_subregs
1237 = new hash_table <simplifiable_subregs_hasher> (30);
1238 inchash::hash h;
1239 h.add_hwi (shape.unique_id ());
1240 simplifiable_subreg **slot
1241 = (this_target_hard_regs->x_simplifiable_subregs
1242 ->find_slot_with_hash (&shape, h.end (), INSERT));
1243
1244 if (!*slot)
1245 {
1246 simplifiable_subreg *info = new simplifiable_subreg (shape);
1247 for (unsigned int i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1248 if (targetm.hard_regno_mode_ok (i, shape.inner_mode)
1249 && simplify_subreg_regno (i, shape.inner_mode, shape.offset,
1250 shape.outer_mode) >= 0)
1251 SET_HARD_REG_BIT (info->simplifiable_regs, i);
1252 *slot = info;
1253 }
1254 return (*slot)->simplifiable_regs;
1255 }
1256
1257 /* Passes for keeping and updating info about modes of registers
1258 inside subregisters. */
1259
1260 static HARD_REG_SET **valid_mode_changes;
1261 static obstack valid_mode_changes_obstack;
1262
/* Restrict the choice of register for SUBREG_REG (SUBREG) based
   on information about SUBREG.

   If PARTIAL_DEF, SUBREG is a partial definition of a multipart inner
   register and we want to ensure that the other parts of the inner
   register are correctly preserved.  If !PARTIAL_DEF we need to
   ensure that SUBREG itself can be formed.  */

static void
record_subregs_of_mode (rtx subreg, bool partial_def)
{
  unsigned int regno;

  /* Only subregs of registers are interesting.  */
  if (!REG_P (SUBREG_REG (subreg)))
    return;

  /* Hard registers are handled directly by the allocators; only
     pseudos need mode-change tracking.  */
  regno = REGNO (SUBREG_REG (subreg));
  if (regno < FIRST_PSEUDO_REGISTER)
    return;

  subreg_shape shape (shape_of_subreg (subreg));
  if (partial_def)
    {
      /* The number of independently-accessible SHAPE.outer_mode values
	 in SHAPE.inner_mode is GET_MODE_SIZE (SHAPE.inner_mode) / SIZE.
	 We need to check that the assignment will preserve all the other
	 SIZE-byte chunks in the inner register besides the one that
	 includes SUBREG.

	 In practice it is enough to check whether an equivalent
	 SHAPE.inner_mode value in an adjacent SIZE-byte chunk can be formed.
	 If the underlying registers are small enough, both subregs will
	 be valid.  If the underlying registers are too large, one of the
	 subregs will be invalid.

	 This relies on the fact that we've already been passed
	 SUBREG with PARTIAL_DEF set to false.

	 The size of the outer mode must be ordered wrt the size of the
	 inner mode's registers, since otherwise we wouldn't know at
	 compile time how many registers the outer mode occupies.  */
      poly_uint64 size = ordered_max (REGMODE_NATURAL_SIZE (shape.inner_mode),
				      GET_MODE_SIZE (shape.outer_mode));
      gcc_checking_assert (known_lt (size, GET_MODE_SIZE (shape.inner_mode)));
      /* Shift the shape to the neighboring SIZE-byte chunk, preferring
	 the previous chunk when the offset allows it.  */
      if (known_ge (shape.offset, size))
	shape.offset -= size;
      else
	shape.offset += size;
    }

  /* Each recorded shape further restricts REGNO to the hard registers
     that can form that shape; intersect with any earlier constraint.  */
  if (valid_mode_changes[regno])
    AND_HARD_REG_SET (*valid_mode_changes[regno],
		      simplifiable_subregs (shape));
  else
    {
      valid_mode_changes[regno]
	= XOBNEW (&valid_mode_changes_obstack, HARD_REG_SET);
      COPY_HARD_REG_SET (*valid_mode_changes[regno],
			 simplifiable_subregs (shape));
    }
}
1324
1325 /* Call record_subregs_of_mode for all the subregs in X. */
1326 static void
find_subregs_of_mode(rtx x)1327 find_subregs_of_mode (rtx x)
1328 {
1329 enum rtx_code code = GET_CODE (x);
1330 const char * const fmt = GET_RTX_FORMAT (code);
1331 int i;
1332
1333 if (code == SUBREG)
1334 record_subregs_of_mode (x, false);
1335
1336 /* Time for some deep diving. */
1337 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1338 {
1339 if (fmt[i] == 'e')
1340 find_subregs_of_mode (XEXP (x, i));
1341 else if (fmt[i] == 'E')
1342 {
1343 int j;
1344 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1345 find_subregs_of_mode (XVECEXP (x, i, j));
1346 }
1347 }
1348 }
1349
1350 void
init_subregs_of_mode(void)1351 init_subregs_of_mode (void)
1352 {
1353 basic_block bb;
1354 rtx_insn *insn;
1355
1356 gcc_obstack_init (&valid_mode_changes_obstack);
1357 valid_mode_changes = XCNEWVEC (HARD_REG_SET *, max_reg_num ());
1358
1359 FOR_EACH_BB_FN (bb, cfun)
1360 FOR_BB_INSNS (bb, insn)
1361 if (NONDEBUG_INSN_P (insn))
1362 {
1363 find_subregs_of_mode (PATTERN (insn));
1364 df_ref def;
1365 FOR_EACH_INSN_DEF (def, insn)
1366 if (DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL)
1367 && read_modify_subreg_p (DF_REF_REG (def)))
1368 record_subregs_of_mode (DF_REF_REG (def), true);
1369 }
1370 }
1371
/* Return the set of hard registers recorded as valid for REGNO by
   record_subregs_of_mode, or null if no subreg of REGNO was seen.  */
const HARD_REG_SET *
valid_mode_changes_for_regno (unsigned int regno)
{
  return valid_mode_changes[regno];
}
1377
1378 void
finish_subregs_of_mode(void)1379 finish_subregs_of_mode (void)
1380 {
1381 XDELETEVEC (valid_mode_changes);
1382 obstack_free (&valid_mode_changes_obstack, NULL);
1383 }
1384
/* Free all data attached to the structure.  This isn't a destructor because
   we don't want to run on exit.  */

void
target_hard_regs::finalize ()
{
  /* Drop the simplifiable-subregs cache; deleting a null pointer is
     harmless if it was never created.  */
  delete x_simplifiable_subregs;
}
1393