1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
58
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
61
62 #ifdef PUSH_ROUNDING
63
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
69
70 #endif
71
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
79
80
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
88
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
92 {
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
104 };
105
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
108
109 struct store_by_pieces
110 {
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
120 };
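
/* Illustrative sketch (not part of the original source): a CONSTFUN
   callback receives the opaque CONSTFUNDATA pointer, the byte offset
   within the block, and the mode of the piece being stored, and must
   return the constant rtx to store at that offset.  A callback that
   clears storage can simply be:

     static rtx
     example_clear_fn (void *data ATTRIBUTE_UNUSED,
                       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                       enum machine_mode mode ATTRIBUTE_UNUSED)
     {
       return const0_rtx;
     }

   which is essentially what clear_by_pieces_1 below does.  */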
121
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static rtx clear_storage_via_libcall (rtx, rtx, bool);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, int);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, tree, int);
147
148 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
149
150 static int is_aligning_offset (tree, tree);
151 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
152 enum expand_modifier);
153 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
154 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
155 #ifdef PUSH_ROUNDING
156 static void emit_single_push_insn (enum machine_mode, rtx, tree);
157 #endif
158 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
159 static rtx const_vector_from_tree (tree);
160 static void write_complex_part (rtx, rtx, bool);
161
162 /* Record for each mode whether we can move a register directly to or
163 from an object of that mode in memory. If we can't, we won't try
164 to use that mode directly when accessing a field of that mode. */
165
166 static char direct_load[NUM_MACHINE_MODES];
167 static char direct_store[NUM_MACHINE_MODES];
168
169 /* Record for each mode whether we can float-extend from memory. */
170
171 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
172
173 /* This macro is used to determine whether move_by_pieces should be called
174 to perform a structure copy. */
175 #ifndef MOVE_BY_PIECES_P
176 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
177 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
178 < (unsigned int) MOVE_RATIO)
179 #endif
180
181 /* This macro is used to determine whether clear_by_pieces should be
182 called to clear storage. */
183 #ifndef CLEAR_BY_PIECES_P
184 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
185 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
186 < (unsigned int) CLEAR_RATIO)
187 #endif
188
189 /* This macro is used to determine whether store_by_pieces should be
190 called to "memset" storage with byte values other than zero, or
191 to "memcpy" storage when the source is a constant string. */
192 #ifndef STORE_BY_PIECES_P
193 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195 < (unsigned int) MOVE_RATIO)
196 #endif
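
/* Worked example (illustrative, for a hypothetical target): with
   MOVE_MAX_PIECES == 4 and MOVE_RATIO == 3, an 8-byte copy with
   32-bit alignment needs two SImode moves, so MOVE_BY_PIECES_P (8, 32)
   compares 2 < 3 and is true, and the copy is expanded inline rather
   than through a libcall.  */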
197
198 /* This array records the insn_code of insns to perform block moves. */
199 enum insn_code movmem_optab[NUM_MACHINE_MODES];
200
201 /* This array records the insn_code of insns to perform block sets. */
202 enum insn_code setmem_optab[NUM_MACHINE_MODES];
203
204 /* These arrays record the insn_code of three different kinds of insns
205 to perform block compares. */
206 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
209
210 /* Synchronization primitives. */
211 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
230 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
232 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
233
234 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
235
236 #ifndef SLOW_UNALIGNED_ACCESS
237 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
238 #endif
239
240 /* This is run once per compilation to set up which modes can be used
241 directly in memory and to initialize the block move optab. */
242
243 void
244 init_expr_once (void)
245 {
246 rtx insn, pat;
247 enum machine_mode mode;
248 int num_clobbers;
249 rtx mem, mem1;
250 rtx reg;
251
252 /* Try indexing by frame ptr and try by stack ptr.
253 It is known that on the Convex the stack ptr isn't a valid index.
254 With luck, one or the other is valid on any machine. */
255 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
256 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
257
258 /* A scratch register we can modify in-place below to avoid
259 useless RTL allocations. */
260 reg = gen_rtx_REG (VOIDmode, -1);
261
262 insn = rtx_alloc (INSN);
263 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
264 PATTERN (insn) = pat;
265
266 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
267 mode = (enum machine_mode) ((int) mode + 1))
268 {
269 int regno;
270
271 direct_load[(int) mode] = direct_store[(int) mode] = 0;
272 PUT_MODE (mem, mode);
273 PUT_MODE (mem1, mode);
274 PUT_MODE (reg, mode);
275
276 /* See if there is some register that can be used in this mode and
277 directly loaded or stored from memory. */
278
279 if (mode != VOIDmode && mode != BLKmode)
280 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
281 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
282 regno++)
283 {
284 if (! HARD_REGNO_MODE_OK (regno, mode))
285 continue;
286
287 REGNO (reg) = regno;
288
289 SET_SRC (pat) = mem;
290 SET_DEST (pat) = reg;
291 if (recog (pat, insn, &num_clobbers) >= 0)
292 direct_load[(int) mode] = 1;
293
294 SET_SRC (pat) = mem1;
295 SET_DEST (pat) = reg;
296 if (recog (pat, insn, &num_clobbers) >= 0)
297 direct_load[(int) mode] = 1;
298
299 SET_SRC (pat) = reg;
300 SET_DEST (pat) = mem;
301 if (recog (pat, insn, &num_clobbers) >= 0)
302 direct_store[(int) mode] = 1;
303
304 SET_SRC (pat) = reg;
305 SET_DEST (pat) = mem1;
306 if (recog (pat, insn, &num_clobbers) >= 0)
307 direct_store[(int) mode] = 1;
308 }
309 }
310
311 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
312
313 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
314 mode = GET_MODE_WIDER_MODE (mode))
315 {
316 enum machine_mode srcmode;
317 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
318 srcmode = GET_MODE_WIDER_MODE (srcmode))
319 {
320 enum insn_code ic;
321
322 ic = can_extend_p (mode, srcmode, 0);
323 if (ic == CODE_FOR_nothing)
324 continue;
325
326 PUT_MODE (mem, srcmode);
327
328 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
329 float_extend_from_mem[mode][srcmode] = true;
330 }
331 }
332 }
333
334 /* This is run at the start of compiling a function. */
335
336 void
337 init_expr (void)
338 {
339 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
340 }
341
342 /* Copy data from FROM to TO, where the machine modes are not the same.
343 Both modes may be integer, or both may be floating.
344 UNSIGNEDP should be nonzero if FROM is an unsigned type.
345 This causes zero-extension instead of sign-extension. */
346
347 void
348 convert_move (rtx to, rtx from, int unsignedp)
349 {
350 enum machine_mode to_mode = GET_MODE (to);
351 enum machine_mode from_mode = GET_MODE (from);
352 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
353 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
354 enum insn_code code;
355 rtx libcall;
356
357 /* rtx code for making an equivalent value. */
358 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
359 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
360
361
362 gcc_assert (to_real == from_real);
363
364 /* If the source and destination are already the same, then there's
365 nothing to do. */
366 if (to == from)
367 return;
368
369 /* If FROM is a SUBREG that indicates that we have already done at least
370 the required extension, strip it. We don't handle such SUBREGs as
371 TO here. */
372
373 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
374 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
375 >= GET_MODE_SIZE (to_mode))
376 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
377 from = gen_lowpart (to_mode, from), from_mode = to_mode;
378
379 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
380
381 if (to_mode == from_mode
382 || (from_mode == VOIDmode && CONSTANT_P (from)))
383 {
384 emit_move_insn (to, from);
385 return;
386 }
387
388 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
389 {
390 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
391
392 if (VECTOR_MODE_P (to_mode))
393 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
394 else
395 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
396
397 emit_move_insn (to, from);
398 return;
399 }
400
401 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
402 {
403 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
404 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
405 return;
406 }
407
408 if (to_real)
409 {
410 rtx value, insns;
411 convert_optab tab;
412
413 gcc_assert (GET_MODE_PRECISION (from_mode)
414 != GET_MODE_PRECISION (to_mode));
415
416 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
417 tab = sext_optab;
418 else
419 tab = trunc_optab;
420
421 /* Try converting directly if the insn is supported. */
422
423 code = tab->handlers[to_mode][from_mode].insn_code;
424 if (code != CODE_FOR_nothing)
425 {
426 emit_unop_insn (code, to, from,
427 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
428 return;
429 }
430
431 /* Otherwise use a libcall. */
432 libcall = tab->handlers[to_mode][from_mode].libfunc;
433
434 /* Is this conversion implemented yet? */
435 gcc_assert (libcall);
436
437 start_sequence ();
438 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
439 1, from, from_mode);
440 insns = get_insns ();
441 end_sequence ();
442 emit_libcall_block (insns, to, value,
443 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
444 from)
445 : gen_rtx_FLOAT_EXTEND (to_mode, from));
446 return;
447 }
448
449 /* Handle pointer conversion. */ /* SPEE 900220. */
450 /* Targets are expected to provide conversion insns between PxImode and
451 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
452 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
453 {
454 enum machine_mode full_mode
455 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
456
457 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
458 != CODE_FOR_nothing);
459
460 if (full_mode != from_mode)
461 from = convert_to_mode (full_mode, from, unsignedp);
462 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
463 to, from, UNKNOWN);
464 return;
465 }
466 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
467 {
468 rtx new_from;
469 enum machine_mode full_mode
470 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
471
472 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
473 != CODE_FOR_nothing);
474
475 if (to_mode == full_mode)
476 {
477 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
478 to, from, UNKNOWN);
479 return;
480 }
481
482 new_from = gen_reg_rtx (full_mode);
483 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
484 new_from, from, UNKNOWN);
485
486 /* else proceed to integer conversions below. */
487 from_mode = full_mode;
488 from = new_from;
489 }
490
491 /* Now both modes are integers. */
492
493 /* Handle expanding beyond a word. */
494 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
495 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
496 {
497 rtx insns;
498 rtx lowpart;
499 rtx fill_value;
500 rtx lowfrom;
501 int i;
502 enum machine_mode lowpart_mode;
503 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
504
505 /* Try converting directly if the insn is supported. */
506 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
507 != CODE_FOR_nothing)
508 {
509 /* If FROM is a SUBREG, put it into a register. Do this
510 so that we always generate the same set of insns for
511 better cse'ing; if an intermediate assignment occurred,
512 we won't be doing the operation directly on the SUBREG. */
513 if (optimize > 0 && GET_CODE (from) == SUBREG)
514 from = force_reg (from_mode, from);
515 emit_unop_insn (code, to, from, equiv_code);
516 return;
517 }
518 /* Next, try converting via full word. */
519 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
520 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
521 != CODE_FOR_nothing))
522 {
523 if (REG_P (to))
524 {
525 if (reg_overlap_mentioned_p (to, from))
526 from = force_reg (from_mode, from);
527 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
528 }
529 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
530 emit_unop_insn (code, to,
531 gen_lowpart (word_mode, to), equiv_code);
532 return;
533 }
534
535 /* No special multiword conversion insn; do it by hand. */
536 start_sequence ();
537
538 /* Since we will turn this into a no conflict block, we must ensure
539 that the source does not overlap the target. */
540
541 if (reg_overlap_mentioned_p (to, from))
542 from = force_reg (from_mode, from);
543
544 /* Get a copy of FROM widened to a word, if necessary. */
545 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
546 lowpart_mode = word_mode;
547 else
548 lowpart_mode = from_mode;
549
550 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
551
552 lowpart = gen_lowpart (lowpart_mode, to);
553 emit_move_insn (lowpart, lowfrom);
554
555 /* Compute the value to put in each remaining word. */
556 if (unsignedp)
557 fill_value = const0_rtx;
558 else
559 {
560 #ifdef HAVE_slt
561 if (HAVE_slt
562 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
563 && STORE_FLAG_VALUE == -1)
564 {
565 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
566 lowpart_mode, 0);
567 fill_value = gen_reg_rtx (word_mode);
568 emit_insn (gen_slt (fill_value));
569 }
570 else
571 #endif
572 {
573 fill_value
574 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
575 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
576 NULL_RTX, 0);
577 fill_value = convert_to_mode (word_mode, fill_value, 1);
578 }
579 }
580
581 /* Fill the remaining words. */
582 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
583 {
584 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
585 rtx subword = operand_subword (to, index, 1, to_mode);
586
587 gcc_assert (subword);
588
589 if (fill_value != subword)
590 emit_move_insn (subword, fill_value);
591 }
592
593 insns = get_insns ();
594 end_sequence ();
595
596 emit_no_conflict_block (insns, to, from, NULL_RTX,
597 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
598 return;
599 }
600
601 /* Truncating multi-word to a word or less. */
602 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
603 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
604 {
605 if (!((MEM_P (from)
606 && ! MEM_VOLATILE_P (from)
607 && direct_load[(int) to_mode]
608 && ! mode_dependent_address_p (XEXP (from, 0)))
609 || REG_P (from)
610 || GET_CODE (from) == SUBREG))
611 from = force_reg (from_mode, from);
612 convert_move (to, gen_lowpart (word_mode, from), 0);
613 return;
614 }
615
616 /* Now follow all the conversions between integers
617 no more than a word long. */
618
619 /* For truncation, usually we can just refer to FROM in a narrower mode. */
620 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
621 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
622 GET_MODE_BITSIZE (from_mode)))
623 {
624 if (!((MEM_P (from)
625 && ! MEM_VOLATILE_P (from)
626 && direct_load[(int) to_mode]
627 && ! mode_dependent_address_p (XEXP (from, 0)))
628 || REG_P (from)
629 || GET_CODE (from) == SUBREG))
630 from = force_reg (from_mode, from);
631 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
632 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
633 from = copy_to_reg (from);
634 emit_move_insn (to, gen_lowpart (to_mode, from));
635 return;
636 }
637
638 /* Handle extension. */
639 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
640 {
641 /* Convert directly if that works. */
642 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
643 != CODE_FOR_nothing)
644 {
645 emit_unop_insn (code, to, from, equiv_code);
646 return;
647 }
648 else
649 {
650 enum machine_mode intermediate;
651 rtx tmp;
652 tree shift_amount;
653
654 /* Search for a mode to convert via. */
655 for (intermediate = from_mode; intermediate != VOIDmode;
656 intermediate = GET_MODE_WIDER_MODE (intermediate))
657 if (((can_extend_p (to_mode, intermediate, unsignedp)
658 != CODE_FOR_nothing)
659 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
660 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
661 GET_MODE_BITSIZE (intermediate))))
662 && (can_extend_p (intermediate, from_mode, unsignedp)
663 != CODE_FOR_nothing))
664 {
665 convert_move (to, convert_to_mode (intermediate, from,
666 unsignedp), unsignedp);
667 return;
668 }
669
670 /* No suitable intermediate mode.
671 Generate what we need with shifts. */
672 shift_amount = build_int_cst (NULL_TREE,
673 GET_MODE_BITSIZE (to_mode)
674 - GET_MODE_BITSIZE (from_mode));
675 from = gen_lowpart (to_mode, force_reg (from_mode, from));
676 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
677 to, unsignedp);
678 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
679 to, unsignedp);
680 if (tmp != to)
681 emit_move_insn (to, tmp);
682 return;
683 }
684 }
685
686 /* Support special truncate insns for certain modes. */
687 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
688 {
689 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
690 to, from, UNKNOWN);
691 return;
692 }
693
694 /* Handle truncation of volatile memrefs, and so on;
695 the things that couldn't be truncated directly,
696 and for which there was no special instruction.
697
698 ??? Code above formerly short-circuited this, for most integer
699 mode pairs, with a force_reg in from_mode followed by a recursive
700 call to this routine. Appears always to have been wrong. */
701 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
702 {
703 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
704 emit_move_insn (to, temp);
705 return;
706 }
707
708 /* Mode combination is not recognized. */
709 gcc_unreachable ();
710 }
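
/* Usage sketch (illustrative; assumes SImode and DImode are both
   available on the target):

     rtx narrow = gen_reg_rtx (SImode);
     rtx wide = gen_reg_rtx (DImode);
     convert_move (wide, narrow, 1);

   The nonzero UNSIGNEDP selects zero-extension when widening; passing
   0 would sign-extend instead.  */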
711
712 /* Return an rtx for a value that would result
713 from converting X to mode MODE.
714 Both X and MODE may be floating, or both integer.
715 UNSIGNEDP is nonzero if X is an unsigned value.
716 This can be done by referring to a part of X in place
717 or by copying to a new temporary with conversion. */
718
719 rtx
720 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
721 {
722 return convert_modes (mode, VOIDmode, x, unsignedp);
723 }
724
725 /* Return an rtx for a value that would result
726 from converting X from mode OLDMODE to mode MODE.
727 Both modes may be floating, or both integer.
728 UNSIGNEDP is nonzero if X is an unsigned value.
729
730 This can be done by referring to a part of X in place
731 or by copying to a new temporary with conversion.
732
733 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
734
735 rtx
736 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
737 {
738 rtx temp;
739
740 /* If FROM is a SUBREG that indicates that we have already done at least
741 the required extension, strip it. */
742
743 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
744 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
745 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
746 x = gen_lowpart (mode, x);
747
748 if (GET_MODE (x) != VOIDmode)
749 oldmode = GET_MODE (x);
750
751 if (mode == oldmode || oldmode == BFmode)
752 return x;
753
754 /* There is one case that we must handle specially: If we are converting
755 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
756 we are to interpret the constant as unsigned, gen_lowpart will do
757 the wrong thing if the constant appears negative. What we want to do is
758 make the high-order word of the constant zero, not all ones. */
759
760 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
761 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
762 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
763 {
764 HOST_WIDE_INT val = INTVAL (x);
765
766 if (oldmode != VOIDmode
767 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
768 {
769 int width = GET_MODE_BITSIZE (oldmode);
770
771 /* We need to zero extend VAL. */
772 val &= ((HOST_WIDE_INT) 1 << width) - 1;
773 }
774
775 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
776 }
777
778 /* We can do this with a gen_lowpart if both desired and current modes
779 are integer, and this is either a constant integer, a register, or a
780 non-volatile MEM. Except for the constant case where MODE is no
781 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
782
783 if ((GET_CODE (x) == CONST_INT
784 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
785 || (GET_MODE_CLASS (mode) == MODE_INT
786 && GET_MODE_CLASS (oldmode) == MODE_INT
787 && (GET_CODE (x) == CONST_DOUBLE
788 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
789 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
790 && direct_load[(int) mode])
791 || (REG_P (x)
792 && (! HARD_REGISTER_P (x)
793 || HARD_REGNO_MODE_OK (REGNO (x), mode))
794 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
795 GET_MODE_BITSIZE (GET_MODE (x)))))))))
796 {
797 /* ??? If we don't know OLDMODE, we have to assume here that
798 X does not need sign- or zero-extension. This may not be
799 the case, but it's the best we can do. */
800 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
801 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
802 {
803 HOST_WIDE_INT val = INTVAL (x);
804 int width = GET_MODE_BITSIZE (oldmode);
805
806 /* We must sign or zero-extend in this case. Start by
807 zero-extending, then sign extend if we need to. */
808 val &= ((HOST_WIDE_INT) 1 << width) - 1;
809 if (! unsignedp
810 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
811 val |= (HOST_WIDE_INT) (-1) << width;
812
813 return gen_int_mode (val, mode);
814 }
815
816 return gen_lowpart (mode, x);
817 }
818
819 /* Converting an integer constant into the target mode is always
820 equivalent to a subreg operation. */
821 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
822 {
823 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
824 return simplify_gen_subreg (mode, x, oldmode, 0);
825 }
826
827 temp = gen_reg_rtx (mode);
828 convert_move (temp, x, unsignedp);
829 return temp;
830 }
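
/* Example (illustrative): converting the QImode constant -1 to SImode
   with UNSIGNEDP set,

     convert_modes (SImode, QImode, GEN_INT (-1), 1)

   yields (const_int 255), not (const_int -1): the CONST_INT path above
   masks VAL down to the width of OLDMODE before rebuilding the
   constant with gen_int_mode.  */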
831
832 /* STORE_MAX_PIECES is the number of bytes at a time that we can
833 store efficiently. Due to internal GCC limitations, this is
834 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
835 for an immediate constant. */
836
837 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
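
/* For instance, with a 64-bit HOST_WIDE_INT this evaluates to
   MIN (MOVE_MAX_PIECES, 16); the 2 * sizeof (HOST_WIDE_INT) bound
   reflects that an immediate constant is represented by at most two
   HOST_WIDE_INT halves (cf. immed_double_const above).  */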
838
839 /* Determine whether the LEN bytes can be moved by using several move
840 instructions. Return nonzero if a call to move_by_pieces should
841 succeed. */
842
843 int
844 can_move_by_pieces (unsigned HOST_WIDE_INT len,
845 unsigned int align ATTRIBUTE_UNUSED)
846 {
847 return MOVE_BY_PIECES_P (len, align);
848 }
849
850 /* Generate several move instructions to copy LEN bytes from block FROM to
851 block TO. (These are MEM rtx's with BLKmode).
852
853 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
854 used to push FROM to the stack.
855
856 ALIGN is the maximum stack alignment we can assume.
857
858 If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
859 mempcpy; and if ENDP is 2 return memory at the end minus one byte,
860 a la stpcpy. */
861
862 rtx
863 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
864 unsigned int align, int endp)
865 {
866 struct move_by_pieces data;
867 rtx to_addr, from_addr = XEXP (from, 0);
868 unsigned int max_size = MOVE_MAX_PIECES + 1;
869 enum machine_mode mode = VOIDmode, tmode;
870 enum insn_code icode;
871
872 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
873
874 data.offset = 0;
875 data.from_addr = from_addr;
876 if (to)
877 {
878 to_addr = XEXP (to, 0);
879 data.to = to;
880 data.autinc_to
881 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
882 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
883 data.reverse
884 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
885 }
886 else
887 {
888 to_addr = NULL_RTX;
889 data.to = NULL_RTX;
890 data.autinc_to = 1;
891 #ifdef STACK_GROWS_DOWNWARD
892 data.reverse = 1;
893 #else
894 data.reverse = 0;
895 #endif
896 }
897 data.to_addr = to_addr;
898 data.from = from;
899 data.autinc_from
900 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
901 || GET_CODE (from_addr) == POST_INC
902 || GET_CODE (from_addr) == POST_DEC);
903
904 data.explicit_inc_from = 0;
905 data.explicit_inc_to = 0;
906 if (data.reverse) data.offset = len;
907 data.len = len;
908
909 /* If copying requires more than two move insns,
910 copy addresses to registers (to make displacements shorter)
911 and use post-increment if available. */
912 if (!(data.autinc_from && data.autinc_to)
913 && move_by_pieces_ninsns (len, align, max_size) > 2)
914 {
915 /* Find the mode of the largest move... */
916 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
917 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
918 if (GET_MODE_SIZE (tmode) < max_size)
919 mode = tmode;
920
921 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
922 {
923 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
924 data.autinc_from = 1;
925 data.explicit_inc_from = -1;
926 }
927 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
928 {
929 data.from_addr = copy_addr_to_reg (from_addr);
930 data.autinc_from = 1;
931 data.explicit_inc_from = 1;
932 }
933 if (!data.autinc_from && CONSTANT_P (from_addr))
934 data.from_addr = copy_addr_to_reg (from_addr);
935 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
936 {
937 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
938 data.autinc_to = 1;
939 data.explicit_inc_to = -1;
940 }
941 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
942 {
943 data.to_addr = copy_addr_to_reg (to_addr);
944 data.autinc_to = 1;
945 data.explicit_inc_to = 1;
946 }
947 if (!data.autinc_to && CONSTANT_P (to_addr))
948 data.to_addr = copy_addr_to_reg (to_addr);
949 }
950
951 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
952 if (align >= GET_MODE_ALIGNMENT (tmode))
953 align = GET_MODE_ALIGNMENT (tmode);
954 else
955 {
956 enum machine_mode xmode;
957
958 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
959 tmode != VOIDmode;
960 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
961 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
962 || SLOW_UNALIGNED_ACCESS (tmode, align))
963 break;
964
965 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
966 }
967
968 /* First move what we can in the largest integer mode, then go to
969 successively smaller modes. */
970
971 while (max_size > 1)
972 {
973 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
974 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
975 if (GET_MODE_SIZE (tmode) < max_size)
976 mode = tmode;
977
978 if (mode == VOIDmode)
979 break;
980
981 icode = mov_optab->handlers[(int) mode].insn_code;
982 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
983 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
984
985 max_size = GET_MODE_SIZE (mode);
986 }
987
988 /* The code above should have handled everything. */
989 gcc_assert (!data.len);
990
991 if (endp)
992 {
993 rtx to1;
994
995 gcc_assert (!data.reverse);
996 if (data.autinc_to)
997 {
998 if (endp == 2)
999 {
1000 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1001 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1002 else
1003 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1004 -1));
1005 }
1006 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1007 data.offset);
1008 }
1009 else
1010 {
1011 if (endp == 2)
1012 --data.offset;
1013 to1 = adjust_address (data.to, QImode, data.offset);
1014 }
1015 return to1;
1016 }
1017 else
1018 return data.to;
1019 }
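
/* Usage sketch (illustrative): expanding a mempcpy-style copy, where
   the address just past the last byte written is needed afterward:

     rtx end = move_by_pieces (to, from, len, align, 1);

   With ENDP == 1 the returned MEM is positioned one byte past the
   copied block; ENDP == 2 would position it on the last byte itself,
   as for stpcpy.  */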
1020
1021 /* Return number of insns required to move L bytes by pieces.
1022 ALIGN (in bits) is maximum alignment we can assume. */
1023
1024 static unsigned HOST_WIDE_INT
1025 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1026 unsigned int max_size)
1027 {
1028 unsigned HOST_WIDE_INT n_insns = 0;
1029 enum machine_mode tmode;
1030
1031 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1032 if (align >= GET_MODE_ALIGNMENT (tmode))
1033 align = GET_MODE_ALIGNMENT (tmode);
1034 else
1035 {
1036 enum machine_mode tmode, xmode;
1037
1038 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1039 tmode != VOIDmode;
1040 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1041 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1042 || SLOW_UNALIGNED_ACCESS (tmode, align))
1043 break;
1044
1045 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1046 }
1047
1048 while (max_size > 1)
1049 {
1050 enum machine_mode mode = VOIDmode;
1051 enum insn_code icode;
1052
1053 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1054 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1055 if (GET_MODE_SIZE (tmode) < max_size)
1056 mode = tmode;
1057
1058 if (mode == VOIDmode)
1059 break;
1060
1061 icode = mov_optab->handlers[(int) mode].insn_code;
1062 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1063 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1064
1065 max_size = GET_MODE_SIZE (mode);
1066 }
1067
1068 gcc_assert (!l);
1069 return n_insns;
1070 }
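
/* Worked example (illustrative): with MOVE_MAX_PIECES == 4, moving
   L == 10 bytes at 32-bit alignment costs 10 / 4 = 2 SImode moves,
   then one HImode move for the remaining 2 bytes, so the function
   returns 3.  */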
1071
1072 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1073 with move instructions for mode MODE. GENFUN is the gen_... function
1074 to make a move insn for that mode. DATA has all the other info. */
1075
1076 static void
1077 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1078 struct move_by_pieces *data)
1079 {
1080 unsigned int size = GET_MODE_SIZE (mode);
1081 rtx to1 = NULL_RTX, from1;
1082
1083 while (data->len >= size)
1084 {
1085 if (data->reverse)
1086 data->offset -= size;
1087
1088 if (data->to)
1089 {
1090 if (data->autinc_to)
1091 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1092 data->offset);
1093 else
1094 to1 = adjust_address (data->to, mode, data->offset);
1095 }
1096
1097 if (data->autinc_from)
1098 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1099 data->offset);
1100 else
1101 from1 = adjust_address (data->from, mode, data->offset);
1102
1103 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1104 emit_insn (gen_add2_insn (data->to_addr,
1105 GEN_INT (-(HOST_WIDE_INT)size)));
1106 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1107 emit_insn (gen_add2_insn (data->from_addr,
1108 GEN_INT (-(HOST_WIDE_INT)size)));
1109
1110 if (data->to)
1111 emit_insn ((*genfun) (to1, from1));
1112 else
1113 {
1114 #ifdef PUSH_ROUNDING
1115 emit_single_push_insn (mode, from1, NULL);
1116 #else
1117 gcc_unreachable ();
1118 #endif
1119 }
1120
1121 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1122 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1123 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1124 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1125
1126 if (! data->reverse)
1127 data->offset += size;
1128
1129 data->len -= size;
1130 }
1131 }
1132
1133 /* Emit code to move a block Y to a block X. This may be done with
1134 string-move instructions, with multiple scalar move instructions,
1135 or with a library call.
1136
1137 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1138 SIZE is an rtx that says how long they are.
1139 ALIGN is the maximum alignment we can assume they have.
1140 METHOD describes what kind of copy this is, and what mechanisms may be used.
1141
1142 Return the address of the new block, if memcpy is called and returns it,
1143 0 otherwise. */
1144
1145 rtx
1146 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1147 {
1148 bool may_use_call;
1149 rtx retval = 0;
1150 unsigned int align;
1151
1152 switch (method)
1153 {
1154 case BLOCK_OP_NORMAL:
1155 case BLOCK_OP_TAILCALL:
1156 may_use_call = true;
1157 break;
1158
1159 case BLOCK_OP_CALL_PARM:
1160 may_use_call = block_move_libcall_safe_for_call_parm ();
1161
1162 /* Make inhibit_defer_pop nonzero around the library call
1163 to force it to pop the arguments right away. */
1164 NO_DEFER_POP;
1165 break;
1166
1167 case BLOCK_OP_NO_LIBCALL:
1168 may_use_call = false;
1169 break;
1170
1171 default:
1172 gcc_unreachable ();
1173 }
1174
1175 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1176
1177 gcc_assert (MEM_P (x));
1178 gcc_assert (MEM_P (y));
1179 gcc_assert (size);
1180
1181 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1182 block copy is more efficient for other large modes, e.g. DCmode. */
1183 x = adjust_address (x, BLKmode, 0);
1184 y = adjust_address (y, BLKmode, 0);
1185
1186 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1187 can be incorrect is coming from __builtin_memcpy. */
1188 if (GET_CODE (size) == CONST_INT)
1189 {
1190 if (INTVAL (size) == 0)
1191 return 0;
1192
1193 x = shallow_copy_rtx (x);
1194 y = shallow_copy_rtx (y);
1195 set_mem_size (x, size);
1196 set_mem_size (y, size);
1197 }
1198
1199 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1200 move_by_pieces (x, y, INTVAL (size), align, 0);
1201 else if (emit_block_move_via_movmem (x, y, size, align))
1202 ;
1203 else if (may_use_call)
1204 retval = emit_block_move_via_libcall (x, y, size,
1205 method == BLOCK_OP_TAILCALL);
1206 else
1207 emit_block_move_via_loop (x, y, size, align);
1208
1209 if (method == BLOCK_OP_CALL_PARM)
1210 OK_DEFER_POP;
1211
1212 return retval;
1213 }
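
/* Usage sketch (illustrative; TARGET_MEM and SOURCE_MEM are
   placeholder names): a plain structure copy is expanded as

     emit_block_move (target_mem, source_mem, GEN_INT (size),
                      BLOCK_OP_NORMAL);

   Callers that are storing a call's outgoing stack arguments pass
   BLOCK_OP_CALL_PARM instead, so that a memcpy libcall is only
   emitted when it provably cannot clobber those arguments.  */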
1214
1215 /* A subroutine of emit_block_move. Returns true if calling the
1216 block move libcall will not clobber any parameters which may have
1217 already been placed on the stack. */
1218
1219 static bool
1220 block_move_libcall_safe_for_call_parm (void)
1221 {
1222 /* If arguments are pushed on the stack, then they're safe. */
1223 if (PUSH_ARGS)
1224 return true;
1225
1226 /* If registers go on the stack anyway, any argument is sure to clobber
1227 an outgoing argument. */
1228 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1229 {
1230 tree fn = emit_block_move_libcall_fn (false);
1231 (void) fn;
1232 if (REG_PARM_STACK_SPACE (fn) != 0)
1233 return false;
1234 }
1235 #endif
1236
1237 /* If any argument goes in memory, then it might clobber an outgoing
1238 argument. */
1239 {
1240 CUMULATIVE_ARGS args_so_far;
1241 tree fn, arg;
1242
1243 fn = emit_block_move_libcall_fn (false);
1244 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1245
1246 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1247 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1248 {
1249 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1250 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1251 if (!tmp || !REG_P (tmp))
1252 return false;
1253 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1254 return false;
1255 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1256 }
1257 }
1258 return true;
1259 }
1260
1261 /* A subroutine of emit_block_move. Expand a movmem pattern;
1262 return true if successful. */
1263
1264 static bool
1265 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1266 {
1267 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1268 int save_volatile_ok = volatile_ok;
1269 enum machine_mode mode;
1270
1271 /* Since this is a move insn, we don't care about volatility. */
1272 volatile_ok = 1;
1273
1274 /* Try the most limited insn first, because there's no point
1275 including more than one in the machine description unless
1276 the more limited one has some advantage. */
1277
1278 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1279 mode = GET_MODE_WIDER_MODE (mode))
1280 {
1281 enum insn_code code = movmem_optab[(int) mode];
1282 insn_operand_predicate_fn pred;
1283
1284 if (code != CODE_FOR_nothing
1285 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1286 here because if SIZE is less than the mode mask, as it is
1287 returned by the macro, it will definitely be less than the
1288 actual mode mask. */
1289 && ((GET_CODE (size) == CONST_INT
1290 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1291 <= (GET_MODE_MASK (mode) >> 1)))
1292 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1293 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1294 || (*pred) (x, BLKmode))
1295 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1296 || (*pred) (y, BLKmode))
1297 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1298 || (*pred) (opalign, VOIDmode)))
1299 {
1300 rtx op2;
1301 rtx last = get_last_insn ();
1302 rtx pat;
1303
1304 op2 = convert_to_mode (mode, size, 1);
1305 pred = insn_data[(int) code].operand[2].predicate;
1306 if (pred != 0 && ! (*pred) (op2, mode))
1307 op2 = copy_to_mode_reg (mode, op2);
1308
1309 /* ??? When called via emit_block_move_for_call, it'd be
1310 nice if there were some way to inform the backend, so
1311 that it doesn't fail the expansion because it thinks
1312 emitting the libcall would be more efficient. */
1313
1314 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1315 if (pat)
1316 {
1317 emit_insn (pat);
1318 volatile_ok = save_volatile_ok;
1319 return true;
1320 }
1321 else
1322 delete_insns_since (last);
1323 }
1324 }
1325
1326 volatile_ok = save_volatile_ok;
1327 return false;
1328 }
1329
1330 /* A subroutine of emit_block_move. Expand a call to memcpy.
1331 Return the return value from memcpy, 0 otherwise. */
1332
1333 static rtx
1334 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1335 {
1336 rtx dst_addr, src_addr;
1337 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1338 enum machine_mode size_mode;
1339 rtx retval;
1340
1341 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1342 pseudos. We can then place those new pseudos into a VAR_DECL and
1343 use them later. */
1344
1345 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1346 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1347
1348 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1349 src_addr = convert_memory_address (ptr_mode, src_addr);
1350
1351 dst_tree = make_tree (ptr_type_node, dst_addr);
1352 src_tree = make_tree (ptr_type_node, src_addr);
1353
1354 #if 0
1355 size_mode = TYPE_MODE (sizetype);
1356
1357 size = convert_to_mode (size_mode, size, 1);
1358 size = copy_to_mode_reg (size_mode, size);
1359
1360 /* It is incorrect to use the libcall calling conventions to call
1361 memcpy in this context. This could be a user call to memcpy and
1362 the user may wish to examine the return value from memcpy. For
1363 targets where libcalls and normal calls have different conventions
1364 for returning pointers, we could end up generating incorrect code.
1365 (TIGCC 20050205) NO, the "incorrect" code is actually correct for us! */
1366 size_tree = make_tree (sizetype, size);
1367
1368 fn = emit_block_move_libcall_fn (true);
1369 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1370 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1371 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1372
1373 /* Now we have to build up the CALL_EXPR itself. */
1374 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1375 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1376 call_expr, arg_list, NULL_TREE);
1377 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1378
1379 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1380 #endif /* 0 */
1381
1382 retval = emit_library_call_value (memcpy_libfunc, NULL_RTX, LCT_NORMAL,
1383 VOIDmode, 3, dst_addr, Pmode,
1384 src_addr, Pmode,
1385 convert_to_mode (TYPE_MODE (sizetype),
1386 size, TYPE_UNSIGNED (sizetype)),
1387 TYPE_MODE (sizetype));
1388
1389 return retval;
1390 }
1391
1392 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1393 for the function we use for block copies. The first time FOR_CALL
1394 is true, we call assemble_external. */
1395
1396 static GTY(()) tree block_move_fn;
1397
1398 void
1399 init_block_move_fn (const char *asmspec)
1400 {
1401 if (!block_move_fn)
1402 {
1403 tree args, fn;
1404
1405 fn = get_identifier ("memcpy");
1406 args = build_function_type_list (ptr_type_node, ptr_type_node,
1407 const_ptr_type_node, sizetype,
1408 NULL_TREE);
1409
1410 fn = build_decl (FUNCTION_DECL, fn, args);
1411 DECL_EXTERNAL (fn) = 1;
1412 TREE_PUBLIC (fn) = 1;
1413 DECL_ARTIFICIAL (fn) = 1;
1414 TREE_NOTHROW (fn) = 1;
1415 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1416 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1417
1418 block_move_fn = fn;
1419 }
1420
1421 if (asmspec)
1422 set_user_assembler_name (block_move_fn, asmspec);
1423 }
1424
1425 static tree
1426 emit_block_move_libcall_fn (int for_call)
1427 {
1428 static bool emitted_extern;
1429
1430 if (!block_move_fn)
1431 init_block_move_fn (NULL);
1432
1433 if (for_call && !emitted_extern)
1434 {
1435 emitted_extern = true;
1436 make_decl_rtl (block_move_fn);
1437 assemble_external (block_move_fn);
1438 }
1439
1440 return block_move_fn;
1441 }
1442
1443 /* A subroutine of emit_block_move. Copy the data via an explicit
1444 loop. This is used only when libcalls are forbidden. */
1445 /* ??? It'd be nice to copy in hunks larger than QImode. */
1446
1447 static void
1448 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1449 unsigned int align ATTRIBUTE_UNUSED)
1450 {
1451 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1452 enum machine_mode iter_mode;
1453
1454 iter_mode = GET_MODE (size);
1455 if (iter_mode == VOIDmode)
1456 iter_mode = word_mode;
1457
1458 top_label = gen_label_rtx ();
1459 cmp_label = gen_label_rtx ();
1460 iter = gen_reg_rtx (iter_mode);
1461
1462 emit_move_insn (iter, const0_rtx);
1463
1464 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1465 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1466 do_pending_stack_adjust ();
1467
1468 emit_jump (cmp_label);
1469 emit_label (top_label);
1470
1471 tmp = convert_modes (Pmode, iter_mode, iter, true);
1472 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1473 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1474 x = change_address (x, QImode, x_addr);
1475 y = change_address (y, QImode, y_addr);
1476
1477 emit_move_insn (x, y);
1478
1479 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1480 true, OPTAB_LIB_WIDEN);
1481 if (tmp != iter)
1482 emit_move_insn (iter, tmp);
1483
1484 emit_label (cmp_label);
1485
1486 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1487 true, top_label);
1488 }
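
/* For clarity, the RTL emitted above behaves like the C loop

     for (iter = 0; iter < size; iter++)
       x[iter] = y[iter];

   performed a byte at a time in QImode.  */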
1489
1490 /* Copy all or part of a value X into registers starting at REGNO.
1491 The number of registers to be filled is NREGS. */
1492
1493 void
1494 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1495 {
1496 int i;
1497 #ifdef HAVE_load_multiple
1498 rtx pat;
1499 rtx last;
1500 #endif
1501
1502 if (nregs == 0)
1503 return;
1504
1505 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1506 x = validize_mem (force_const_mem (mode, x));
1507
1508 /* See if the machine can do this with a load multiple insn. */
1509 #ifdef HAVE_load_multiple
1510 if (HAVE_load_multiple)
1511 {
1512 last = get_last_insn ();
1513 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1514 GEN_INT (nregs));
1515 if (pat)
1516 {
1517 emit_insn (pat);
1518 return;
1519 }
1520 else
1521 delete_insns_since (last);
1522 }
1523 #endif
1524
1525 for (i = 0; i < nregs; i++)
1526 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1527 operand_subword_force (x, i, mode));
1528 }
1529
1530 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1531 The number of registers to be filled is NREGS. */
1532
1533 void
1534 move_block_from_reg (int regno, rtx x, int nregs)
1535 {
1536 int i;
1537
1538 if (nregs == 0)
1539 return;
1540
1541 /* See if the machine can do this with a store multiple insn. */
1542 #ifdef HAVE_store_multiple
1543 if (HAVE_store_multiple)
1544 {
1545 rtx last = get_last_insn ();
1546 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1547 GEN_INT (nregs));
1548 if (pat)
1549 {
1550 emit_insn (pat);
1551 return;
1552 }
1553 else
1554 delete_insns_since (last);
1555 }
1556 #endif
1557
1558 for (i = 0; i < nregs; i++)
1559 {
1560 rtx tem = operand_subword (x, i, 1, BLKmode);
1561
1562 gcc_assert (tem);
1563
1564 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1565 }
1566 }
1567
1568 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1569 ORIG, where ORIG is a non-consecutive group of registers represented by
1570 a PARALLEL. The clone is identical to the original except in that the
1571 original set of registers is replaced by a new set of pseudo registers.
1572 The new set has the same modes as the original set. */
1573
1574 rtx
1575 gen_group_rtx (rtx orig)
1576 {
1577 int i, length;
1578 rtx *tmps;
1579
1580 gcc_assert (GET_CODE (orig) == PARALLEL);
1581
1582 length = XVECLEN (orig, 0);
1583 tmps = alloca (sizeof (rtx) * length);
1584
1585 /* Skip a NULL entry in first slot. */
1586 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1587
1588 if (i)
1589 tmps[0] = 0;
1590
1591 for (; i < length; i++)
1592 {
1593 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1594 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1595
1596 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1597 }
1598
1599 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1600 }
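
/* Illustrative example: a group returning a 16-byte value in two
   DImode registers might be represented as

     (parallel [(expr_list (reg:DI 4) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])

   where each const_int gives the byte offset of its piece within the
   whole value.  gen_group_rtx above yields the same shape with fresh
   pseudos in place of the hard registers (the register numbers here
   are made up for illustration).  */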
1601
1602 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1603 except that values are placed in TMPS[i], and must later be moved
1604 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1605
1606 static void
1607 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1608 {
1609 rtx src;
1610 int start, i;
1611 enum machine_mode m = GET_MODE (orig_src);
1612
1613 gcc_assert (GET_CODE (dst) == PARALLEL);
1614
1615 if (m != VOIDmode
1616 && !SCALAR_INT_MODE_P (m)
1617 && !MEM_P (orig_src)
1618 && GET_CODE (orig_src) != CONCAT)
1619 {
1620 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1621 if (imode == BLKmode)
1622 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1623 else
1624 src = gen_reg_rtx (imode);
1625 if (imode != BLKmode)
1626 src = gen_lowpart (GET_MODE (orig_src), src);
1627 emit_move_insn (src, orig_src);
1628 /* ...and back again. */
1629 if (imode != BLKmode)
1630 src = gen_lowpart (imode, src);
1631 emit_group_load_1 (tmps, dst, src, type, ssize);
1632 return;
1633 }
1634
1635 /* Check for a NULL entry, used to indicate that the parameter goes
1636 both on the stack and in registers. */
1637 if (XEXP (XVECEXP (dst, 0, 0), 0))
1638 start = 0;
1639 else
1640 start = 1;
1641
1642 /* Process the pieces. */
1643 for (i = start; i < XVECLEN (dst, 0); i++)
1644 {
1645 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1646 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1647 unsigned int bytelen = GET_MODE_SIZE (mode);
1648 int shift = 0;
1649
1650 /* Handle trailing fragments that run over the size of the struct. */
1651 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1652 {
1653 /* Arrange to shift the fragment to where it belongs.
1654 extract_bit_field loads to the lsb of the reg. */
1655 if (
1656 #ifdef BLOCK_REG_PADDING
1657 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1658 == (BYTES_BIG_ENDIAN ? upward : downward)
1659 #else
1660 BYTES_BIG_ENDIAN
1661 #endif
1662 )
1663 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1664 bytelen = ssize - bytepos;
1665 gcc_assert (bytelen > 0);
1666 }
1667
1668 /* If we won't be loading directly from memory, protect the real source
1669 from strange tricks we might play; but make sure that the source can
1670 be loaded directly into the destination. */
1671 src = orig_src;
1672 if (!MEM_P (orig_src)
1673 && (!CONSTANT_P (orig_src)
1674 || (GET_MODE (orig_src) != mode
1675 && GET_MODE (orig_src) != VOIDmode)))
1676 {
1677 if (GET_MODE (orig_src) == VOIDmode)
1678 src = gen_reg_rtx (mode);
1679 else
1680 src = gen_reg_rtx (GET_MODE (orig_src));
1681
1682 emit_move_insn (src, orig_src);
1683 }
1684
1685 /* Optimize the access just a bit. */
1686 if (MEM_P (src)
1687 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1688 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1689 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1690 && bytelen == GET_MODE_SIZE (mode))
1691 {
1692 tmps[i] = gen_reg_rtx (mode);
1693 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1694 }
1695 else if (COMPLEX_MODE_P (mode)
1696 && GET_MODE (src) == mode
1697 && bytelen == GET_MODE_SIZE (mode))
1698 /* Let emit_move_complex do the bulk of the work. */
1699 tmps[i] = src;
1700 else if (GET_CODE (src) == CONCAT)
1701 {
1702 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1703 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1704
1705 if ((bytepos == 0 && bytelen == slen0)
1706 || (bytepos != 0 && bytepos + bytelen <= slen))
1707 {
1708 /* The following assumes that the concatenated objects all
1709 have the same size. In this case, a simple calculation
1710 can be used to determine the object and the bit field
1711 to be extracted. */
1712 tmps[i] = XEXP (src, bytepos / slen0);
1713 if (! CONSTANT_P (tmps[i])
1714 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1715 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1716 (bytepos % slen0) * BITS_PER_UNIT,
1717 1, NULL_RTX, mode, mode);
1718 }
1719 else
1720 {
1721 rtx mem;
1722
1723 gcc_assert (!bytepos);
1724 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1725 emit_move_insn (mem, src);
1726 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1727 0, 1, NULL_RTX, mode, mode);
1728 }
1729 }
1730 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1731 SIMD register, which is currently broken. While we get GCC
1732 to emit proper RTL for these cases, let's dump to memory. */
1733 else if (VECTOR_MODE_P (GET_MODE (dst))
1734 && REG_P (src))
1735 {
1736 int slen = GET_MODE_SIZE (GET_MODE (src));
1737 rtx mem;
1738
1739 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1740 emit_move_insn (mem, src);
1741 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1742 }
1743 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1744 && XVECLEN (dst, 0) > 1)
1745 	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1746 else if (CONSTANT_P (src)
1747 || (REG_P (src) && GET_MODE (src) == mode))
1748 tmps[i] = src;
1749 else
1750 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1751 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1752 mode, mode);
1753
1754 if (shift)
1755 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1756 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1757 }
1758 }
1759
1760 /* Emit code to move a block SRC of type TYPE to a block DST,
1761 where DST is non-consecutive registers represented by a PARALLEL.
1762 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1763 if not known. */
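/* (Illustration: a DST of the form
     (parallel [(expr_list (reg:SI 8) (const_int 0))
                (expr_list (reg:SI 9) (const_int 4))])
   asks for bytes 0-3 of the value to be placed in register 8 and
   bytes 4-7 in register 9.)  */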
1764
1765 void
1766 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1767 {
1768 rtx *tmps;
1769 int i;
1770
1771 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1772 emit_group_load_1 (tmps, dst, src, type, ssize);
1773
1774 /* Copy the extracted pieces into the proper (probable) hard regs. */
1775 for (i = 0; i < XVECLEN (dst, 0); i++)
1776 {
1777 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1778 if (d == NULL)
1779 continue;
1780 emit_move_insn (d, tmps[i]);
1781 }
1782 }
1783
1784 /* Similar, but load SRC into new pseudos in a format that looks like
1785 PARALLEL. This can later be fed to emit_group_move to get things
1786 in the right place. */
1787
1788 rtx
1789 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1790 {
1791 rtvec vec;
1792 int i;
1793
1794 vec = rtvec_alloc (XVECLEN (parallel, 0));
1795 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1796
1797 /* Convert the vector to look just like the original PARALLEL, except
1798 with the computed values. */
1799 for (i = 0; i < XVECLEN (parallel, 0); i++)
1800 {
1801 rtx e = XVECEXP (parallel, 0, i);
1802 rtx d = XEXP (e, 0);
1803
1804 if (d)
1805 {
1806 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1807 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1808 }
1809 RTVEC_ELT (vec, i) = e;
1810 }
1811
1812 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1813 }
1814
1815 /* Emit code to move a block SRC to block DST, where SRC and DST are
1816 non-consecutive groups of registers, each represented by a PARALLEL. */
1817
1818 void
1819 emit_group_move (rtx dst, rtx src)
1820 {
1821 int i;
1822
1823 gcc_assert (GET_CODE (src) == PARALLEL
1824 && GET_CODE (dst) == PARALLEL
1825 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1826
1827 /* Skip first entry if NULL. */
1828 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1829 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1830 XEXP (XVECEXP (src, 0, i), 0));
1831 }
1832
1833 /* Move a group of registers represented by a PARALLEL into pseudos. */
1834
1835 rtx
1836 emit_group_move_into_temps (rtx src)
1837 {
1838 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1839 int i;
1840
1841 for (i = 0; i < XVECLEN (src, 0); i++)
1842 {
1843 rtx e = XVECEXP (src, 0, i);
1844 rtx d = XEXP (e, 0);
1845
1846 if (d)
1847 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1848 RTVEC_ELT (vec, i) = e;
1849 }
1850
1851 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1852 }
1853
1854 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1855 where SRC is non-consecutive registers represented by a PARALLEL.
1856 SSIZE represents the total size of block ORIG_DST, or -1 if not
1857 known. */
1858
1859 void
1860 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1861 {
1862 rtx *tmps, dst;
1863 int start, i;
1864 enum machine_mode m = GET_MODE (orig_dst);
1865
1866 gcc_assert (GET_CODE (src) == PARALLEL);
1867
1868 if (!SCALAR_INT_MODE_P (m)
1869 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1870 {
1871 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1872 if (imode == BLKmode)
1873 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1874 else
1875 dst = gen_reg_rtx (imode);
1876 emit_group_store (dst, src, type, ssize);
1877 if (imode != BLKmode)
1878 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1879 emit_move_insn (orig_dst, dst);
1880 return;
1881 }
1882
1883 /* Check for a NULL entry, used to indicate that the parameter goes
1884 both on the stack and in registers. */
1885 if (XEXP (XVECEXP (src, 0, 0), 0))
1886 start = 0;
1887 else
1888 start = 1;
1889
1890 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1891
1892 /* Copy the (probable) hard regs into pseudos. */
1893 for (i = start; i < XVECLEN (src, 0); i++)
1894 {
1895 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1896 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1897 emit_move_insn (tmps[i], reg);
1898 }
1899
1900 /* If we won't be storing directly into memory, protect the real destination
1901 from strange tricks we might play. */
1902 dst = orig_dst;
1903 if (GET_CODE (dst) == PARALLEL)
1904 {
1905 rtx temp;
1906
1907 /* We can get a PARALLEL dst if there is a conditional expression in
1908 a return statement. In that case, the dst and src are the same,
1909 so no action is necessary. */
1910 if (rtx_equal_p (dst, src))
1911 return;
1912
1913 /* It is unclear if we can ever reach here, but we may as well handle
1914 it. Allocate a temporary, and split this into a store/load to/from
1915 the temporary. */
1916
1917 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1918 emit_group_store (temp, src, type, ssize);
1919 emit_group_load (dst, temp, type, ssize);
1920 return;
1921 }
1922 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1923 {
1924 dst = gen_reg_rtx (GET_MODE (orig_dst));
1925 /* Make life a bit easier for combine. */
1926 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1927 }
1928
1929 /* Process the pieces. */
1930 for (i = start; i < XVECLEN (src, 0); i++)
1931 {
1932 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1933 enum machine_mode mode = GET_MODE (tmps[i]);
1934 unsigned int bytelen = GET_MODE_SIZE (mode);
1935 rtx dest = dst;
1936
1937 /* Handle trailing fragments that run over the size of the struct. */
1938 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1939 {
1940 /* store_bit_field always takes its value from the lsb.
1941 Move the fragment to the lsb if it's not already there. */
1942 if (
1943 #ifdef BLOCK_REG_PADDING
1944 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1945 == (BYTES_BIG_ENDIAN ? upward : downward)
1946 #else
1947 BYTES_BIG_ENDIAN
1948 #endif
1949 )
1950 {
1951 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1952 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1953 build_int_cst (NULL_TREE, shift),
1954 tmps[i], 0);
1955 }
1956 bytelen = ssize - bytepos;
1957 }
1958
1959 if (GET_CODE (dst) == CONCAT)
1960 {
1961 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1962 dest = XEXP (dst, 0);
1963 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1964 {
1965 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1966 dest = XEXP (dst, 1);
1967 }
1968 else
1969 {
1970 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1971 dest = assign_stack_temp (GET_MODE (dest),
1972 GET_MODE_SIZE (GET_MODE (dest)), 0);
1973 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1974 tmps[i]);
1975 dst = dest;
1976 break;
1977 }
1978 }
1979
1980 /* Optimize the access just a bit. */
1981 if (MEM_P (dest)
1982 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1983 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1984 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1985 && bytelen == GET_MODE_SIZE (mode))
1986 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1987 else
1988 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1989 mode, tmps[i]);
1990 }
1991
1992 /* Copy from the pseudo into the (probable) hard reg. */
1993 if (orig_dst != dst)
1994 emit_move_insn (orig_dst, dst);
1995 }
1996
1997 /* Generate code to copy a BLKmode object of TYPE out of a
1998 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1999 is null, a stack temporary is created. TGTBLK is returned.
2000
2001 The purpose of this routine is to handle functions that return
2002 BLKmode structures in registers. Some machines (the PA for example)
2003 want to return all small structures in registers regardless of the
2004 structure's alignment. */
2005
2006 rtx
2007 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2008 {
2009 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2010 rtx src = NULL, dst = NULL;
2011 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2012 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2013
2014 if (tgtblk == 0)
2015 {
2016 tgtblk = assign_temp (build_qualified_type (type,
2017 (TYPE_QUALS (type)
2018 | TYPE_QUAL_CONST)),
2019 0, 1, 1);
2020 preserve_temp_slots (tgtblk);
2021 }
2022
2023 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2024 into a new pseudo which is a full word. */
2025
2026 if (GET_MODE (srcreg) != BLKmode
2027 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2028 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2029
2030 /* If the structure doesn't take up a whole number of words, see whether
2031 SRCREG is padded on the left or on the right. If it's on the left,
2032 set PADDING_CORRECTION to the number of bits to skip.
2033
2034 In most ABIs, the structure will be returned at the least significant
2035 end of the register, which translates to right padding on little-endian
2036 targets and left padding on big-endian targets. The opposite
2037 holds if the structure is returned at the most significant
2038 end of the register. */
2039 if (bytes % UNITS_PER_WORD != 0
2040 && (targetm.calls.return_in_msb (type)
2041 ? !BYTES_BIG_ENDIAN
2042 : BYTES_BIG_ENDIAN))
2043 padding_correction
2044 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
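  /* (For instance, with 4-byte words and a 6-byte structure padded on
     the left, bytes % UNITS_PER_WORD == 2 and PADDING_CORRECTION
     becomes 32 - 16 == 16 bits.)  */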
2045
2046 /* Copy the structure BITSIZE bits at a time.
2047
2048 We could probably emit more efficient code for machines which do not use
2049 strict alignment, but it doesn't seem worth the effort at the current
2050 time. */
2051 for (bitpos = 0, xbitpos = padding_correction;
2052 bitpos < bytes * BITS_PER_UNIT;
2053 bitpos += bitsize, xbitpos += bitsize)
2054 {
2055 /* We need a new source operand each time xbitpos is on a
2056 word boundary and when xbitpos == padding_correction
2057 (the first time through). */
2058 if (xbitpos % BITS_PER_WORD == 0
2059 || xbitpos == padding_correction)
2060 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2061 GET_MODE (srcreg));
2062
2063 /* We need a new destination operand each time bitpos is on
2064 a word boundary. */
2065 if (bitpos % BITS_PER_WORD == 0)
2066 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2067
2068 /* Use xbitpos for the source extraction (right justified) and
2069 bitpos for the destination store (left justified). */
2070 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2071 extract_bit_field (src, bitsize,
2072 xbitpos % BITS_PER_WORD, 1,
2073 NULL_RTX, word_mode, word_mode));
2074 }
2075
2076 return tgtblk;
2077 }
2078
2079 /* Add a USE expression for REG to the (possibly empty) list pointed
2080 to by CALL_FUSAGE. REG must denote a hard register. */
2081
2082 void
2083 use_reg (rtx *call_fusage, rtx reg)
2084 {
2085 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2086
2087 *call_fusage
2088 = gen_rtx_EXPR_LIST (VOIDmode,
2089 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2090 }
2091
2092 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2093 starting at REGNO. All of these registers must be hard registers. */
2094
2095 void
2096 use_regs (rtx *call_fusage, int regno, int nregs)
2097 {
2098 int i;
2099
2100 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2101
2102 for (i = 0; i < nregs; i++)
2103 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2104 }
2105
2106 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2107 PARALLEL REGS. This is for calls that pass values in multiple
2108 non-contiguous locations. The Irix 6 ABI has examples of this. */
2109
2110 void
2111 use_group_regs (rtx *call_fusage, rtx regs)
2112 {
2113 int i;
2114
2115 for (i = 0; i < XVECLEN (regs, 0); i++)
2116 {
2117 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2118
2119 /* A NULL entry means the parameter goes both on the stack and in
2120 registers. This can also be a MEM for targets that pass values
2121 partially on the stack and partially in registers. */
2122 if (reg != 0 && REG_P (reg))
2123 use_reg (call_fusage, reg);
2124 }
2125 }
2126
2127
2128 /* Determine whether the LEN bytes generated by CONSTFUN can be
2129 stored to memory using several move instructions. CONSTFUNDATA is
2130 a pointer which will be passed as argument in every CONSTFUN call.
2131 ALIGN is maximum alignment we can assume. Return nonzero if a
2132 call to store_by_pieces should succeed. */
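/* (A minimal CONSTFUN, for a clear-to-zero, simply ignores its
   arguments and returns const0_rtx:

     static rtx
     zero_constfun (void *data ATTRIBUTE_UNUSED,
		    HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
     {
       return const0_rtx;
     }

   clear_by_pieces_1 below is exactly this; the name zero_constfun is
   only illustrative.)  */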
2133
2134 int
2135 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2136 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2137 void *constfundata, unsigned int align)
2138 {
2139 unsigned HOST_WIDE_INT l;
2140 unsigned int max_size;
2141 HOST_WIDE_INT offset = 0;
2142 enum machine_mode mode, tmode;
2143 enum insn_code icode;
2144 int reverse;
2145 rtx cst;
2146
2147 if (len == 0)
2148 return 1;
2149
2150 if (! STORE_BY_PIECES_P (len, align))
2151 return 0;
2152
2153 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2154 if (align >= GET_MODE_ALIGNMENT (tmode))
2155 align = GET_MODE_ALIGNMENT (tmode);
2156 else
2157 {
2158 enum machine_mode xmode;
2159
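	      /* XMODE trails one step behind TMODE, so when this loop
		 stops XMODE names the widest integer mode that is still
		 cheap at the given alignment.  */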
2160 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2161 tmode != VOIDmode;
2162 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2163 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2164 || SLOW_UNALIGNED_ACCESS (tmode, align))
2165 break;
2166
2167 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2168 }
2169
2170 /* We would first store what we can in the largest integer mode, then go to
2171 successively smaller modes. */
2172
2173 for (reverse = 0;
2174 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2175 reverse++)
2176 {
2177 l = len;
2178 mode = VOIDmode;
2179 max_size = STORE_MAX_PIECES + 1;
2180 while (max_size > 1)
2181 {
2182 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2183 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2184 if (GET_MODE_SIZE (tmode) < max_size)
2185 mode = tmode;
2186
2187 if (mode == VOIDmode)
2188 break;
2189
2190 icode = mov_optab->handlers[(int) mode].insn_code;
2191 if (icode != CODE_FOR_nothing
2192 && align >= GET_MODE_ALIGNMENT (mode))
2193 {
2194 unsigned int size = GET_MODE_SIZE (mode);
2195
2196 while (l >= size)
2197 {
2198 if (reverse)
2199 offset -= size;
2200
2201 cst = (*constfun) (constfundata, offset, mode);
2202 if (!LEGITIMATE_CONSTANT_P (cst))
2203 return 0;
2204
2205 if (!reverse)
2206 offset += size;
2207
2208 l -= size;
2209 }
2210 }
2211
2212 max_size = GET_MODE_SIZE (mode);
2213 }
2214
2215 /* The code above should have handled everything. */
2216 gcc_assert (!l);
2217 }
2218
2219 return 1;
2220 }
2221
2222 /* Generate several move instructions to store LEN bytes generated by
2223 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2224 pointer which will be passed as argument in every CONSTFUN call.
2225 ALIGN is maximum alignment we can assume.
2226 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2227 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2228 stpcpy. */
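/* (E.g. a mempcpy-style expansion passes ENDP == 1 so that the rtx it
   gets back addresses the first byte past the stored block.)  */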
2229
2230 rtx
2231 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2232 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2233 void *constfundata, unsigned int align, int endp)
2234 {
2235 struct store_by_pieces data;
2236
2237 if (len == 0)
2238 {
2239 gcc_assert (endp != 2);
2240 return to;
2241 }
2242
2243 gcc_assert (STORE_BY_PIECES_P (len, align));
2244 data.constfun = constfun;
2245 data.constfundata = constfundata;
2246 data.len = len;
2247 data.to = to;
2248 store_by_pieces_1 (&data, align);
2249 if (endp)
2250 {
2251 rtx to1;
2252
2253 gcc_assert (!data.reverse);
2254 if (data.autinc_to)
2255 {
2256 if (endp == 2)
2257 {
2258 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2259 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2260 else
2261 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2262 -1));
2263 }
2264 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2265 data.offset);
2266 }
2267 else
2268 {
2269 if (endp == 2)
2270 --data.offset;
2271 to1 = adjust_address (data.to, QImode, data.offset);
2272 }
2273 return to1;
2274 }
2275 else
2276 return data.to;
2277 }
2278
2279 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2280 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2281
2282 static void
2283 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2284 {
2285 struct store_by_pieces data;
2286
2287 if (len == 0)
2288 return;
2289
2290 data.constfun = clear_by_pieces_1;
2291 data.constfundata = NULL;
2292 data.len = len;
2293 data.to = to;
2294 store_by_pieces_1 (&data, align);
2295 }
2296
2297 /* Callback routine for clear_by_pieces.
2298 Return const0_rtx unconditionally. */
2299
2300 static rtx
2301 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2302 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2303 enum machine_mode mode ATTRIBUTE_UNUSED)
2304 {
2305 return const0_rtx;
2306 }
2307
2308 /* Subroutine of clear_by_pieces and store_by_pieces.
2309 Generate several move instructions to store LEN bytes of block TO. (A MEM
2310 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2311
2312 static void
2313 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2314 unsigned int align ATTRIBUTE_UNUSED)
2315 {
2316 rtx to_addr = XEXP (data->to, 0);
2317 unsigned int max_size = STORE_MAX_PIECES + 1;
2318 enum machine_mode mode = VOIDmode, tmode;
2319 enum insn_code icode;
2320
2321 data->offset = 0;
2322 data->to_addr = to_addr;
2323 data->autinc_to
2324 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2325 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2326
2327 data->explicit_inc_to = 0;
2328 data->reverse
2329 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2330 if (data->reverse)
2331 data->offset = data->len;
2332
2333 /* If storing requires more than two move insns,
2334 copy addresses to registers (to make displacements shorter)
2335 and use post-increment if available. */
2336 if (!data->autinc_to
2337 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2338 {
2339 /* Determine the main mode we'll be using. */
2340 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2341 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2342 if (GET_MODE_SIZE (tmode) < max_size)
2343 mode = tmode;
2344
2345 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2346 {
2347 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2348 data->autinc_to = 1;
2349 data->explicit_inc_to = -1;
2350 }
2351
2352 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2353 && ! data->autinc_to)
2354 {
2355 data->to_addr = copy_addr_to_reg (to_addr);
2356 data->autinc_to = 1;
2357 data->explicit_inc_to = 1;
2358 }
2359
2360 if (!data->autinc_to && CONSTANT_P (to_addr))
2361 data->to_addr = copy_addr_to_reg (to_addr);
2362 }
2363
2364 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2365 if (align >= GET_MODE_ALIGNMENT (tmode))
2366 align = GET_MODE_ALIGNMENT (tmode);
2367 else
2368 {
2369 enum machine_mode xmode;
2370
2371 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2372 tmode != VOIDmode;
2373 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2374 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2375 || SLOW_UNALIGNED_ACCESS (tmode, align))
2376 break;
2377
2378 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2379 }
2380
2381 /* First store what we can in the largest integer mode, then go to
2382 successively smaller modes. */
2383
2384 while (max_size > 1)
2385 {
2386 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2387 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2388 if (GET_MODE_SIZE (tmode) < max_size)
2389 mode = tmode;
2390
2391 if (mode == VOIDmode)
2392 break;
2393
2394 icode = mov_optab->handlers[(int) mode].insn_code;
2395 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2396 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2397
2398 max_size = GET_MODE_SIZE (mode);
2399 }
2400
2401 /* The code above should have handled everything. */
2402 gcc_assert (!data->len);
2403 }
2404
2405 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2406 with move instructions for mode MODE. GENFUN is the gen_... function
2407 to make a move insn for that mode. DATA has all the other info. */
2408
2409 static void
2410 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2411 struct store_by_pieces *data)
2412 {
2413 unsigned int size = GET_MODE_SIZE (mode);
2414 rtx to1, cst;
2415
2416 while (data->len >= size)
2417 {
2418 if (data->reverse)
2419 data->offset -= size;
2420
2421 if (data->autinc_to)
2422 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2423 data->offset);
2424 else
2425 to1 = adjust_address (data->to, mode, data->offset);
2426
2427 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2428 emit_insn (gen_add2_insn (data->to_addr,
2429 GEN_INT (-(HOST_WIDE_INT) size)));
2430
2431 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2432 emit_insn ((*genfun) (to1, cst));
2433
2434 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2435 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2436
2437 if (! data->reverse)
2438 data->offset += size;
2439
2440 data->len -= size;
2441 }
2442 }
2443
2444 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2445 its length in bytes. */
2446
2447 rtx
2448 clear_storage (rtx object, rtx size, enum block_op_methods method)
2449 {
2450 enum machine_mode mode = GET_MODE (object);
2451 unsigned int align;
2452
2453 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2454
2455 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2456 just move a zero. Otherwise, do this a piece at a time. */
2457 if (mode != BLKmode
2458 && GET_CODE (size) == CONST_INT
2459 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2460 {
2461 rtx zero = CONST0_RTX (mode);
2462 if (zero != NULL)
2463 {
2464 emit_move_insn (object, zero);
2465 return NULL;
2466 }
2467
2468 if (COMPLEX_MODE_P (mode))
2469 {
2470 zero = CONST0_RTX (GET_MODE_INNER (mode));
2471 if (zero != NULL)
2472 {
2473 write_complex_part (object, zero, 0);
2474 write_complex_part (object, zero, 1);
2475 return NULL;
2476 }
2477 }
2478 }
2479
2480 if (size == const0_rtx)
2481 return NULL;
2482
2483 align = MEM_ALIGN (object);
2484
2485 if (GET_CODE (size) == CONST_INT
2486 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2487 clear_by_pieces (object, INTVAL (size), align);
2488 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2489 ;
2490 else
2491 return clear_storage_via_libcall (object, size,
2492 method == BLOCK_OP_TAILCALL);
2493
2494 return NULL;
2495 }
2496
2497 /* A subroutine of clear_storage. Expand a call to memset.
2498 Return the return value of memset, 0 otherwise. */
2499
2500 static rtx
2501 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2502 {
2503 tree call_expr, arg_list, fn, object_tree, size_tree;
2504 enum machine_mode size_mode;
2505 rtx retval;
2506
2507 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2508 place those new pseudos into a VAR_DECL and use them later. */
2509
2510 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2511
2512 #if 0
2513 size_mode = TYPE_MODE (sizetype);
2514 size = convert_to_mode (size_mode, size, 1);
2515 size = copy_to_mode_reg (size_mode, size);
2516
2517 /* It is incorrect to use the libcall calling conventions to call
2518 memset in this context. This could be a user call to memset and
2519 the user may wish to examine the return value from memset. For
2520 targets where libcalls and normal calls have different conventions
2521 for returning pointers, we could end up generating incorrect code.
2522 (TIGCC 20050205) NO, the "incorrect" code is actually correct for us! */
2523
2524 object_tree = make_tree (ptr_type_node, object);
2525 size_tree = make_tree (sizetype, size);
2526
2527 fn = clear_storage_libcall_fn (true);
2528 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2529 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2530 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2531
2532 /* Now we have to build up the CALL_EXPR itself. */
2533 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2534 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2535 call_expr, arg_list, NULL_TREE);
2536 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2537
2538 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2539 #endif /* 0 */
2540
2541 /* Note: Our memset libcall expects a short integer zero even with -mlong. */
2542 retval = emit_library_call_value (memset_libfunc, NULL_RTX, LCT_NORMAL,
2543 VOIDmode, 3, object, Pmode,
2544 const0_rtx, TYPE_MODE (short_integer_type_node),
2545 convert_to_mode (TYPE_MODE (sizetype),
2546 size, TYPE_UNSIGNED (sizetype)),
2547 TYPE_MODE (sizetype));
2548
2549 return retval;
2550 }
2551
2552 /* A subroutine of clear_storage_via_libcall. Create the tree node
2553 for the function we use for block clears. The first time FOR_CALL
2554 is true, we call assemble_external. */
2555
2556 static GTY(()) tree block_clear_fn;
2557
2558 void
2559 init_block_clear_fn (const char *asmspec)
2560 {
2561 if (!block_clear_fn)
2562 {
2563 tree fn, args;
2564
2565 fn = get_identifier ("memset");
2566 /* TIGCC Patch: The memset libcall expects a short integer zero even with
2567 -mlong. */
2568 args = build_function_type_list (ptr_type_node, ptr_type_node,
2569 short_integer_type_node, sizetype,
2570 NULL_TREE);
2571
2572 fn = build_decl (FUNCTION_DECL, fn, args);
2573 DECL_EXTERNAL (fn) = 1;
2574 TREE_PUBLIC (fn) = 1;
2575 DECL_ARTIFICIAL (fn) = 1;
2576 TREE_NOTHROW (fn) = 1;
2577 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2578 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2579
2580 block_clear_fn = fn;
2581 }
2582
2583 if (asmspec)
2584 set_user_assembler_name (block_clear_fn, asmspec);
2585 }
2586
2587 static tree
2588 clear_storage_libcall_fn (int for_call)
2589 {
2590 static bool emitted_extern;
2591
2592 if (!block_clear_fn)
2593 init_block_clear_fn (NULL);
2594
2595 if (for_call && !emitted_extern)
2596 {
2597 emitted_extern = true;
2598 make_decl_rtl (block_clear_fn);
2599 assemble_external (block_clear_fn);
2600 }
2601
2602 return block_clear_fn;
2603 }
2604
2605 /* Expand a setmem pattern; return true if successful. */
2606
2607 bool
2608 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2609 {
2610 /* Try the most limited insn first, because there's no point
2611 including more than one in the machine description unless
2612 the more limited one has some advantage. */
2613
2614 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2615 enum machine_mode mode;
2616
2617 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2618 mode = GET_MODE_WIDER_MODE (mode))
2619 {
2620 enum insn_code code = setmem_optab[(int) mode];
2621 insn_operand_predicate_fn pred;
2622
2623 if (code != CODE_FOR_nothing
2624 /* We don't need MODE to be narrower than
2625 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2626 the mode mask, as it is returned by the macro, it will
2627 definitely be less than the actual mode mask. */
2628 && ((GET_CODE (size) == CONST_INT
2629 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2630 <= (GET_MODE_MASK (mode) >> 1)))
2631 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2632 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2633 || (*pred) (object, BLKmode))
2634 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2635 || (*pred) (opalign, VOIDmode)))
2636 {
2637 rtx opsize, opchar;
2638 enum machine_mode char_mode;
2639 rtx last = get_last_insn ();
2640 rtx pat;
2641
2642 opsize = convert_to_mode (mode, size, 1);
2643 pred = insn_data[(int) code].operand[1].predicate;
2644 if (pred != 0 && ! (*pred) (opsize, mode))
2645 opsize = copy_to_mode_reg (mode, opsize);
2646
2647 opchar = val;
2648 char_mode = insn_data[(int) code].operand[2].mode;
2649 if (char_mode != VOIDmode)
2650 {
2651 opchar = convert_to_mode (char_mode, opchar, 1);
2652 pred = insn_data[(int) code].operand[2].predicate;
2653 if (pred != 0 && ! (*pred) (opchar, char_mode))
2654 opchar = copy_to_mode_reg (char_mode, opchar);
2655 }
2656
2657 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2658 if (pat)
2659 {
2660 emit_insn (pat);
2661 return true;
2662 }
2663 else
2664 delete_insns_since (last);
2665 }
2666 }
2667
2668 return false;
2669 }
2670
2671
2672 /* Write to one of the components of the complex value CPLX. Write VAL to
2673 the real part if IMAG_P is false, and the imaginary part if it's true. */
2674
2675 static void
2676 write_complex_part (rtx cplx, rtx val, bool imag_p)
2677 {
2678 enum machine_mode cmode;
2679 enum machine_mode imode;
2680 unsigned ibitsize;
2681
2682 if (GET_CODE (cplx) == CONCAT)
2683 {
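      /* A CONCAT holds the real part as operand 0 and the imaginary
	 part as operand 1, so IMAG_P indexes it directly.  */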
2684 emit_move_insn (XEXP (cplx, imag_p), val);
2685 return;
2686 }
2687
2688 cmode = GET_MODE (cplx);
2689 imode = GET_MODE_INNER (cmode);
2690 ibitsize = GET_MODE_BITSIZE (imode);
2691
2692 /* For MEMs simplify_gen_subreg may generate an invalid new address
2693 because, e.g., the original address is considered mode-dependent
2694 by the target, which restricts simplify_subreg from invoking
2695 adjust_address_nv. Instead of preparing fallback support for an
2696 invalid address, we call adjust_address_nv directly. */
2697 if (MEM_P (cplx))
2698 {
2699 emit_move_insn (adjust_address_nv (cplx, imode,
2700 imag_p ? GET_MODE_SIZE (imode) : 0),
2701 val);
2702 return;
2703 }
2704
2705 /* If the sub-object is at least word sized, then we know that subregging
2706 will work. This special case is important, since store_bit_field
2707 wants to operate on integer modes, and there's rarely an OImode to
2708 correspond to TCmode. */
2709 if (ibitsize >= BITS_PER_WORD
2710 /* For hard regs we have exact predicates. Assume we can split
2711 the original object if it spans an even number of hard regs.
2712 This special case is important for SCmode on 64-bit platforms
2713 where the natural size of floating-point regs is 32-bit. */
2714 || (REG_P (cplx)
2715 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2716 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2717 {
2718 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2719 imag_p ? GET_MODE_SIZE (imode) : 0);
2720 if (part)
2721 {
2722 emit_move_insn (part, val);
2723 return;
2724 }
2725 else
2726 /* simplify_gen_subreg may fail for sub-word MEMs. */
2727 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2728 }
2729
2730 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2731 }
2732
2733 /* Extract one of the components of the complex value CPLX. Extract the
2734 real part if IMAG_P is false, and the imaginary part if it's true. */
2735
2736 static rtx
2737 read_complex_part (rtx cplx, bool imag_p)
2738 {
2739 enum machine_mode cmode, imode;
2740 unsigned ibitsize;
2741
2742 if (GET_CODE (cplx) == CONCAT)
2743 return XEXP (cplx, imag_p);
2744
2745 cmode = GET_MODE (cplx);
2746 imode = GET_MODE_INNER (cmode);
2747 ibitsize = GET_MODE_BITSIZE (imode);
2748
2749 /* Special case reads from complex constants that got spilled to memory. */
2750 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2751 {
2752 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2753 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2754 {
2755 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2756 if (CONSTANT_CLASS_P (part))
2757 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2758 }
2759 }
2760
2761 /* For MEMs simplify_gen_subreg may generate an invalid new address
2762 because, e.g., the original address is considered mode-dependent
2763 by the target, which restricts simplify_subreg from invoking
2764 adjust_address_nv. Instead of preparing fallback support for an
2765 invalid address, we call adjust_address_nv directly. */
2766 if (MEM_P (cplx))
2767 return adjust_address_nv (cplx, imode,
2768 imag_p ? GET_MODE_SIZE (imode) : 0);
2769
2770 /* If the sub-object is at least word sized, then we know that subregging
2771 will work. This special case is important, since extract_bit_field
2772 wants to operate on integer modes, and there's rarely an OImode to
2773 correspond to TCmode. */
2774 if (ibitsize >= BITS_PER_WORD
2775 /* For hard regs we have exact predicates. Assume we can split
2776 the original object if it spans an even number of hard regs.
2777 This special case is important for SCmode on 64-bit platforms
2778 where the natural size of floating-point regs is 32-bit. */
2779 || (REG_P (cplx)
2780 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2781 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2782 {
2783 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2784 imag_p ? GET_MODE_SIZE (imode) : 0);
2785 if (ret)
2786 return ret;
2787 else
2788 /* simplify_gen_subreg may fail for sub-word MEMs. */
2789 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2790 }
2791
2792 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2793 true, NULL_RTX, imode, imode);
2794 }
2795
2796 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2797 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2798 represented in NEW_MODE. If FORCE is true, this will never happen, as
2799 we'll force-create a SUBREG if needed. */
2800
2801 static rtx
2802 emit_move_change_mode (enum machine_mode new_mode,
2803 enum machine_mode old_mode, rtx x, bool force)
2804 {
2805 rtx ret;
2806
2807 if (reload_in_progress && MEM_P (x))
2808 {
2809 /* We can't use gen_lowpart here because it may call change_address
2810 which is not appropriate if we were called when a reload was in
2811 progress. We don't have to worry about changing the address since
2812 the size in bytes is supposed to be the same. Copy the MEM to
2813 change the mode and move any substitutions from the old MEM to
2814 the new one. */
2815
2816 ret = adjust_address_nv (x, new_mode, 0);
2817 copy_replacements (x, ret);
2818 }
2819 else
2820 {
2821 /* Note that we do want simplify_subreg's behavior of validating
2822 that the new mode is ok for a hard register. If we were to use
2823 simplify_gen_subreg, we would create the subreg, but would
2824 probably run into the target not being able to implement it. */
2825 /* Except, of course, when FORCE is true, when this is exactly what
2826 we want. Which is needed for CCmodes on some targets. */
2827 if (force)
2828 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2829 else
2830 ret = simplify_subreg (new_mode, x, old_mode, 0);
2831 }
2832
2833 return ret;
2834 }
2835
2836 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2837 an integer mode of the same size as MODE. Returns the instruction
2838 emitted, or NULL if such a move could not be generated. */
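/* (E.g. on a 32-bit target an SFmode move can usually be performed as
   an SImode move, the two modes being the same size.)  */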
2839
2840 static rtx
2841 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2842 {
2843 enum machine_mode imode;
2844 enum insn_code code;
2845
2846 /* There must exist a mode of the exact size we require. */
2847 imode = int_mode_for_mode (mode);
2848 if (imode == BLKmode)
2849 return NULL_RTX;
2850
2851 /* The target must support moves in this mode. */
2852 code = mov_optab->handlers[imode].insn_code;
2853 if (code == CODE_FOR_nothing)
2854 return NULL_RTX;
2855
2856 x = emit_move_change_mode (imode, mode, x, force);
2857 if (x == NULL_RTX)
2858 return NULL_RTX;
2859 y = emit_move_change_mode (imode, mode, y, force);
2860 if (y == NULL_RTX)
2861 return NULL_RTX;
2862 return emit_insn (GEN_FCN (code) (x, y));
2863 }
2864
2865 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2866 Return an equivalent MEM that does not use an auto-increment. */
2867
2868 static rtx
2869 emit_move_resolve_push (enum machine_mode mode, rtx x)
2870 {
2871 enum rtx_code code = GET_CODE (XEXP (x, 0));
2872 HOST_WIDE_INT adjust;
2873 rtx temp;
2874
2875 adjust = GET_MODE_SIZE (mode);
2876 #ifdef PUSH_ROUNDING
2877 adjust = PUSH_ROUNDING (adjust);
2878 #endif
2879 if (code == PRE_DEC || code == POST_DEC)
2880 adjust = -adjust;
2881 else if (code == PRE_MODIFY || code == POST_MODIFY)
2882 {
2883 rtx expr = XEXP (XEXP (x, 0), 1);
2884 HOST_WIDE_INT val;
2885
2886 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2887 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2888 val = INTVAL (XEXP (expr, 1));
2889 if (GET_CODE (expr) == MINUS)
2890 val = -val;
2891 gcc_assert (adjust == val || adjust == -val);
2892 adjust = val;
2893 }
2894
2895 /* Do not use anti_adjust_stack, since we don't want to update
2896 stack_pointer_delta. */
2897 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2898 GEN_INT (adjust), stack_pointer_rtx,
2899 0, OPTAB_LIB_WIDEN);
2900 if (temp != stack_pointer_rtx)
2901 emit_move_insn (stack_pointer_rtx, temp);
2902
2903 switch (code)
2904 {
2905 case PRE_INC:
2906 case PRE_DEC:
2907 case PRE_MODIFY:
2908 temp = stack_pointer_rtx;
2909 break;
2910 case POST_INC:
2911 case POST_DEC:
2912 case POST_MODIFY:
2913 temp = plus_constant (stack_pointer_rtx, -adjust);
2914 break;
2915 default:
2916 gcc_unreachable ();
2917 }
2918
2919 return replace_equiv_address (x, temp);
2920 }
2921
2922 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2923 X is known to satisfy push_operand, and MODE is known to be complex.
2924 Returns the last instruction emitted. */
2925
2926 static rtx
2927 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2928 {
2929 enum machine_mode submode = GET_MODE_INNER (mode);
2930 bool imag_first;
2931
2932 #ifdef PUSH_ROUNDING
2933 unsigned int submodesize = GET_MODE_SIZE (submode);
2934
2935 /* In case we output to the stack, but the size is smaller than the
2936 machine can push exactly, we need to use move instructions. */
2937 if (PUSH_ROUNDING (submodesize) != submodesize)
2938 {
2939 x = emit_move_resolve_push (mode, x);
2940 return emit_move_insn (x, y);
2941 }
2942 #endif
2943
2944 /* Note that the real part always precedes the imag part in memory
2945 regardless of machine's endianness. */
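  /* A decrementing push writes toward lower addresses, so the
     imaginary part must be pushed first for the real part to end up
     at the lower address; an incrementing push works the other way.  */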
2946 switch (GET_CODE (XEXP (x, 0)))
2947 {
2948 case PRE_DEC:
2949 case POST_DEC:
2950 imag_first = true;
2951 break;
2952 case PRE_INC:
2953 case POST_INC:
2954 imag_first = false;
2955 break;
2956 default:
2957 gcc_unreachable ();
2958 }
2959
2960 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2961 read_complex_part (y, imag_first));
2962 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2963 read_complex_part (y, !imag_first));
2964 }
2965
2966 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2967 MODE is known to be complex. Returns the last instruction emitted. */
2968
2969 static rtx
2970 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2971 {
2972 bool try_int;
2973
2974 /* Need to take special care for pushes, to maintain proper ordering
2975 of the data, and possibly extra padding. */
2976 if (push_operand (x, mode))
2977 return emit_move_complex_push (mode, x, y);
2978
2979 /* For memory to memory moves, optimal behavior can be had with the
2980 existing block move logic. */
2981 /* (TIGCC 20050323) ... and this is indeed the case for our target, despite
2982 claims to the contrary in PR rtl-optimization/20306. -- Kevin Kofler */
2983 if (MEM_P (x) && MEM_P (y))
2984 {
2985 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2986 BLOCK_OP_NO_LIBCALL);
2987 return get_last_insn ();
2988 }
2989
2990 /* See if we can coerce the target into moving both values at once. */
2991
2992 /* Not possible if the values are inherently not adjacent. */
2993 if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2994 try_int = false;
2995 /* Is possible if both are registers (or subregs of registers). */
2996 else if (register_operand (x, mode) && register_operand (y, mode))
2997 try_int = true;
2998 /* If one of the operands is a memory, and alignment constraints
2999 are friendly enough, we may be able to do combined memory operations.
3000 We do not attempt this if Y is a constant because that combination is
3001 usually better with the by-parts thing below. */
3002 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3003 && (!STRICT_ALIGNMENT
3004 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3005 try_int = true;
3006 else
3007 try_int = false;
3008
3009 if (try_int)
3010 {
3011 rtx ret = emit_move_via_integer (mode, x, y, true);
3012 if (ret)
3013 return ret;
3014 }
3015
3016 /* Show the output dies here. This is necessary for SUBREGs
3017 of pseudos since we cannot track their lifetimes correctly;
3018 hard regs shouldn't appear here except as return values. */
3019 if (!reload_completed && !reload_in_progress
3020 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3021 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3022
3023 write_complex_part (x, read_complex_part (y, false), false);
3024 write_complex_part (x, read_complex_part (y, true), true);
3025 return get_last_insn ();
3026 }
3027
3028 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3029 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3030
3031 static rtx
3032 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3033 {
3034 rtx ret;
3035
3036 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3037 if (mode != CCmode)
3038 {
3039 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3040 if (code != CODE_FOR_nothing)
3041 {
3042 x = emit_move_change_mode (CCmode, mode, x, true);
3043 y = emit_move_change_mode (CCmode, mode, y, true);
3044 return emit_insn (GEN_FCN (code) (x, y));
3045 }
3046 }
3047
3048 /* Otherwise, find the MODE_INT mode of the same width. */
3049 ret = emit_move_via_integer (mode, x, y, false);
3050 gcc_assert (ret != NULL);
3051 return ret;
3052 }
3053
3054 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3055 MODE is any multi-word or full-word mode that lacks a move_insn
3056 pattern. Note that you will get better code if you define such
3057 patterns, even if they must turn into multiple assembler instructions. */
3058
3059 static rtx
3060 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3061 {
3062 rtx last_insn = 0;
3063 rtx seq, inner;
3064 bool need_clobber;
3065 int i;
3066
3067 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3068
3069 /* If X is a push on the stack, do the push now and replace
3070 X with a reference to the stack pointer. */
3071 if (push_operand (x, mode))
3072 x = emit_move_resolve_push (mode, x);
3073
3074 /* If we are in reload, see if either operand is a MEM whose address
3075 is scheduled for replacement. */
3076 if (reload_in_progress && MEM_P (x)
3077 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3078 x = replace_equiv_address_nv (x, inner);
3079 if (reload_in_progress && MEM_P (y)
3080 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3081 y = replace_equiv_address_nv (y, inner);
3082
3083 start_sequence ();
3084
3085 need_clobber = false;
3086 for (i = 0;
3087 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3088 i++)
3089 {
3090 rtx xpart = operand_subword (x, i, 1, mode);
3091 rtx ypart = operand_subword (y, i, 1, mode);
3092
3093 /* If we can't get a part of Y, put Y into memory if it is a
3094 constant. Otherwise, force it into a register. Then we must
3095 be able to get a part of Y. */
3096 if (ypart == 0 && CONSTANT_P (y))
3097 {
3098 y = force_const_mem (mode, y);
3099 ypart = operand_subword (y, i, 1, mode);
3100 }
3101 else if (ypart == 0)
3102 ypart = operand_subword_force (y, i, mode);
3103
3104 gcc_assert (xpart && ypart);
3105
3106 need_clobber |= (GET_CODE (xpart) == SUBREG);
3107
3108 last_insn = emit_move_insn (xpart, ypart);
3109 }
3110
3111 seq = get_insns ();
3112 end_sequence ();
3113
3114 /* Show the output dies here. This is necessary for SUBREGs
3115 of pseudos since we cannot track their lifetimes correctly;
3116 hard regs shouldn't appear here except as return values.
3117 We never want to emit such a clobber after reload. */
3118 if (x != y
3119 && ! (reload_in_progress || reload_completed)
3120 && need_clobber != 0)
3121 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3122
3123 emit_insn (seq);
3124
3125 return last_insn;
3126 }
3127
3128 /* Low level part of emit_move_insn.
3129 Called just like emit_move_insn, but assumes X and Y
3130 are basically valid. */
3131
3132 rtx
3133 emit_move_insn_1 (rtx x, rtx y)
3134 {
3135 enum machine_mode mode = GET_MODE (x);
3136 enum insn_code code;
3137
3138 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3139
3140 code = mov_optab->handlers[mode].insn_code;
3141 if (code != CODE_FOR_nothing)
3142 return emit_insn (GEN_FCN (code) (x, y));
3143
3144 /* Expand complex moves by moving real part and imag part. */
3145 if (COMPLEX_MODE_P (mode))
3146 return emit_move_complex (mode, x, y);
3147
3148 if (GET_MODE_CLASS (mode) == MODE_CC)
3149 return emit_move_ccmode (mode, x, y);
3150
3151 /* Try using a move pattern for the corresponding integer mode. This is
3152 only safe when simplify_subreg can convert MODE constants into integer
3153 constants. At present, it can only do this reliably if the value
3154 fits within a HOST_WIDE_INT. */
3155 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3156 {
3157 rtx ret = emit_move_via_integer (mode, x, y, false);
3158 if (ret)
3159 return ret;
3160 }
3161
3162 return emit_move_multi_word (mode, x, y);
3163 }
3164
3165 /* Generate code to copy Y into X.
3166 Both Y and X must have the same mode, except that
3167 Y can be a constant with VOIDmode.
3168 This mode cannot be BLKmode; use emit_block_move for that.
3169
3170 Return the last instruction emitted. */
3171
3172 rtx
3173 emit_move_insn (rtx x, rtx y)
3174 {
3175 enum machine_mode mode = GET_MODE (x);
3176 rtx y_cst = NULL_RTX;
3177 rtx last_insn, set;
3178
3179 gcc_assert (mode != BLKmode
3180 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3181
3182 if (CONSTANT_P (y))
3183 {
3184 if (optimize
3185 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3186 && (last_insn = compress_float_constant (x, y)))
3187 return last_insn;
3188
3189 y_cst = y;
3190
3191 if (!LEGITIMATE_CONSTANT_P (y))
3192 {
3193 y = force_const_mem (mode, y);
3194
3195 /* If the target's cannot_force_const_mem prevented the spill,
3196 assume that the target's move expanders will also take care
3197 of the non-legitimate constant. */
3198 if (!y)
3199 y = y_cst;
3200 }
3201 }
3202
3203 /* If X or Y are memory references, verify that their addresses are valid
3204 for the machine. */
3205 if (MEM_P (x)
3206 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3207 && ! push_operand (x, GET_MODE (x)))
3208 || (flag_force_addr
3209 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3210 x = validize_mem (x);
3211
3212 if (MEM_P (y)
3213 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3214 || (flag_force_addr
3215 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3216 y = validize_mem (y);
3217
3218 gcc_assert (mode != BLKmode);
3219
3220 last_insn = emit_move_insn_1 (x, y);
3221
3222 if (y_cst && REG_P (x)
3223 && (set = single_set (last_insn)) != NULL_RTX
3224 && SET_DEST (set) == x
3225 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3226 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3227
3228 return last_insn;
3229 }
3230
3231 /* If Y is representable exactly in a narrower mode, and the target can
3232 perform the extension directly from constant or memory, then emit the
3233 move as an extension. */
3234
3235 static rtx
3236 compress_float_constant (rtx x, rtx y)
3237 {
3238 enum machine_mode dstmode = GET_MODE (x);
3239 enum machine_mode orig_srcmode = GET_MODE (y);
3240 enum machine_mode srcmode;
3241 REAL_VALUE_TYPE r;
3242 int oldcost, newcost;
3243
3244 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3245
3246 if (LEGITIMATE_CONSTANT_P (y))
3247 oldcost = rtx_cost (y, SET);
3248 else
3249 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3250
3251 /* (TIGCC) We do not implement exact_real_truncate and there is no narrower
3252 float mode anyway. -- Kevin Kofler */
3253 #if 0
3254 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3255 srcmode != orig_srcmode;
3256 srcmode = GET_MODE_WIDER_MODE (srcmode))
3257 {
3258 enum insn_code ic;
3259 rtx trunc_y, last_insn;
3260
3261 /* Skip if the target can't extend this way. */
3262 ic = can_extend_p (dstmode, srcmode, 0);
3263 if (ic == CODE_FOR_nothing)
3264 continue;
3265
3266 /* Skip if the narrowed value isn't exact. */
3267 if (! exact_real_truncate (srcmode, &r))
3268 continue;
3269
3270 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3271
3272 if (LEGITIMATE_CONSTANT_P (trunc_y))
3273 {
3274 /* Skip if the target needs extra instructions to perform
3275 the extension. */
3276 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3277 continue;
3278 /* This is valid, but may not be cheaper than the original. */
3279 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3280 if (oldcost < newcost)
3281 continue;
3282 }
3283 else if (float_extend_from_mem[dstmode][srcmode])
3284 {
3285 trunc_y = force_const_mem (srcmode, trunc_y);
3286 /* This is valid, but may not be cheaper than the original. */
3287 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3288 if (oldcost < newcost)
3289 continue;
3290 trunc_y = validize_mem (trunc_y);
3291 }
3292 else
3293 continue;
3294
3295 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3296 last_insn = get_last_insn ();
3297
3298 if (REG_P (x))
3299 set_unique_reg_note (last_insn, REG_EQUAL, y);
3300
3301 return last_insn;
3302 }
3303 #endif /* 0 */
3304
3305 return NULL_RTX;
3306 }
3307
3308 /* Pushing data onto the stack. */
3309
3310 /* Push a block of length SIZE (perhaps variable)
3311 and return an rtx to address the beginning of the block.
3312 The value may be virtual_outgoing_args_rtx.
3313
3314 EXTRA is the number of bytes of padding to push in addition to SIZE.
3315 BELOW nonzero means this padding comes at low addresses;
3316 otherwise, the padding comes at high addresses. */
3317
3318 rtx
3319 push_block (rtx size, int extra, int below)
3320 {
3321 rtx temp;
3322
3323 size = convert_modes (Pmode, ptr_mode, size, 1);
3324 if (CONSTANT_P (size))
3325 anti_adjust_stack (plus_constant (size, extra));
3326 else if (REG_P (size) && extra == 0)
3327 anti_adjust_stack (size);
3328 else
3329 {
3330 temp = copy_to_mode_reg (Pmode, size);
3331 if (extra != 0)
3332 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3333 temp, 0, OPTAB_LIB_WIDEN);
3334 anti_adjust_stack (temp);
3335 }
3336
3337 #ifndef STACK_GROWS_DOWNWARD
3338 if (0)
3339 #else
3340 if (1)
3341 #endif
3342 {
3343 temp = virtual_outgoing_args_rtx;
3344 if (extra != 0 && below)
3345 temp = plus_constant (temp, extra);
3346 }
3347 else
3348 {
3349 if (GET_CODE (size) == CONST_INT)
3350 temp = plus_constant (virtual_outgoing_args_rtx,
3351 -INTVAL (size) - (below ? 0 : extra));
3352 else if (extra != 0 && !below)
3353 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3354 negate_rtx (Pmode, plus_constant (size, extra)));
3355 else
3356 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3357 negate_rtx (Pmode, size));
3358 }
3359
3360 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3361 }
3362
3363 #ifdef PUSH_ROUNDING
3364
3365 /* Emit single push insn. */
3366
3367 static void
3368 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3369 {
3370 rtx dest_addr;
3371 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3372 rtx dest;
3373 enum insn_code icode;
3374 insn_operand_predicate_fn pred;
3375
3376 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3377 /* If there is a push pattern, use it. Otherwise try the old way of
3378 throwing a MEM representing the push operation to the move expander. */
3379 icode = push_optab->handlers[(int) mode].insn_code;
3380 if (icode != CODE_FOR_nothing)
3381 {
3382 if (((pred = insn_data[(int) icode].operand[0].predicate)
3383 && !((*pred) (x, mode))))
3384 x = force_reg (mode, x);
3385 emit_insn (GEN_FCN (icode) (x));
3386 return;
3387 }
3388 if (GET_MODE_SIZE (mode) == rounded_size)
3389 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3390 /* If we are to pad downward, adjust the stack pointer first and
3391 then store X into the stack location using an offset. This is
3392 because emit_move_insn does not know how to pad; it does not have
3393 access to type. */
3394 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3395 {
3396 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3397 HOST_WIDE_INT offset;
3398
3399 emit_move_insn (stack_pointer_rtx,
3400 expand_binop (Pmode,
3401 #ifdef STACK_GROWS_DOWNWARD
3402 sub_optab,
3403 #else
3404 add_optab,
3405 #endif
3406 stack_pointer_rtx,
3407 GEN_INT (rounded_size),
3408 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3409
3410 offset = (HOST_WIDE_INT) padding_size;
3411 #ifdef STACK_GROWS_DOWNWARD
3412 if (STACK_PUSH_CODE == POST_DEC)
3413 /* We have already decremented the stack pointer, so get the
3414 previous value. */
3415 offset += (HOST_WIDE_INT) rounded_size;
3416 #else
3417 if (STACK_PUSH_CODE == POST_INC)
3418 /* We have already incremented the stack pointer, so get the
3419 previous value. */
3420 offset -= (HOST_WIDE_INT) rounded_size;
3421 #endif
3422 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3423 }
3424 else
3425 {
3426 #ifdef STACK_GROWS_DOWNWARD
3427 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3428 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3429 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3430 #else
3431 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3432 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3433 GEN_INT (rounded_size));
3434 #endif
3435 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3436 }
3437
3438 dest = gen_rtx_MEM (mode, dest_addr);
3439
3440 if (type != 0)
3441 {
3442 set_mem_attributes (dest, type, 1);
3443
3444 if (flag_optimize_sibling_calls)
3445 /* Function incoming arguments may overlap with sibling call
3446 outgoing arguments and we cannot allow reordering of reads
3447 from function arguments with stores to outgoing arguments
3448 of sibling calls. */
3449 set_mem_alias_set (dest, 0);
3450 }
3451 emit_move_insn (dest, x);
3452 }
3453 #endif
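
/* Worked example (editorial, assuming PUSH_ROUNDING rounds to 4, a
   downward-growing stack, and STACK_PUSH_CODE != POST_DEC): pushing a
   1-byte QImode value whose FUNCTION_ARG_PADDING is `downward' gives
   rounded_size == 4 and padding_size == 3, so emit_single_push_insn
   decrements the stack pointer by 4 and stores the byte at sp + 3,
   leaving the three padding bytes at the low end of the slot. */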
3454
3455 /* Generate code to push X onto the stack, assuming it has mode MODE and
3456 type TYPE.
3457 MODE is redundant except when X is a CONST_INT (since they don't
3458 carry mode info).
3459 SIZE is an rtx for the size of data to be copied (in bytes),
3460 needed only if X is BLKmode.
3461
3462 ALIGN (in bits) is maximum alignment we can assume.
3463
3464 If PARTIAL and REG are both nonzero, then copy that many of the first
3465 bytes of X into registers starting with REG, and push the rest of X.
3466 The amount of space pushed is decreased by PARTIAL bytes.
3467 REG must be a hard register in this case.
3468 If REG is zero but PARTIAL is not, take all other actions for an
3469 argument partially in registers, but do not actually load any
3470 registers.
3471
3472 EXTRA is the amount in bytes of extra space to leave next to this arg.
3473 This is ignored if an argument block has already been allocated.
3474
3475 On a machine that lacks real push insns, ARGS_ADDR is the address of
3476 the bottom of the argument block for this call. We use indexing off there
3477 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3478 argument block has not been preallocated.
3479
3480 ARGS_SO_FAR is the size of args previously pushed for this call.
3481
3482 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3483 for arguments passed in registers. If nonzero, it will be the number
3484 of bytes required. */
3485
3486 void
3487 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3488 unsigned int align, int partial, rtx reg, int extra,
3489 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3490 rtx alignment_pad)
3491 {
3492 rtx xinner;
3493 enum direction stack_direction
3494 #ifdef STACK_GROWS_DOWNWARD
3495 = downward;
3496 #else
3497 = upward;
3498 #endif
3499
3500 /* Decide where to pad the argument: `downward' for below,
3501 `upward' for above, or `none' for don't pad it.
3502 Default is below for small data on big-endian machines; else above. */
3503 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3504
3505 /* Invert direction if stack is post-decrement.
3506 FIXME: why? */
3507 if (STACK_PUSH_CODE == POST_DEC)
3508 if (where_pad != none)
3509 where_pad = (where_pad == downward ? upward : downward);
3510
3511 xinner = x;
3512
3513 if (mode == BLKmode)
3514 {
3515 /* Copy a block into the stack, entirely or partially. */
3516
3517 rtx temp;
3518 int used;
3519 int offset;
3520 int skip;
3521
3522 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3523 used = partial - offset;
3524
3525 gcc_assert (size);
3526
3527 /* USED is now the # of bytes we need not copy to the stack
3528 because registers will take care of them. */
3529
3530 if (partial != 0)
3531 xinner = adjust_address (xinner, BLKmode, used);
3532
3533 /* If the partial register-part of the arg counts in its stack size,
3534 skip the part of stack space corresponding to the registers.
3535 Otherwise, start copying to the beginning of the stack space,
3536 by setting SKIP to 0. */
3537 skip = (reg_parm_stack_space == 0) ? 0 : used;
3538
3539 #ifdef PUSH_ROUNDING
3540 /* Do it with several push insns if that doesn't take lots of insns
3541 and if there is no difficulty with push insns that skip bytes
3542 on the stack for alignment purposes. */
3543 if (args_addr == 0
3544 && PUSH_ARGS
3545 && GET_CODE (size) == CONST_INT
3546 && skip == 0
3547 && MEM_ALIGN (xinner) >= align
3548 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3549 /* Here we avoid the case of a structure whose weak alignment
3550 forces many pushes of a small amount of data,
3551 and such small pushes do rounding that causes trouble. */
3552 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3553 || align >= BIGGEST_ALIGNMENT
3554 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3555 == (align / BITS_PER_UNIT)))
3556 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3557 {
3558 /* Push padding now if padding above and stack grows down,
3559 or if padding below and stack grows up.
3560 But if space already allocated, this has already been done. */
3561 if (extra && args_addr == 0
3562 && where_pad != none && where_pad != stack_direction)
3563 anti_adjust_stack (GEN_INT (extra));
3564
3565 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3566 }
3567 else
3568 #endif /* PUSH_ROUNDING */
3569 {
3570 rtx target;
3571
3572 /* Otherwise make space on the stack and copy the data
3573 to the address of that space. */
3574
3575 /* Deduct words put into registers from the size we must copy. */
3576 if (partial != 0)
3577 {
3578 if (GET_CODE (size) == CONST_INT)
3579 size = GEN_INT (INTVAL (size) - used);
3580 else
3581 size = expand_binop (GET_MODE (size), sub_optab, size,
3582 GEN_INT (used), NULL_RTX, 0,
3583 OPTAB_LIB_WIDEN);
3584 }
3585
3586 /* Get the address of the stack space.
3587 In this case, we do not deal with EXTRA separately.
3588 A single stack adjust will do. */
3589 if (! args_addr)
3590 {
3591 temp = push_block (size, extra, where_pad == downward);
3592 extra = 0;
3593 }
3594 else if (GET_CODE (args_so_far) == CONST_INT)
3595 temp = memory_address (BLKmode,
3596 plus_constant (args_addr,
3597 skip + INTVAL (args_so_far)));
3598 else
3599 temp = memory_address (BLKmode,
3600 plus_constant (gen_rtx_PLUS (Pmode,
3601 args_addr,
3602 args_so_far),
3603 skip));
3604
3605 if (!ACCUMULATE_OUTGOING_ARGS)
3606 {
3607 /* If the source is referenced relative to the stack pointer,
3608 copy it to another register to stabilize it. We do not need
3609 to do this if we know that we won't be changing sp. */
3610
3611 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3612 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3613 temp = copy_to_reg (temp);
3614 }
3615
3616 target = gen_rtx_MEM (BLKmode, temp);
3617
3618 /* We do *not* set_mem_attributes here, because incoming arguments
3619 may overlap with sibling call outgoing arguments and we cannot
3620 allow reordering of reads from function arguments with stores
3621 to outgoing arguments of sibling calls. We do, however, want
3622 to record the alignment of the stack slot. */
3623 /* ALIGN may well be better aligned than TYPE, e.g. due to
3624 PARM_BOUNDARY. Assume the caller isn't lying. */
3625 set_mem_align (target, align);
3626
3627 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3628 }
3629 }
3630 else if (partial > 0)
3631 {
3632 /* Scalar partly in registers. */
3633
3634 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3635 int i;
3636 int not_stack;
3637 /* # bytes of start of argument
3638 that we must make space for but need not store. */
3639 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3640 int args_offset = INTVAL (args_so_far);
3641 int skip;
3642
3643 /* Push padding now if padding above and stack grows down,
3644 or if padding below and stack grows up.
3645 But if space already allocated, this has already been done. */
3646 if (extra && args_addr == 0
3647 && where_pad != none && where_pad != stack_direction)
3648 anti_adjust_stack (GEN_INT (extra));
3649
3650 /* If we make space by pushing it, we might as well push
3651 the real data. Otherwise, we can leave OFFSET nonzero
3652 and leave the space uninitialized. */
3653 if (args_addr == 0)
3654 offset = 0;
3655
3656 /* Now NOT_STACK gets the number of words that we don't need to
3657 allocate on the stack. Convert OFFSET to words too. */
3658 not_stack = (partial - offset) / UNITS_PER_WORD;
3659 offset /= UNITS_PER_WORD;
3660
3661 /* If the partial register-part of the arg counts in its stack size,
3662 skip the part of stack space corresponding to the registers.
3663 Otherwise, start copying to the beginning of the stack space,
3664 by setting SKIP to 0. */
3665 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3666
3667 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3668 x = validize_mem (force_const_mem (mode, x));
3669
3670 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3671 SUBREGs of such registers are not allowed. */
3672 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3673 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3674 x = copy_to_reg (x);
3675
3676 /* Loop over all the words allocated on the stack for this arg. */
3677 /* We can do it by words, because any scalar bigger than a word
3678 has a size a multiple of a word. */
3679 #ifndef PUSH_ARGS_REVERSED
3680 for (i = not_stack; i < size; i++)
3681 #else
3682 for (i = size - 1; i >= not_stack; i--)
3683 #endif
3684 if (i >= not_stack + offset)
3685 emit_push_insn (operand_subword_force (x, i, mode),
3686 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3687 0, args_addr,
3688 GEN_INT (args_offset + ((i - not_stack + skip)
3689 * UNITS_PER_WORD)),
3690 reg_parm_stack_space, alignment_pad);
3691 }
3692 else
3693 {
3694 rtx addr;
3695 rtx dest;
3696
3697 /* Push padding now if padding above and stack grows down,
3698 or if padding below and stack grows up.
3699 But if space already allocated, this has already been done. */
3700 if (extra && args_addr == 0
3701 && where_pad != none && where_pad != stack_direction)
3702 anti_adjust_stack (GEN_INT (extra));
3703
3704 #ifdef PUSH_ROUNDING
3705 if (args_addr == 0 && PUSH_ARGS)
3706 emit_single_push_insn (mode, x, type);
3707 else
3708 #endif
3709 {
3710 if (GET_CODE (args_so_far) == CONST_INT)
3711 addr
3712 = memory_address (mode,
3713 plus_constant (args_addr,
3714 INTVAL (args_so_far)));
3715 else
3716 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3717 args_so_far));
3718 dest = gen_rtx_MEM (mode, addr);
3719
3720 /* We do *not* set_mem_attributes here, because incoming arguments
3721 may overlap with sibling call outgoing arguments and we cannot
3722 allow reordering of reads from function arguments with stores
3723 to outgoing arguments of sibling calls. We do, however, want
3724 to record the alignment of the stack slot. */
3725 /* ALIGN may well be better aligned than TYPE, e.g. due to
3726 PARM_BOUNDARY. Assume the caller isn't lying. */
3727 set_mem_align (dest, align);
3728
3729 emit_move_insn (dest, x);
3730 }
3731 }
3732
3733 /* If part should go in registers, copy that part
3734 into the appropriate registers. Do this now, at the end,
3735 since mem-to-mem copies above may do function calls. */
3736 if (partial > 0 && reg != 0)
3737 {
3738 /* Handle calls that pass values in multiple non-contiguous locations.
3739 The Irix 6 ABI has examples of this. */
3740 if (GET_CODE (reg) == PARALLEL)
3741 emit_group_load (reg, x, type, -1);
3742 else
3743 {
3744 gcc_assert (partial % UNITS_PER_WORD == 0);
3745 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3746 }
3747 }
3748
3749 if (extra && args_addr == 0 && where_pad == stack_direction)
3750 anti_adjust_stack (GEN_INT (extra));
3751
3752 if (alignment_pad && args_addr == 0)
3753 anti_adjust_stack (alignment_pad);
3754 }
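
/* Worked example (editorial, assuming UNITS_PER_WORD == 4 and
   PARM_BOUNDARY == 32): a 12-byte BLKmode argument passed to
   emit_push_insn with PARTIAL == 8 has USED == 8, so XINNER is advanced
   by 8 bytes and only the final 4 bytes are pushed; at the end,
   move_block_to_reg loads the first two words into registers starting
   at REG. */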
3755
3756 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3757 operations. */
3758
3759 static rtx
3760 get_subtarget (rtx x)
3761 {
3762 return (optimize
3763 || x == 0
3764 /* Only registers can be subtargets. */
3765 || !REG_P (x)
3766 /* Don't use hard regs to avoid extending their life. */
3767 || REGNO (x) < FIRST_PSEUDO_REGISTER
3768 ? 0 : x);
3769 }
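
/* Editorial note (not in the original source): get_subtarget returns X
   only when optimization is off and X is a pseudo register; for a hard
   register (or when optimizing) it returns 0 so that expanders pick a
   fresh target rather than extending a hard register's lifetime. */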
3770
3771 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3772 FIELD is a bitfield. Returns true if the optimization was successful,
3773 and there's nothing else to do. */
3774
3775 static bool
3776 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3777 unsigned HOST_WIDE_INT bitpos,
3778 enum machine_mode mode1, rtx str_rtx,
3779 tree to, tree src)
3780 {
3781 enum machine_mode str_mode = GET_MODE (str_rtx);
3782 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3783 tree op0, op1;
3784 rtx value, result;
3785 optab binop;
3786
3787 if (mode1 != VOIDmode
3788 || bitsize >= BITS_PER_WORD
3789 || str_bitsize > BITS_PER_WORD
3790 || TREE_SIDE_EFFECTS (to)
3791 || TREE_THIS_VOLATILE (to))
3792 return false;
3793
3794 STRIP_NOPS (src);
3795 if (!BINARY_CLASS_P (src)
3796 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3797 return false;
3798
3799 op0 = TREE_OPERAND (src, 0);
3800 op1 = TREE_OPERAND (src, 1);
3801 STRIP_NOPS (op0);
3802
3803 if (!operand_equal_p (to, op0, 0))
3804 return false;
3805
3806 if (MEM_P (str_rtx))
3807 {
3808 unsigned HOST_WIDE_INT offset1;
3809
3810 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3811 str_mode = word_mode;
3812 str_mode = get_best_mode (bitsize, bitpos,
3813 MEM_ALIGN (str_rtx), str_mode, 0);
3814 if (str_mode == VOIDmode)
3815 return false;
3816 str_bitsize = GET_MODE_BITSIZE (str_mode);
3817
3818 offset1 = bitpos;
3819 bitpos %= str_bitsize;
3820 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3821 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3822 }
3823 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3824 return false;
3825
3826 /* If the bit field covers the whole REG/MEM, store_field
3827 will likely generate better code. */
3828 if (bitsize >= str_bitsize)
3829 return false;
3830
3831 /* We can't handle fields split across multiple entities. */
3832 if (bitpos + bitsize > str_bitsize)
3833 return false;
3834
3835 if (BYTES_BIG_ENDIAN)
3836 bitpos = str_bitsize - bitpos - bitsize;
3837
3838 switch (TREE_CODE (src))
3839 {
3840 case PLUS_EXPR:
3841 case MINUS_EXPR:
3842 /* For now, just optimize the case of the topmost bitfield
3843 where we don't need to do any masking and also
3844 1 bit bitfields where xor can be used.
3845 We might win by one instruction for the other bitfields
3846 too if insv/extv instructions aren't used, so that
3847 can be added later. */
3848 if (bitpos + bitsize != str_bitsize
3849 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3850 break;
3851
3852 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3853 value = convert_modes (str_mode,
3854 TYPE_MODE (TREE_TYPE (op1)), value,
3855 TYPE_UNSIGNED (TREE_TYPE (op1)));
3856
3857 /* We may be accessing data outside the field, which means
3858 we can alias adjacent data. */
3859 if (MEM_P (str_rtx))
3860 {
3861 str_rtx = shallow_copy_rtx (str_rtx);
3862 set_mem_alias_set (str_rtx, 0);
3863 set_mem_expr (str_rtx, 0);
3864 }
3865
3866 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3867 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3868 {
3869 value = expand_and (str_mode, value, const1_rtx, NULL);
3870 binop = xor_optab;
3871 }
3872 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3873 build_int_cst (NULL_TREE, bitpos),
3874 NULL_RTX, 1);
3875 result = expand_binop (str_mode, binop, str_rtx,
3876 value, str_rtx, 1, OPTAB_WIDEN);
3877 if (result != str_rtx)
3878 emit_move_insn (str_rtx, result);
3879 return true;
3880
3881 case BIT_IOR_EXPR:
3882 case BIT_XOR_EXPR:
3883 if (TREE_CODE (op1) != INTEGER_CST)
3884 break;
3885 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3886 value = convert_modes (GET_MODE (str_rtx),
3887 TYPE_MODE (TREE_TYPE (op1)), value,
3888 TYPE_UNSIGNED (TREE_TYPE (op1)));
3889
3890 /* We may be accessing data outside the field, which means
3891 we can alias adjacent data. */
3892 if (MEM_P (str_rtx))
3893 {
3894 str_rtx = shallow_copy_rtx (str_rtx);
3895 set_mem_alias_set (str_rtx, 0);
3896 set_mem_expr (str_rtx, 0);
3897 }
3898
3899 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3900 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3901 {
3902 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3903 - 1);
3904 value = expand_and (GET_MODE (str_rtx), value, mask,
3905 NULL_RTX);
3906 }
3907 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3908 build_int_cst (NULL_TREE, bitpos),
3909 NULL_RTX, 1);
3910 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3911 value, str_rtx, 1, OPTAB_WIDEN);
3912 if (result != str_rtx)
3913 emit_move_insn (str_rtx, result);
3914 return true;
3915
3916 default:
3917 break;
3918 }
3919
3920 return false;
3921 }
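
#if 0
/* Editorial example of source constructs the routine above targets;
   this fragment is illustrative and not part of expr.c. */
struct flags
{
  unsigned int ready : 1;	/* 1-bit field: the xor trick applies. */
  unsigned int count : 7;
};

static void
touch (struct flags *p)
{
  p->ready ^= 1;	/* BIT_XOR_EXPR path: mask, shift, xor. */
  p->count += 1;	/* PLUS_EXPR candidate; optimized only when the
			   field is topmost in its word or 1 bit wide. */
}
#endif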
3922
3923
3924 /* Expand an assignment that stores the value of FROM into TO. */
3925
3926 void
3927 expand_assignment (tree to, tree from)
3928 {
3929 rtx to_rtx = 0;
3930 rtx result;
3931
3932 /* Don't crash if the lhs of the assignment was erroneous. */
3933
3934 if (TREE_CODE (to) == ERROR_MARK)
3935 {
3936 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3937 return;
3938 }
3939
3940 /* Assignment of a structure component needs special treatment
3941 if the structure component's rtx is not simply a MEM.
3942 Assignment of an array element at a constant index, and assignment of
3943 an array element in an unaligned packed structure field, has the same
3944 problem. */
3945 if (handled_component_p (to)
3946 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3947 {
3948 enum machine_mode mode1;
3949 HOST_WIDE_INT bitsize, bitpos;
3950 tree offset;
3951 int unsignedp;
3952 int volatilep = 0;
3953 tree tem;
3954
3955 push_temp_slots ();
3956 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3957 &unsignedp, &volatilep, true);
3958
3959 /* If we are going to use store_bit_field and extract_bit_field,
3960 make sure to_rtx will be safe for multiple use. */
3961
3962 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3963
3964 if (offset != 0)
3965 {
3966 rtx offset_rtx;
3967
3968 if (!MEM_P (to_rtx))
3969 {
3970 /* We can get constant negative offsets into arrays with broken
3971 user code. Translate this to a trap instead of ICEing. */
3972 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
3973 expand_builtin_trap ();
3974 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
3975 }
3976
3977 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3978 #ifdef POINTERS_EXTEND_UNSIGNED
3979 if (GET_MODE (offset_rtx) != Pmode)
3980 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3981 #else
3982 if (GET_MODE (offset_rtx) != ptr_mode)
3983 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3984 #endif
3985
3986 /* A constant address in TO_RTX can have VOIDmode, we must not try
3987 to call force_reg for that case. Avoid that case. */
3988 if (MEM_P (to_rtx)
3989 && GET_MODE (to_rtx) == BLKmode
3990 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3991 && bitsize > 0
3992 && (bitpos % bitsize) == 0
3993 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3994 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3995 {
3996 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3997 bitpos = 0;
3998 }
3999
4000 to_rtx = offset_address (to_rtx, offset_rtx,
4001 highest_pow2_factor_for_target (to,
4002 offset));
4003 }
4004
4005 /* Handle expand_expr of a complex value returning a CONCAT. */
4006 if (GET_CODE (to_rtx) == CONCAT)
4007 {
4008 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4009 {
4010 gcc_assert (bitpos == 0);
4011 result = store_expr (from, to_rtx, false);
4012 }
4013 else
4014 {
4015 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4016 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4017 }
4018 }
4019 else
4020 {
4021 if (MEM_P (to_rtx))
4022 {
4023 /* If the field is at offset zero, we could have been given the
4024 DECL_RTX of the parent struct. Don't munge it. */
4025 to_rtx = shallow_copy_rtx (to_rtx);
4026
4027 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4028
4029 /* Deal with volatile and readonly fields. The former is only
4030 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4031 if (volatilep)
4032 MEM_VOLATILE_P (to_rtx) = 1;
4033 if (component_uses_parent_alias_set (to))
4034 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4035 }
4036
4037 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4038 to_rtx, to, from))
4039 result = NULL;
4040 else
4041 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4042 TREE_TYPE (tem), get_alias_set (to));
4043 }
4044
4045 if (result)
4046 preserve_temp_slots (result);
4047 free_temp_slots ();
4048 pop_temp_slots ();
4049 return;
4050 }
4051
4052 /* If the rhs is a function call and its value is not an aggregate,
4053 call the function before we start to compute the lhs.
4054 This is needed for correct code for cases such as
4055 val = setjmp (buf) on machines where reference to val
4056 requires loading up part of an address in a separate insn.
4057
4058 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4059 since it might be a promoted variable where the zero- or sign- extension
4060 needs to be done. Handling this in the normal way is safe because no
4061 computation is done before the call. */
4062 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4063 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4064 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4065 && REG_P (DECL_RTL (to))))
4066 {
4067 rtx value;
4068
4069 push_temp_slots ();
4070 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4071 if (to_rtx == 0)
4072 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4073
4074 /* Handle calls that return values in multiple non-contiguous locations.
4075 The Irix 6 ABI has examples of this. */
4076 if (GET_CODE (to_rtx) == PARALLEL)
4077 emit_group_load (to_rtx, value, TREE_TYPE (from),
4078 int_size_in_bytes (TREE_TYPE (from)));
4079 else if (GET_MODE (to_rtx) == BLKmode)
4080 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4081 else
4082 {
4083 if (POINTER_TYPE_P (TREE_TYPE (to)))
4084 value = convert_memory_address (GET_MODE (to_rtx), value);
4085 emit_move_insn (to_rtx, value);
4086 }
4087 preserve_temp_slots (to_rtx);
4088 free_temp_slots ();
4089 pop_temp_slots ();
4090 return;
4091 }
4092
4093 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4094 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4095
4096 if (to_rtx == 0)
4097 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4098
4099 /* Don't move directly into a return register. */
4100 if (TREE_CODE (to) == RESULT_DECL
4101 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4102 {
4103 rtx temp;
4104
4105 push_temp_slots ();
4106 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4107
4108 if (GET_CODE (to_rtx) == PARALLEL)
4109 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4110 int_size_in_bytes (TREE_TYPE (from)));
4111 else
4112 emit_move_insn (to_rtx, temp);
4113
4114 preserve_temp_slots (to_rtx);
4115 free_temp_slots ();
4116 pop_temp_slots ();
4117 return;
4118 }
4119
4120 /* In case we are returning the contents of an object which overlaps
4121 the place the value is being stored, use a safe function when copying
4122 a value through a pointer into a structure value return block. */
4123 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4124 && current_function_returns_struct
4125 && !current_function_returns_pcc_struct)
4126 {
4127 rtx from_rtx, size;
4128
4129 push_temp_slots ();
4130 size = expr_size (from);
4131 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4132
4133 emit_library_call (memmove_libfunc, LCT_NORMAL,
4134 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4135 XEXP (from_rtx, 0), Pmode,
4136 convert_to_mode (TYPE_MODE (sizetype),
4137 size, TYPE_UNSIGNED (sizetype)),
4138 TYPE_MODE (sizetype));
4139
4140 preserve_temp_slots (to_rtx);
4141 free_temp_slots ();
4142 pop_temp_slots ();
4143 return;
4144 }
4145
4146 /* Compute FROM and store the value in the rtx we got. */
4147
4148 push_temp_slots ();
4149 result = store_expr (from, to_rtx, 0);
4150 preserve_temp_slots (result);
4151 free_temp_slots ();
4152 pop_temp_slots ();
4153 return;
4154 }
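
#if 0
/* Editorial examples (not part of expr.c) of assignments and the
   expand_assignment paths they take. */
s.field = v;		/* handled_component_p: get_inner_reference path. */
val = setjmp (buf);	/* CALL_EXPR on the rhs: expand the call first. */
return *p;		/* struct return: may copy via memmove_libfunc. */
#endif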
4155
4156 /* Generate code for computing expression EXP,
4157 and storing the value into TARGET.
4158
4159 If the mode is BLKmode then we may return TARGET itself.
4160 It turns out that in BLKmode it doesn't cause a problem,
4161 because C has no operators that could combine two different
4162 assignments into the same BLKmode object with different values
4163 with no sequence point. Will other languages need this to
4164 be more thorough?
4165
4166 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4167 stack, and block moves may need to be treated specially. */
4168
4169 rtx
4170 store_expr (tree exp, rtx target, int call_param_p)
4171 {
4172 rtx temp;
4173 rtx alt_rtl = NULL_RTX;
4174 int dont_return_target = 0;
4175
4176 if (VOID_TYPE_P (TREE_TYPE (exp)))
4177 {
4178 /* C++ can generate ?: expressions with a throw expression in one
4179 branch and an rvalue in the other. Here, we resolve attempts to
4180 store the throw expression's nonexistent result. */
4181 gcc_assert (!call_param_p);
4182 expand_expr (exp, const0_rtx, VOIDmode, 0);
4183 return NULL_RTX;
4184 }
4185 if (TREE_CODE (exp) == COMPOUND_EXPR)
4186 {
4187 /* Perform first part of compound expression, then assign from second
4188 part. */
4189 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4190 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4191 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4192 }
4193 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4194 {
4195 /* For conditional expression, get safe form of the target. Then
4196 test the condition, doing the appropriate assignment on either
4197 side. This avoids the creation of unnecessary temporaries.
4198 For non-BLKmode, it is more efficient not to do this. */
4199
4200 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4201
4202 do_pending_stack_adjust ();
4203 NO_DEFER_POP;
4204 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4205 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4206 emit_jump_insn (gen_jump (lab2));
4207 emit_barrier ();
4208 emit_label (lab1);
4209 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4210 emit_label (lab2);
4211 OK_DEFER_POP;
4212
4213 return NULL_RTX;
4214 }
4215 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4216 /* If this is a scalar in a register that is stored in a wider mode
4217 than the declared mode, compute the result into its declared mode
4218 and then convert to the wider mode. Our value is the computed
4219 expression. */
4220 {
4221 rtx inner_target = 0;
4222
4223 /* We can do the conversion inside EXP, which will often result
4224 in some optimizations. Do the conversion in two steps: first
4225 change the signedness, if needed, then the extend. But don't
4226 do this if the type of EXP is a subtype of something else
4227 since then the conversion might involve more than just
4228 converting modes. */
4229 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4230 && TREE_TYPE (TREE_TYPE (exp)) == 0
4231 && (!lang_hooks.reduce_bit_field_operations
4232 || (GET_MODE_PRECISION (GET_MODE (target))
4233 == TYPE_PRECISION (TREE_TYPE (exp)))))
4234 {
4235 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4236 != SUBREG_PROMOTED_UNSIGNED_P (target))
4237 exp = convert
4238 (lang_hooks.types.signed_or_unsigned_type
4239 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4240
4241 exp = convert (lang_hooks.types.type_for_mode
4242 (GET_MODE (SUBREG_REG (target)),
4243 SUBREG_PROMOTED_UNSIGNED_P (target)),
4244 exp);
4245
4246 inner_target = SUBREG_REG (target);
4247 }
4248
4249 temp = expand_expr (exp, inner_target, VOIDmode,
4250 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4251
4252 /* If TEMP is a VOIDmode constant, use convert_modes to make
4253 sure that we properly convert it. */
4254 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4255 {
4256 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4257 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4258 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4259 GET_MODE (target), temp,
4260 SUBREG_PROMOTED_UNSIGNED_P (target));
4261 }
4262
4263 convert_move (SUBREG_REG (target), temp,
4264 SUBREG_PROMOTED_UNSIGNED_P (target));
4265
4266 return NULL_RTX;
4267 }
4268 else
4269 {
4270 temp = expand_expr_real (exp, target, GET_MODE (target),
4271 (call_param_p
4272 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4273 &alt_rtl);
4274 /* Return TARGET if it's a specified hardware register.
4275 If TARGET is a volatile mem ref, either return TARGET
4276 or return a reg copied *from* TARGET; ANSI requires this.
4277
4278 Otherwise, if TEMP is not TARGET, return TEMP
4279 if it is constant (for efficiency),
4280 or if we really want the correct value. */
4281 if (!(target && REG_P (target)
4282 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4283 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4284 && ! rtx_equal_p (temp, target)
4285 && CONSTANT_P (temp))
4286 dont_return_target = 1;
4287 }
4288
4289 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4290 the same as that of TARGET, adjust the constant. This is needed, for
4291 example, in case it is a CONST_DOUBLE and we want only a word-sized
4292 value. */
4293 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4294 && TREE_CODE (exp) != ERROR_MARK
4295 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4296 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4297 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4298
4299 /* If value was not generated in the target, store it there.
4300 Convert the value to TARGET's type first if necessary and emit the
4301 pending incrementations that have been queued when expanding EXP.
4302 Note that we cannot emit the whole queue blindly because this will
4303 effectively disable the POST_INC optimization later.
4304
4305 If TEMP and TARGET compare equal according to rtx_equal_p, but
4306 one or both of them are volatile memory refs, we have to distinguish
4307 two cases:
4308 - expand_expr has used TARGET. In this case, we must not generate
4309 another copy. This can be detected by TARGET being equal according
4310 to == .
4311 - expand_expr has not used TARGET - that means that the source just
4312 happens to have the same RTX form. Since temp will have been created
4313 by expand_expr, it will compare unequal according to == .
4314 We must generate a copy in this case, to reach the correct number
4315 of volatile memory references. */
4316
4317 if ((! rtx_equal_p (temp, target)
4318 || (temp != target && (side_effects_p (temp)
4319 || side_effects_p (target))))
4320 && TREE_CODE (exp) != ERROR_MARK
4321 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4322 but TARGET is not valid memory reference, TEMP will differ
4323 from TARGET although it is really the same location. */
4324 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4325 /* If there's nothing to copy, don't bother. Don't call
4326 expr_size unless necessary, because the expr_size hook of some
4327 front-ends (C++) must not be given objects that are not
4328 supposed to be bit-copied or bit-initialized. */
4329 && expr_size (exp) != const0_rtx)
4330 {
4331 if (GET_MODE (temp) != GET_MODE (target)
4332 && GET_MODE (temp) != VOIDmode)
4333 {
4334 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4335 if (dont_return_target)
4336 {
4337 /* In this case, we will return TEMP,
4338 so make sure it has the proper mode.
4339 But don't forget to store the value into TARGET. */
4340 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4341 emit_move_insn (target, temp);
4342 }
4343 else
4344 convert_move (target, temp, unsignedp);
4345 }
4346
4347 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4348 {
4349 /* Handle copying a string constant into an array. The string
4350 constant may be shorter than the array. So copy just the string's
4351 actual length, and clear the rest. First get the size of the data
4352 type of the string, which is actually the size of the target. */
4353 rtx size = expr_size (exp);
4354
4355 if (GET_CODE (size) == CONST_INT
4356 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4357 emit_block_move (target, temp, size,
4358 (call_param_p
4359 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4360 else
4361 {
4362 /* Compute the size of the data to copy from the string. */
4363 tree copy_size
4364 = size_binop (MIN_EXPR,
4365 make_tree (sizetype, size),
4366 size_int (TREE_STRING_LENGTH (exp)));
4367 rtx copy_size_rtx
4368 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4369 (call_param_p
4370 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4371 rtx label = 0;
4372
4373 /* Copy that much. */
4374 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4375 TYPE_UNSIGNED (sizetype));
4376 emit_block_move (target, temp, copy_size_rtx,
4377 (call_param_p
4378 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4379
4380 /* Figure out how much is left in TARGET that we have to clear.
4381 Do all calculations in ptr_mode. */
4382 if (GET_CODE (copy_size_rtx) == CONST_INT)
4383 {
4384 size = plus_constant (size, -INTVAL (copy_size_rtx));
4385 target = adjust_address (target, BLKmode,
4386 INTVAL (copy_size_rtx));
4387 }
4388 else
4389 {
4390 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4391 copy_size_rtx, NULL_RTX, 0,
4392 OPTAB_LIB_WIDEN);
4393
4394 #ifdef POINTERS_EXTEND_UNSIGNED
4395 if (GET_MODE (copy_size_rtx) != Pmode)
4396 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4397 TYPE_UNSIGNED (sizetype));
4398 #endif
4399
4400 target = offset_address (target, copy_size_rtx,
4401 highest_pow2_factor (copy_size));
4402 label = gen_label_rtx ();
4403 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4404 GET_MODE (size), 0, label);
4405 }
4406
4407 if (size != const0_rtx)
4408 clear_storage (target, size, BLOCK_OP_NORMAL);
4409
4410 if (label)
4411 emit_label (label);
4412 }
4413 }
4414 /* Handle calls that return values in multiple non-contiguous locations.
4415 The Irix 6 ABI has examples of this. */
4416 else if (GET_CODE (target) == PARALLEL)
4417 emit_group_load (target, temp, TREE_TYPE (exp),
4418 int_size_in_bytes (TREE_TYPE (exp)));
4419 else if (GET_MODE (temp) == BLKmode)
4420 emit_block_move (target, temp, expr_size (exp),
4421 (call_param_p
4422 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4423 else
4424 {
4425 temp = force_operand (temp, target);
4426 if (temp != target)
4427 emit_move_insn (target, temp);
4428 }
4429 }
4430
4431 return NULL_RTX;
4432 }
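
#if 0
/* Editorial example (not part of expr.c) for the STRING_CST branch
   above. For the local array below, store_expr copies the six bytes
   of "hello" (including the terminating NUL) and then clears the
   remaining four bytes. */
void
example (void)
{
  char buf[10] = "hello";
}
#endif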
4433
4434 /* Examine CTOR to discover:
4435 * how many scalar fields are set to nonzero values,
4436 and place it in *P_NZ_ELTS;
4437 * how many scalar fields are set to non-constant values,
4438 and place it in *P_NC_ELTS; and
4439 * how many scalar fields in total are in CTOR,
4440 and place it in *P_ELT_COUNT.
4441 * if a type is a union, and the initializer from the constructor
4442 is not the largest element in the union, then set *p_must_clear. */
4443
4444 static void
4445 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4446 HOST_WIDE_INT *p_nc_elts,
4447 HOST_WIDE_INT *p_elt_count,
4448 bool *p_must_clear)
4449 {
4450 unsigned HOST_WIDE_INT idx;
4451 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4452 tree value, purpose;
4453
4454 nz_elts = 0;
4455 nc_elts = 0;
4456 elt_count = 0;
4457
4458 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4459 {
4460 HOST_WIDE_INT mult;
4461
4462 mult = 1;
4463 if (TREE_CODE (purpose) == RANGE_EXPR)
4464 {
4465 tree lo_index = TREE_OPERAND (purpose, 0);
4466 tree hi_index = TREE_OPERAND (purpose, 1);
4467
4468 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4469 mult = (tree_low_cst (hi_index, 1)
4470 - tree_low_cst (lo_index, 1) + 1);
4471 }
4472
4473 switch (TREE_CODE (value))
4474 {
4475 case CONSTRUCTOR:
4476 {
4477 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4478 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4479 nz_elts += mult * nz;
4480 nc_elts += mult * nc;
4481 elt_count += mult * ic;
4482 }
4483 break;
4484
4485 case INTEGER_CST:
4486 case REAL_CST:
4487 if (!initializer_zerop (value))
4488 nz_elts += mult;
4489 elt_count += mult;
4490 break;
4491
4492 case STRING_CST:
4493 nz_elts += mult * TREE_STRING_LENGTH (value);
4494 elt_count += mult * TREE_STRING_LENGTH (value);
4495 break;
4496
4497 case COMPLEX_CST:
4498 if (!initializer_zerop (TREE_REALPART (value)))
4499 nz_elts += mult;
4500 if (!initializer_zerop (TREE_IMAGPART (value)))
4501 nz_elts += mult;
4502 elt_count += mult;
4503 break;
4504
4505 case VECTOR_CST:
4506 {
4507 tree v;
4508 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4509 {
4510 if (!initializer_zerop (TREE_VALUE (v)))
4511 nz_elts += mult;
4512 elt_count += mult;
4513 }
4514 }
4515 break;
4516
4517 default:
4518 nz_elts += mult;
4519 elt_count += mult;
4520 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4521 nc_elts += mult;
4522 break;
4523 }
4524 }
4525
4526 if (!*p_must_clear
4527 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4528 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4529 {
4530 tree init_sub_type;
4531 bool clear_this = true;
4532
4533 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4534 {
4535 /* We don't expect more than one element of the union to be
4536 initialized. Not sure what we should do otherwise... */
4537 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4538 == 1);
4539
4540 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4541 CONSTRUCTOR_ELTS (ctor),
4542 0)->value);
4543
4544 /* ??? We could look at each element of the union, and find the
4545 largest element. Which would avoid comparing the size of the
4546 initialized element against any tail padding in the union.
4547 Doesn't seem worth the effort... */
4548 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4549 TYPE_SIZE (init_sub_type)) == 1)
4550 {
4551 /* And now we have to find out if the element itself is fully
4552 constructed. E.g. for union { struct { int a, b; } s; } u
4553 = { .s = { .a = 1 } }. */
4554 if (elt_count == count_type_elements (init_sub_type, false))
4555 clear_this = false;
4556 }
4557 }
4558
4559 *p_must_clear = clear_this;
4560 }
4561
4562 *p_nz_elts += nz_elts;
4563 *p_nc_elts += nc_elts;
4564 *p_elt_count += elt_count;
4565 }
4566
4567 void
4568 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4569 HOST_WIDE_INT *p_nc_elts,
4570 HOST_WIDE_INT *p_elt_count,
4571 bool *p_must_clear)
4572 {
4573 *p_nz_elts = 0;
4574 *p_nc_elts = 0;
4575 *p_elt_count = 0;
4576 *p_must_clear = false;
4577 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4578 p_must_clear);
4579 }
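
#if 0
/* Editorial example (not part of expr.c): for the initializer below,
   categorize_ctor_elements reports *p_elt_count == 4, *p_nz_elts == 2
   (the 3 and the 7), and *p_nc_elts == 0 since every element is a
   constant. */
int v[4] = { 0, 3, 0, 7 };
#endif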
4580
4581 /* Count the number of scalars in TYPE. Return -1 on overflow or
4582 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4583 flexible array member at the end of the structure. */
4584
4585 HOST_WIDE_INT
4586 count_type_elements (tree type, bool allow_flexarr)
4587 {
4588 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4589 switch (TREE_CODE (type))
4590 {
4591 case ARRAY_TYPE:
4592 {
4593 tree telts = array_type_nelts (type);
4594 if (telts && host_integerp (telts, 1))
4595 {
4596 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4597 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4598 if (n == 0)
4599 return 0;
4600 else if (max / n > m)
4601 return n * m;
4602 }
4603 return -1;
4604 }
4605
4606 case RECORD_TYPE:
4607 {
4608 HOST_WIDE_INT n = 0, t;
4609 tree f;
4610
4611 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4612 if (TREE_CODE (f) == FIELD_DECL)
4613 {
4614 t = count_type_elements (TREE_TYPE (f), false);
4615 if (t < 0)
4616 {
4617 /* Check for structures with flexible array member. */
4618 tree tf = TREE_TYPE (f);
4619 if (allow_flexarr
4620 && TREE_CHAIN (f) == NULL
4621 && TREE_CODE (tf) == ARRAY_TYPE
4622 && TYPE_DOMAIN (tf)
4623 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4624 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4625 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4626 && int_size_in_bytes (type) >= 0)
4627 break;
4628
4629 return -1;
4630 }
4631 n += t;
4632 }
4633
4634 return n;
4635 }
4636
4637 case UNION_TYPE:
4638 case QUAL_UNION_TYPE:
4639 {
4640 /* Ho hum. How in the world do we guess here? Clearly it isn't
4641 right to count the fields. Guess based on the number of words. */
4642 HOST_WIDE_INT n = int_size_in_bytes (type);
4643 if (n < 0)
4644 return -1;
4645 return n / UNITS_PER_WORD;
4646 }
4647
4648 case COMPLEX_TYPE:
4649 return 2;
4650
4651 case VECTOR_TYPE:
4652 return TYPE_VECTOR_SUBPARTS (type);
4653
4654 case INTEGER_TYPE:
4655 case REAL_TYPE:
4656 case ENUMERAL_TYPE:
4657 case BOOLEAN_TYPE:
4658 case CHAR_TYPE:
4659 case POINTER_TYPE:
4660 case OFFSET_TYPE:
4661 case REFERENCE_TYPE:
4662 return 1;
4663
4664 case VOID_TYPE:
4665 case METHOD_TYPE:
4666 case FUNCTION_TYPE:
4667 case LANG_TYPE:
4668 default:
4669 gcc_unreachable ();
4670 }
4671 }
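
#if 0
/* Editorial example (not part of expr.c): scalar counts computed by
   count_type_elements for a few types. */
struct point { int x, y; };		/* RECORD_TYPE: 2 scalars. */
struct tri { struct point v[3]; };	/* ARRAY_TYPE: 3 * 2 == 6. */
_Complex double z;			/* COMPLEX_TYPE: 2. */
#endif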
4672
4673 /* Return 1 if EXP contains mostly (3/4) zeros. */
4674
4675 static int
4676 mostly_zeros_p (tree exp)
4677 {
4678 if (TREE_CODE (exp) == CONSTRUCTOR)
4679
4680 {
4681 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4682 bool must_clear;
4683
4684 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4685 if (must_clear)
4686 return 1;
4687
4688 elts = count_type_elements (TREE_TYPE (exp), false);
4689
4690 return nz_elts < elts / 4;
4691 }
4692
4693 return initializer_zerop (exp);
4694 }
4695
4696 /* Return 1 if EXP contains all zeros. */
4697
4698 static int
4699 all_zeros_p (tree exp)
4700 {
4701 if (TREE_CODE (exp) == CONSTRUCTOR)
4702
4703 {
4704 HOST_WIDE_INT nz_elts, nc_elts, count;
4705 bool must_clear;
4706
4707 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4708 return nz_elts == 0;
4709 }
4710
4711 return initializer_zerop (exp);
4712 }
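
#if 0
/* Editorial examples (not part of expr.c): the first initializer is
   mostly zeros (1 nonzero element < 8 / 4), the second is all zeros,
   and the third is neither. */
int a[8] = { 0, 0, 0, 0, 0, 0, 0, 5 };
int b[8] = { 0 };
int c[4] = { 1, 2, 3, 4 };
#endif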
4713
4714 /* Helper function for store_constructor.
4715 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4716 TYPE is the type of the CONSTRUCTOR, not the element type.
4717 CLEARED is as for store_constructor.
4718 ALIAS_SET is the alias set to use for any stores.
4719
4720 This provides a recursive shortcut back to store_constructor when it isn't
4721 necessary to go through store_field. This is so that we can pass through
4722 the cleared field to let store_constructor know that we may not have to
4723 clear a substructure if the outer structure has already been cleared. */
4724
4725 static void
4726 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4727 HOST_WIDE_INT bitpos, enum machine_mode mode,
4728 tree exp, tree type, int cleared, int alias_set)
4729 {
4730 if (TREE_CODE (exp) == CONSTRUCTOR
4731 /* We can only call store_constructor recursively if the size and
4732 bit position are on a byte boundary. */
4733 && bitpos % BITS_PER_UNIT == 0
4734 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4735 /* If we have a nonzero bitpos for a register target, then we just
4736 let store_field do the bitfield handling. This is unlikely to
4737 generate unnecessary clear instructions anyway. */
4738 && (bitpos == 0 || MEM_P (target)))
4739 {
4740 if (MEM_P (target))
4741 target
4742 = adjust_address (target,
4743 GET_MODE (target) == BLKmode
4744 || 0 != (bitpos
4745 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4746 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4747
4748
4749 /* Update the alias set, if required. */
4750 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4751 && MEM_ALIAS_SET (target) != 0)
4752 {
4753 target = copy_rtx (target);
4754 set_mem_alias_set (target, alias_set);
4755 }
4756
4757 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4758 }
4759 else
4760 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4761 }
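
/* Editorial example (not in the original source): initializing a local
   variable such as

     struct outer { struct inner { int a, b; } in; int c; } o
       = { { 1, 2 }, 3 };

   recurses from store_constructor through store_constructor_field back
   into store_constructor for the nested aggregate, passing CLEARED
   through so the inner { 1, 2 } does not re-clear storage already
   zeroed for the whole object. */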
4762
4763 /* Store the value of constructor EXP into the rtx TARGET.
4764 TARGET is either a REG or a MEM; we know it cannot conflict, since
4765 safe_from_p has been called.
4766 CLEARED is true if TARGET is known to have been zero'd.
4767 SIZE is the number of bytes of TARGET we are allowed to modify: this
4768 may not be the same as the size of EXP if we are assigning to a field
4769 which has been packed to exclude padding bits. */
4770
4771 static void
4772 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4773 {
4774 tree type = TREE_TYPE (exp);
4775 #ifdef WORD_REGISTER_OPERATIONS
4776 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4777 #endif
4778
4779 switch (TREE_CODE (type))
4780 {
4781 case RECORD_TYPE:
4782 case UNION_TYPE:
4783 case QUAL_UNION_TYPE:
4784 {
4785 unsigned HOST_WIDE_INT idx;
4786 tree field, value;
4787
4788 /* If size is zero or the target is already cleared, do nothing. */
4789 if (size == 0 || cleared)
4790 cleared = 1;
4791 /* We either clear the aggregate or indicate the value is dead. */
4792 else if ((TREE_CODE (type) == UNION_TYPE
4793 || TREE_CODE (type) == QUAL_UNION_TYPE)
4794 && ! CONSTRUCTOR_ELTS (exp))
4795 /* If the constructor is empty, clear the union. */
4796 {
4797 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4798 cleared = 1;
4799 }
4800
4801 /* If we are building a static constructor into a register,
4802 set the initial value as zero so we can fold the value into
4803 a constant. But if more than one register is involved,
4804 this probably loses. */
4805 else if (REG_P (target) && TREE_STATIC (exp)
4806 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4807 {
4808 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4809 cleared = 1;
4810 }
4811
4812 /* If the constructor has fewer fields than the structure or
4813 if we are initializing the structure to mostly zeros, clear
4814 the whole structure first. Don't do this if TARGET is a
4815 register whose mode size isn't equal to SIZE since
4816 clear_storage can't handle this case. */
4817 else if (size > 0
4818 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4819 != fields_length (type))
4820 || mostly_zeros_p (exp))
4821 && (!REG_P (target)
4822 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4823 == size)))
4824 {
4825 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4826 cleared = 1;
4827 }
4828
4829 if (! cleared)
4830 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4831
4832 /* Store each element of the constructor into the
4833 corresponding field of TARGET. */
4834 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4835 {
4836 enum machine_mode mode;
4837 HOST_WIDE_INT bitsize;
4838 HOST_WIDE_INT bitpos = 0;
4839 tree offset;
4840 rtx to_rtx = target;
4841
4842 /* Just ignore missing fields. We cleared the whole
4843 structure, above, if any fields are missing. */
4844 if (field == 0)
4845 continue;
4846
4847 if (cleared && initializer_zerop (value))
4848 continue;
4849
4850 if (host_integerp (DECL_SIZE (field), 1))
4851 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4852 else
4853 bitsize = -1;
4854
4855 mode = DECL_MODE (field);
4856 if (DECL_BIT_FIELD (field))
4857 mode = VOIDmode;
4858
4859 offset = DECL_FIELD_OFFSET (field);
4860 if (host_integerp (offset, 0)
4861 && host_integerp (bit_position (field), 0))
4862 {
4863 bitpos = int_bit_position (field);
4864 offset = 0;
4865 }
4866 else
4867 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4868
4869 if (offset)
4870 {
4871 rtx offset_rtx;
4872
4873 offset
4874 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4875 make_tree (TREE_TYPE (exp),
4876 target));
4877
4878 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4879 gcc_assert (MEM_P (to_rtx));
4880
4881 #ifdef POINTERS_EXTEND_UNSIGNED
4882 if (GET_MODE (offset_rtx) != Pmode)
4883 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4884 #else
4885 if (GET_MODE (offset_rtx) != ptr_mode)
4886 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4887 #endif
4888
4889 to_rtx = offset_address (to_rtx, offset_rtx,
4890 highest_pow2_factor (offset));
4891 }
4892
4893 #ifdef WORD_REGISTER_OPERATIONS
4894 /* If this initializes a field that is smaller than a
4895 word, at the start of a word, try to widen it to a full
4896 word. This special case allows us to output C++ member
4897 function initializations in a form that the optimizers
4898 can understand. */
4899 if (REG_P (target)
4900 && bitsize < BITS_PER_WORD
4901 && bitpos % BITS_PER_WORD == 0
4902 && GET_MODE_CLASS (mode) == MODE_INT
4903 && TREE_CODE (value) == INTEGER_CST
4904 && exp_size >= 0
4905 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4906 {
4907 tree type = TREE_TYPE (value);
4908
4909 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4910 {
4911 type = lang_hooks.types.type_for_size
4912 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4913 value = convert (type, value);
4914 }
4915
4916 if (BYTES_BIG_ENDIAN)
4917 value
4918 = fold_build2 (LSHIFT_EXPR, type, value,
4919 build_int_cst (NULL_TREE,
4920 BITS_PER_WORD - bitsize));
4921 bitsize = BITS_PER_WORD;
4922 mode = word_mode;
4923 }
4924 #endif
4925
4926 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4927 && DECL_NONADDRESSABLE_P (field))
4928 {
4929 to_rtx = copy_rtx (to_rtx);
4930 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4931 }
4932
4933 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4934 value, type, cleared,
4935 get_alias_set (TREE_TYPE (field)));
4936 }
4937 break;
4938 }
4939 case ARRAY_TYPE:
4940 {
4941 tree value, index;
4942 unsigned HOST_WIDE_INT i;
4943 int need_to_clear;
4944 tree domain;
4945 tree elttype = TREE_TYPE (type);
4946 int const_bounds_p;
4947 HOST_WIDE_INT minelt = 0;
4948 HOST_WIDE_INT maxelt = 0;
4949
4950 domain = TYPE_DOMAIN (type);
4951 const_bounds_p = (TYPE_MIN_VALUE (domain)
4952 && TYPE_MAX_VALUE (domain)
4953 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4954 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4955
4956 /* If we have constant bounds for the range of the type, get them. */
4957 if (const_bounds_p)
4958 {
4959 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4960 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4961 }
4962
4963 /* If the constructor has fewer elements than the array, clear
4964 the whole array first. Similarly if this is a static
4965 constructor of a non-BLKmode object. */
4966 if (cleared)
4967 need_to_clear = 0;
4968 else if (REG_P (target) && TREE_STATIC (exp))
4969 need_to_clear = 1;
4970 else
4971 {
4972 unsigned HOST_WIDE_INT idx;
4973 tree index, value;
4974 HOST_WIDE_INT count = 0, zero_count = 0;
4975 need_to_clear = ! const_bounds_p;
4976
4977 /* This loop is a more accurate version of the loop in
4978 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4979 is also needed to check for missing elements. */
4980 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
4981 {
4982 HOST_WIDE_INT this_node_count;
4983
4984 if (need_to_clear)
4985 break;
4986
4987 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4988 {
4989 tree lo_index = TREE_OPERAND (index, 0);
4990 tree hi_index = TREE_OPERAND (index, 1);
4991
4992 if (! host_integerp (lo_index, 1)
4993 || ! host_integerp (hi_index, 1))
4994 {
4995 need_to_clear = 1;
4996 break;
4997 }
4998
4999 this_node_count = (tree_low_cst (hi_index, 1)
5000 - tree_low_cst (lo_index, 1) + 1);
5001 }
5002 else
5003 this_node_count = 1;
5004
5005 count += this_node_count;
5006 if (mostly_zeros_p (value))
5007 zero_count += this_node_count;
5008 }
5009
5010 /* Clear the entire array first if there are any missing
5011 elements, or if the incidence of zero elements is >=
5012 75%. */
5013 if (! need_to_clear
5014 && (count < maxelt - minelt + 1
5015 || 4 * zero_count >= 3 * count))
5016 need_to_clear = 1;
5017 }
5018
5019 if (need_to_clear && size > 0)
5020 {
5021 if (REG_P (target))
5022 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5023 else
5024 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5025 cleared = 1;
5026 }
5027
5028 if (!cleared && REG_P (target))
5029 /* Inform later passes that the old value is dead. */
5030 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5031
5032 /* Store each element of the constructor into the
5033 corresponding element of TARGET, determined by counting the
5034 elements. */
5035 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5036 {
5037 enum machine_mode mode;
5038 HOST_WIDE_INT bitsize;
5039 HOST_WIDE_INT bitpos;
5040 int unsignedp;
5041 rtx xtarget = target;
5042
5043 if (cleared && initializer_zerop (value))
5044 continue;
5045
5046 unsignedp = TYPE_UNSIGNED (elttype);
5047 mode = TYPE_MODE (elttype);
5048 if (mode == BLKmode)
5049 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5050 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5051 : -1);
5052 else
5053 bitsize = GET_MODE_BITSIZE (mode);
5054
5055 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5056 {
5057 tree lo_index = TREE_OPERAND (index, 0);
5058 tree hi_index = TREE_OPERAND (index, 1);
5059 rtx index_r, pos_rtx;
5060 HOST_WIDE_INT lo, hi, count;
5061 tree position;
5062
5063 /* If the range is constant and "small", unroll the loop. */
5064 if (const_bounds_p
5065 && host_integerp (lo_index, 0)
5066 && host_integerp (hi_index, 0)
5067 && (lo = tree_low_cst (lo_index, 0),
5068 hi = tree_low_cst (hi_index, 0),
5069 count = hi - lo + 1,
5070 (!MEM_P (target)
5071 || count <= 2
5072 || (host_integerp (TYPE_SIZE (elttype), 1)
5073 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5074 <= 40 * 8)))))
5075 {
5076 lo -= minelt; hi -= minelt;
5077 for (; lo <= hi; lo++)
5078 {
5079 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5080
5081 if (MEM_P (target)
5082 && !MEM_KEEP_ALIAS_SET_P (target)
5083 && TREE_CODE (type) == ARRAY_TYPE
5084 && TYPE_NONALIASED_COMPONENT (type))
5085 {
5086 target = copy_rtx (target);
5087 MEM_KEEP_ALIAS_SET_P (target) = 1;
5088 }
5089
5090 store_constructor_field
5091 (target, bitsize, bitpos, mode, value, type, cleared,
5092 get_alias_set (elttype));
5093 }
5094 }
5095 else
5096 {
5097 rtx loop_start = gen_label_rtx ();
5098 rtx loop_end = gen_label_rtx ();
5099 tree exit_cond;
5100
5101 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5102 unsignedp = TYPE_UNSIGNED (domain);
5103
5104 index = build_decl (VAR_DECL, NULL_TREE, domain);
5105
5106 index_r
5107 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5108 &unsignedp, 0));
5109 SET_DECL_RTL (index, index_r);
5110 store_expr (lo_index, index_r, 0);
5111
5112 /* Build the head of the loop. */
5113 do_pending_stack_adjust ();
5114 emit_label (loop_start);
5115
5116 /* Assign value to element index. */
5117 position
5118 = convert (ssizetype,
5119 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5120 index, TYPE_MIN_VALUE (domain)));
5121 position = size_binop (MULT_EXPR, position,
5122 convert (ssizetype,
5123 TYPE_SIZE_UNIT (elttype)));
5124
5125 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5126 xtarget = offset_address (target, pos_rtx,
5127 highest_pow2_factor (position));
5128 xtarget = adjust_address (xtarget, mode, 0);
5129 if (TREE_CODE (value) == CONSTRUCTOR)
5130 store_constructor (value, xtarget, cleared,
5131 bitsize / BITS_PER_UNIT);
5132 else
5133 store_expr (value, xtarget, 0);
5134
5135 /* Generate a conditional jump to exit the loop. */
5136 exit_cond = build2 (LT_EXPR, integer_type_node,
5137 index, hi_index);
5138 jumpif (exit_cond, loop_end);
5139
5140 /* Update the loop counter, and jump to the head of
5141 the loop. */
5142 expand_assignment (index,
5143 build2 (PLUS_EXPR, TREE_TYPE (index),
5144 index, integer_one_node));
5145
5146 emit_jump (loop_start);
5147
5148 /* Build the end of the loop. */
5149 emit_label (loop_end);
5150 }
5151 }
5152 else if ((index != 0 && ! host_integerp (index, 0))
5153 || ! host_integerp (TYPE_SIZE (elttype), 1))
5154 {
5155 tree position;
5156
5157 if (index == 0)
5158 index = ssize_int (1);
5159
5160 if (minelt)
5161 index = fold_convert (ssizetype,
5162 fold_build2 (MINUS_EXPR,
5163 TREE_TYPE (index),
5164 index,
5165 TYPE_MIN_VALUE (domain)));
5166
5167 position = size_binop (MULT_EXPR, index,
5168 convert (ssizetype,
5169 TYPE_SIZE_UNIT (elttype)));
5170 xtarget = offset_address (target,
5171 expand_expr (position, 0, VOIDmode, 0),
5172 highest_pow2_factor (position));
5173 xtarget = adjust_address (xtarget, mode, 0);
5174 store_expr (value, xtarget, 0);
5175 }
5176 else
5177 {
5178 if (index != 0)
5179 bitpos = ((tree_low_cst (index, 0) - minelt)
5180 * tree_low_cst (TYPE_SIZE (elttype), 1));
5181 else
5182 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5183
5184 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5185 && TREE_CODE (type) == ARRAY_TYPE
5186 && TYPE_NONALIASED_COMPONENT (type))
5187 {
5188 target = copy_rtx (target);
5189 MEM_KEEP_ALIAS_SET_P (target) = 1;
5190 }
5191 store_constructor_field (target, bitsize, bitpos, mode, value,
5192 type, cleared, get_alias_set (elttype));
5193 }
5194 }
5195 break;
5196 }
5197
5198 case VECTOR_TYPE:
5199 {
5200 unsigned HOST_WIDE_INT idx;
5201 constructor_elt *ce;
5202 int i;
5203 int need_to_clear;
5204 int icode = 0;
5205 tree elttype = TREE_TYPE (type);
5206 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5207 enum machine_mode eltmode = TYPE_MODE (elttype);
5208 HOST_WIDE_INT bitsize;
5209 HOST_WIDE_INT bitpos;
5210 rtvec vector = NULL;
5211 unsigned n_elts;
5212
5213 gcc_assert (eltmode != BLKmode);
5214
5215 n_elts = TYPE_VECTOR_SUBPARTS (type);
5216 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5217 {
5218 enum machine_mode mode = GET_MODE (target);
5219
5220 icode = (int) vec_init_optab->handlers[mode].insn_code;
5221 if (icode != CODE_FOR_nothing)
5222 {
5223 unsigned int i;
5224
5225 vector = rtvec_alloc (n_elts);
5226 for (i = 0; i < n_elts; i++)
5227 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5228 }
5229 }
5230
5231 	/* If the constructor has fewer elements than the vector,
5232 	   clear the whole vector first.  Similarly if this is a static
5233 	   constructor of a non-BLKmode object.  */
5234 if (cleared)
5235 need_to_clear = 0;
5236 else if (REG_P (target) && TREE_STATIC (exp))
5237 need_to_clear = 1;
5238 else
5239 {
5240 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5241 tree value;
5242
5243 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5244 {
5245 int n_elts_here = tree_low_cst
5246 (int_const_binop (TRUNC_DIV_EXPR,
5247 TYPE_SIZE (TREE_TYPE (value)),
5248 TYPE_SIZE (elttype), 0), 1);
5249
5250 count += n_elts_here;
5251 if (mostly_zeros_p (value))
5252 zero_count += n_elts_here;
5253 }
5254
5255 /* Clear the entire vector first if there are any missing elements,
5256 or if the incidence of zero elements is >= 75%. */
5257 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5258 }
5259
5260 if (need_to_clear && size > 0 && !vector)
5261 {
5262 if (REG_P (target))
5263 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5264 else
5265 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5266 cleared = 1;
5267 }
5268
5269 /* Inform later passes that the old value is dead. */
5270 if (!cleared && REG_P (target))
5271 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5272
5273 /* Store each element of the constructor into the corresponding
5274 element of TARGET, determined by counting the elements. */
5275 for (idx = 0, i = 0;
5276 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5277 idx++, i += bitsize / elt_size)
5278 {
5279 HOST_WIDE_INT eltpos;
5280 tree value = ce->value;
5281
5282 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5283 if (cleared && initializer_zerop (value))
5284 continue;
5285
5286 if (ce->index)
5287 eltpos = tree_low_cst (ce->index, 1);
5288 else
5289 eltpos = i;
5290
5291 if (vector)
5292 {
5293 /* Vector CONSTRUCTORs should only be built from smaller
5294 vectors in the case of BLKmode vectors. */
5295 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5296 RTVEC_ELT (vector, eltpos)
5297 = expand_expr (value, NULL_RTX, VOIDmode, 0);
5298 }
5299 else
5300 {
5301 enum machine_mode value_mode =
5302 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5303 ? TYPE_MODE (TREE_TYPE (value))
5304 : eltmode;
5305 bitpos = eltpos * elt_size;
5306 store_constructor_field (target, bitsize, bitpos,
5307 value_mode, value, type,
5308 cleared, get_alias_set (elttype));
5309 }
5310 }
5311
5312 if (vector)
5313 emit_insn (GEN_FCN (icode)
5314 (target,
5315 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5316 break;
5317 }
5318
5319 default:
5320 gcc_unreachable ();
5321 }
5322 }
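
/* An illustrative note on the RANGE_EXPR trade-off above (hypothetical
   example; the designated-range syntax is a GNU C extension):

       int a[8] = { [0 ... 7] = x };

   Here the bounds are constant and, assuming 4-byte ints, the eight
   elements total 32 bytes, under the 40 * 8 bit unrolling limit, so
   eight individual stores are emitted.  A range covering, say, 1000
   such elements exceeds the limit and takes the loop_start/loop_end
   path instead: the index lives in a pseudo register and is bumped
   until it passes hi_index.  */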
5323
5324 /* Store the value of EXP (an expression tree)
5325 into a subfield of TARGET which has mode MODE and occupies
5326 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5327 If MODE is VOIDmode, it means that we are storing into a bit-field.
5328
5329 Always return const0_rtx unless we have something particular to
5330 return.
5331
5332 TYPE is the type of the underlying object,
5333
5334 ALIAS_SET is the alias set for the destination. This value will
5335 (in general) be different from that for TARGET, since TARGET is a
5336 reference to the containing structure. */
5337
5338 static rtx
5339 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5340 enum machine_mode mode, tree exp, tree type, int alias_set)
5341 {
5342 HOST_WIDE_INT width_mask = 0;
5343
5344 if (TREE_CODE (exp) == ERROR_MARK)
5345 return const0_rtx;
5346
5347 /* If we have nothing to store, do nothing unless the expression has
5348 side-effects. */
5349 if (bitsize == 0)
5350 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5351 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5352 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5353
5354 /* If we are storing into an unaligned field of an aligned union that is
5355 in a register, we may have the mode of TARGET being an integer mode but
5356 MODE == BLKmode. In that case, get an aligned object whose size and
5357 alignment are the same as TARGET and store TARGET into it (we can avoid
5358 the store if the field being stored is the entire width of TARGET). Then
5359 call ourselves recursively to store the field into a BLKmode version of
5360 that object. Finally, load from the object into TARGET. This is not
5361 very efficient in general, but should only be slightly more expensive
5362 than the otherwise-required unaligned accesses. Perhaps this can be
5363 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5364 twice, once with emit_move_insn and once via store_field. */
5365
5366 if (mode == BLKmode
5367 && (REG_P (target) || GET_CODE (target) == SUBREG))
5368 {
5369 rtx object = assign_temp (type, 0, 1, 1);
5370 rtx blk_object = adjust_address (object, BLKmode, 0);
5371
5372 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5373 emit_move_insn (object, target);
5374
5375 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5376
5377 emit_move_insn (target, object);
5378
5379 /* We want to return the BLKmode version of the data. */
5380 return blk_object;
5381 }
5382
5383 if (GET_CODE (target) == CONCAT)
5384 {
5385 /* We're storing into a struct containing a single __complex. */
5386
5387 gcc_assert (!bitpos);
5388 return store_expr (exp, target, 0);
5389 }
5390
5391 /* If the structure is in a register or if the component
5392 is a bit field, we cannot use addressing to access it.
5393 Use bit-field techniques or SUBREG to store in it. */
5394
5395 if (mode == VOIDmode
5396 || (mode != BLKmode && ! direct_store[(int) mode]
5397 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5398 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5399 || REG_P (target)
5400 || GET_CODE (target) == SUBREG
5401 /* If the field isn't aligned enough to store as an ordinary memref,
5402 store it as a bit field. */
5403 || (mode != BLKmode
5404 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5405 || bitpos % GET_MODE_ALIGNMENT (mode))
5406 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5407 || (bitpos % BITS_PER_UNIT != 0)))
5408 /* If the RHS and field are a constant size and the size of the
5409 RHS isn't the same size as the bitfield, we must use bitfield
5410 operations. */
5411 || (bitsize >= 0
5412 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5413 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5414 {
5415 rtx temp;
5416
5417 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5418 implies a mask operation. If the precision is the same size as
5419 the field we're storing into, that mask is redundant. This is
5420 particularly common with bit field assignments generated by the
5421 C front end. */
5422 if (TREE_CODE (exp) == NOP_EXPR)
5423 {
5424 tree type = TREE_TYPE (exp);
5425 if (INTEGRAL_TYPE_P (type)
5426 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5427 && bitsize == TYPE_PRECISION (type))
5428 {
5429 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5430 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5431 exp = TREE_OPERAND (exp, 0);
5432 }
5433 }
5434
5435 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5436
5437 /* If BITSIZE is narrower than the size of the type of EXP
5438 we will be narrowing TEMP. Normally, what's wanted are the
5439 	 low-order bits.  However, if EXP's type is a record and this is a
5440 big-endian machine, we want the upper BITSIZE bits. */
5441 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5442 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5443 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5444 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5445 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5446 - bitsize),
5447 NULL_RTX, 1);
5448
5449 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5450 MODE. */
5451 if (mode != VOIDmode && mode != BLKmode
5452 && mode != TYPE_MODE (TREE_TYPE (exp)))
5453 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5454
5455 /* If the modes of TARGET and TEMP are both BLKmode, both
5456 must be in memory and BITPOS must be aligned on a byte
5457 boundary. If so, we simply do a block copy. */
5458 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5459 {
5460 gcc_assert (MEM_P (target) && MEM_P (temp)
5461 && !(bitpos % BITS_PER_UNIT));
5462
5463 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5464 emit_block_move (target, temp,
5465 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5466 / BITS_PER_UNIT),
5467 BLOCK_OP_NORMAL);
5468
5469 return const0_rtx;
5470 }
5471
5472 /* Store the value in the bitfield. */
5473 store_bit_field (target, bitsize, bitpos, mode, temp);
5474
5475 return const0_rtx;
5476 }
5477 else
5478 {
5479 /* Now build a reference to just the desired component. */
5480 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5481
5482 if (to_rtx == target)
5483 to_rtx = copy_rtx (to_rtx);
5484
5485 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5486 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5487 set_mem_alias_set (to_rtx, alias_set);
5488
5489 return store_expr (exp, to_rtx, 0);
5490 }
5491 }
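
/* As a concrete illustration (hypothetical source fragment): for an
   assignment to a C bit-field,

       struct { unsigned int f : 3; } s;
       s.f = v;

   store_field is entered with MODE == VOIDmode and BITSIZE == 3, so the
   bit-field branch above is taken: V is expanded into TEMP and the
   store is performed by store_bit_field (target, 3, bitpos, VOIDmode,
   temp).  */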
5492
5493 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5494 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5495 codes and find the ultimate containing object, which we return.
5496
5497 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5498 bit position, and *PUNSIGNEDP to the signedness of the field.
5499 If the position of the field is variable, we store a tree
5500 giving the variable offset (in units) in *POFFSET.
5501 This offset is in addition to the bit position.
5502 If the position is not variable, we store 0 in *POFFSET.
5503
5504 If any of the extraction expressions is volatile,
5505 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5506
5507 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5508 is a mode that can be used to access the field. In that case, *PBITSIZE
5509 is redundant.
5510
5511 If the field describes a variable-sized object, *PMODE is set to
5512 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5513 this case, but the address of the object can be found.
5514
5515 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5516 look through nodes that serve as markers of a greater alignment than
5517 the one that can be deduced from the expression. These nodes make it
5518 possible for front-ends to prevent temporaries from being created by
5519 the middle-end on alignment considerations. For that purpose, the
5520 normal operating mode at high-level is to always pass FALSE so that
5521 the ultimate containing object is really returned; moreover, the
5522 associated predicate handled_component_p will always return TRUE
5523 on these nodes, thus indicating that they are essentially handled
5524 by get_inner_reference. TRUE should only be passed when the caller
5525 is scanning the expression in order to build another representation
5526 and specifically knows how to handle these nodes; as such, this is
5527 the normal operating mode in the RTL expanders. */
5528
5529 tree
5530 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5531 HOST_WIDE_INT *pbitpos, tree *poffset,
5532 enum machine_mode *pmode, int *punsignedp,
5533 int *pvolatilep, bool keep_aligning)
5534 {
5535 tree size_tree = 0;
5536 enum machine_mode mode = VOIDmode;
5537 tree offset = size_zero_node;
5538 tree bit_offset = bitsize_zero_node;
5539 tree tem;
5540
5541 /* First get the mode, signedness, and size. We do this from just the
5542 outermost expression. */
5543 if (TREE_CODE (exp) == COMPONENT_REF)
5544 {
5545 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5546 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5547 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5548
5549 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5550 }
5551 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5552 {
5553 size_tree = TREE_OPERAND (exp, 1);
5554 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5555 }
5556 else
5557 {
5558 mode = TYPE_MODE (TREE_TYPE (exp));
5559 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5560
5561 if (mode == BLKmode)
5562 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5563 else
5564 *pbitsize = GET_MODE_BITSIZE (mode);
5565 }
5566
5567 if (size_tree != 0)
5568 {
5569 if (! host_integerp (size_tree, 1))
5570 mode = BLKmode, *pbitsize = -1;
5571 else
5572 *pbitsize = tree_low_cst (size_tree, 1);
5573 }
5574
5575 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5576 and find the ultimate containing object. */
5577 while (1)
5578 {
5579 switch (TREE_CODE (exp))
5580 {
5581 case BIT_FIELD_REF:
5582 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5583 TREE_OPERAND (exp, 2));
5584 break;
5585
5586 case COMPONENT_REF:
5587 {
5588 tree field = TREE_OPERAND (exp, 1);
5589 tree this_offset = component_ref_field_offset (exp);
5590
5591 /* If this field hasn't been filled in yet, don't go past it.
5592 This should only happen when folding expressions made during
5593 type construction. */
5594 if (this_offset == 0)
5595 break;
5596
5597 offset = size_binop (PLUS_EXPR, offset, this_offset);
5598 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5599 DECL_FIELD_BIT_OFFSET (field));
5600
5601 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5602 }
5603 break;
5604
5605 case ARRAY_REF:
5606 case ARRAY_RANGE_REF:
5607 {
5608 tree index = TREE_OPERAND (exp, 1);
5609 tree low_bound = array_ref_low_bound (exp);
5610 tree unit_size = array_ref_element_size (exp);
5611
5612 /* We assume all arrays have sizes that are a multiple of a byte.
5613 First subtract the lower bound, if any, in the type of the
5614 index, then convert to sizetype and multiply by the size of
5615 the array element. */
5616 if (! integer_zerop (low_bound))
5617 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5618 index, low_bound);
5619
5620 offset = size_binop (PLUS_EXPR, offset,
5621 size_binop (MULT_EXPR,
5622 convert (sizetype, index),
5623 unit_size));
5624 }
5625 break;
5626
5627 case REALPART_EXPR:
5628 break;
5629
5630 case IMAGPART_EXPR:
5631 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5632 bitsize_int (*pbitsize));
5633 break;
5634
5635 case VIEW_CONVERT_EXPR:
5636 if (keep_aligning && STRICT_ALIGNMENT
5637 && (TYPE_ALIGN (TREE_TYPE (exp))
5638 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5639 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5640 < BIGGEST_ALIGNMENT)
5641 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5642 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5643 goto done;
5644 break;
5645
5646 default:
5647 goto done;
5648 }
5649
5650 /* If any reference in the chain is volatile, the effect is volatile. */
5651 if (TREE_THIS_VOLATILE (exp))
5652 *pvolatilep = 1;
5653
5654 exp = TREE_OPERAND (exp, 0);
5655 }
5656 done:
5657
5658 /* If OFFSET is constant, see if we can return the whole thing as a
5659 constant bit position. Otherwise, split it up. */
5660 if (host_integerp (offset, 0)
5661 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5662 bitsize_unit_node))
5663 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5664 && host_integerp (tem, 0))
5665 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5666 else
5667 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5668
5669 *pmode = mode;
5670 return exp;
5671 }
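
/* A minimal sketch of a caller (hypothetical variable names, mirroring
   the uses later in this file):

       HOST_WIDE_INT bitsize, bitpos;
       tree offset;
       enum machine_mode mode1;
       int unsignedp, volatilep = 0;
       tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep,
                                        false);

   For a.b.c, BASE is the innermost object A, BITPOS accumulates the
   constant bit offsets of B and C, and OFFSET comes back 0; for
   a[i].b, OFFSET instead carries the variable byte offset of a[i] as
   a tree and only the remaining constant part lands in BITPOS.  */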
5672
5673 /* Return a tree of sizetype representing the size, in bytes, of the element
5674 of EXP, an ARRAY_REF. */
5675
5676 tree
5677 array_ref_element_size (tree exp)
5678 {
5679 tree aligned_size = TREE_OPERAND (exp, 3);
5680 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5681
5682 /* If a size was specified in the ARRAY_REF, it's the size measured
5683 in alignment units of the element type. So multiply by that value. */
5684 if (aligned_size)
5685 {
5686 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5687 sizetype from another type of the same width and signedness. */
5688 if (TREE_TYPE (aligned_size) != sizetype)
5689 aligned_size = fold_convert (sizetype, aligned_size);
5690 return size_binop (MULT_EXPR, aligned_size,
5691 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5692 }
5693
5694 /* Otherwise, take the size from that of the element type. Substitute
5695 any PLACEHOLDER_EXPR that we have. */
5696 else
5697 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5698 }
5699
5700 /* Return a tree representing the lower bound of the array mentioned in
5701 EXP, an ARRAY_REF. */
5702
5703 tree
5704 array_ref_low_bound (tree exp)
5705 {
5706 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5707
5708 /* If a lower bound is specified in EXP, use it. */
5709 if (TREE_OPERAND (exp, 2))
5710 return TREE_OPERAND (exp, 2);
5711
5712 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5713 substituting for a PLACEHOLDER_EXPR as needed. */
5714 if (domain_type && TYPE_MIN_VALUE (domain_type))
5715 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5716
5717 /* Otherwise, return a zero of the appropriate type. */
5718 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5719 }
5720
5721 /* Return a tree representing the upper bound of the array mentioned in
5722 EXP, an ARRAY_REF. */
5723
5724 tree
5725 array_ref_up_bound (tree exp)
5726 {
5727 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5728
5729 /* If there is a domain type and it has an upper bound, use it, substituting
5730 for a PLACEHOLDER_EXPR as needed. */
5731 if (domain_type && TYPE_MAX_VALUE (domain_type))
5732 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5733
5734 /* Otherwise fail. */
5735 return NULL_TREE;
5736 }
5737
5738 /* Return a tree representing the offset, in bytes, of the field referenced
5739 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5740
5741 tree
5742 component_ref_field_offset (tree exp)
5743 {
5744 tree aligned_offset = TREE_OPERAND (exp, 2);
5745 tree field = TREE_OPERAND (exp, 1);
5746
5747 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5748 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5749 value. */
5750 if (aligned_offset)
5751 {
5752 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5753 sizetype from another type of the same width and signedness. */
5754 if (TREE_TYPE (aligned_offset) != sizetype)
5755 aligned_offset = fold_convert (sizetype, aligned_offset);
5756 return size_binop (MULT_EXPR, aligned_offset,
5757 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5758 }
5759
5760 /* Otherwise, take the offset from that of the field. Substitute
5761 any PLACEHOLDER_EXPR that we have. */
5762 else
5763 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5764 }
5765
5766 /* Return 1 if T is an expression that get_inner_reference handles. */
5767
5768 int
5769 handled_component_p (tree t)
5770 {
5771 switch (TREE_CODE (t))
5772 {
5773 case BIT_FIELD_REF:
5774 case COMPONENT_REF:
5775 case ARRAY_REF:
5776 case ARRAY_RANGE_REF:
5777 case VIEW_CONVERT_EXPR:
5778 case REALPART_EXPR:
5779 case IMAGPART_EXPR:
5780 return 1;
5781
5782 default:
5783 return 0;
5784 }
5785 }
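
/* Typical use (a sketch): strip every reference wrapper that
   get_inner_reference understands to reach the base object:

       while (handled_component_p (t))
         t = TREE_OPERAND (t, 0);
*/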
5786
5787 /* Given an rtx VALUE that may contain additions and multiplications, return
5788 an equivalent value that just refers to a register, memory, or constant.
5789 This is done by generating instructions to perform the arithmetic and
5790 returning a pseudo-register containing the value.
5791
5792 The returned value may be a REG, SUBREG, MEM or constant. */
5793
5794 rtx
5795 force_operand (rtx value, rtx target)
5796 {
5797 rtx op1, op2;
5798 /* Use subtarget as the target for operand 0 of a binary operation. */
5799 rtx subtarget = get_subtarget (target);
5800 enum rtx_code code = GET_CODE (value);
5801
5802   /* Check for a subreg applied to an expression produced by the loop optimizer.  */
5803 if (code == SUBREG
5804 && !REG_P (SUBREG_REG (value))
5805 && !MEM_P (SUBREG_REG (value)))
5806 {
5807 value = simplify_gen_subreg (GET_MODE (value),
5808 force_reg (GET_MODE (SUBREG_REG (value)),
5809 force_operand (SUBREG_REG (value),
5810 NULL_RTX)),
5811 GET_MODE (SUBREG_REG (value)),
5812 SUBREG_BYTE (value));
5813 code = GET_CODE (value);
5814 }
5815
5816 /* Check for a PIC address load. */
5817 if ((code == PLUS || code == MINUS)
5818 && XEXP (value, 0) == pic_offset_table_rtx
5819 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5820 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5821 || GET_CODE (XEXP (value, 1)) == CONST))
5822 {
5823 if (!subtarget)
5824 subtarget = gen_reg_rtx (GET_MODE (value));
5825 emit_move_insn (subtarget, value);
5826 return subtarget;
5827 }
5828
5829 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5830 {
5831 if (!target)
5832 target = gen_reg_rtx (GET_MODE (value));
5833 convert_move (target, force_operand (XEXP (value, 0), NULL),
5834 code == ZERO_EXTEND);
5835 return target;
5836 }
5837
5838 if (ARITHMETIC_P (value))
5839 {
5840 op2 = XEXP (value, 1);
5841 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5842 subtarget = 0;
5843 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5844 {
5845 code = PLUS;
5846 op2 = negate_rtx (GET_MODE (value), op2);
5847 }
5848
5849 /* Check for an addition with OP2 a constant integer and our first
5850 operand a PLUS of a virtual register and something else. In that
5851 case, we want to emit the sum of the virtual register and the
5852 constant first and then add the other value. This allows virtual
5853 register instantiation to simply modify the constant rather than
5854 creating another one around this addition. */
5855 if (code == PLUS && GET_CODE (op2) == CONST_INT
5856 && GET_CODE (XEXP (value, 0)) == PLUS
5857 && REG_P (XEXP (XEXP (value, 0), 0))
5858 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5859 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5860 {
5861 rtx temp = expand_simple_binop (GET_MODE (value), code,
5862 XEXP (XEXP (value, 0), 0), op2,
5863 subtarget, 0, OPTAB_LIB_WIDEN);
5864 return expand_simple_binop (GET_MODE (value), code, temp,
5865 force_operand (XEXP (XEXP (value,
5866 0), 1), 0),
5867 target, 0, OPTAB_LIB_WIDEN);
5868 }
5869
5870 op1 = force_operand (XEXP (value, 0), subtarget);
5871 op2 = force_operand (op2, NULL_RTX);
5872 switch (code)
5873 {
5874 case MULT:
5875 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5876 case DIV:
5877 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5878 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5879 target, 1, OPTAB_LIB_WIDEN);
5880 else
5881 return expand_divmod (0,
5882 FLOAT_MODE_P (GET_MODE (value))
5883 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5884 GET_MODE (value), op1, op2, target, 0);
5885 break;
5886 case MOD:
5887 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5888 target, 0);
5889 break;
5890 case UDIV:
5891 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5892 target, 1);
5893 break;
5894 case UMOD:
5895 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5896 target, 1);
5897 break;
5898 case ASHIFTRT:
5899 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5900 target, 0, OPTAB_LIB_WIDEN);
5901 break;
5902 default:
5903 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5904 target, 1, OPTAB_LIB_WIDEN);
5905 }
5906 }
5907 if (UNARY_P (value))
5908 {
5909 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5910 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5911 }
5912
5913 #ifdef INSN_SCHEDULING
5914   /* On machines that have insn scheduling, we want all memory references to be
5915 explicit, so we need to deal with such paradoxical SUBREGs. */
5916 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5917 && (GET_MODE_SIZE (GET_MODE (value))
5918 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5919 value
5920 = simplify_gen_subreg (GET_MODE (value),
5921 force_reg (GET_MODE (SUBREG_REG (value)),
5922 force_operand (SUBREG_REG (value),
5923 NULL_RTX)),
5924 GET_MODE (SUBREG_REG (value)),
5925 SUBREG_BYTE (value));
5926 #endif
5927
5928 return value;
5929 }
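
/* For instance (hypothetical rtl): given ADDR equal to
   (plus (mult (reg R) (const_int 4)) (const_int 8)),

       rtx x = force_operand (addr, NULL_RTX);

   emits a multiply (or shift) followed by an add and returns a pseudo
   register holding R * 4 + 8, which is then valid as a general
   operand.  */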
5930
5931 /* Subroutine of expand_expr: return nonzero iff there is no way that
5932 EXP can reference X, which is being modified. TOP_P is nonzero if this
5933 call is going to be used to determine whether we need a temporary
5934 for EXP, as opposed to a recursive call to this function.
5935
5936 It is always safe for this routine to return zero since it merely
5937 searches for optimization opportunities. */
5938
5939 int
5940 safe_from_p (rtx x, tree exp, int top_p)
5941 {
5942 rtx exp_rtl = 0;
5943 int i, nops;
5944
5945 if (x == 0
5946 /* If EXP has varying size, we MUST use a target since we currently
5947 have no way of allocating temporaries of variable size
5948 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5949 So we assume here that something at a higher level has prevented a
5950 clash. This is somewhat bogus, but the best we can do. Only
5951 do this when X is BLKmode and when we are at the top level. */
5952 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5953 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5954 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5955 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5956 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5957 != INTEGER_CST)
5958 && GET_MODE (x) == BLKmode)
5959 /* If X is in the outgoing argument area, it is always safe. */
5960 || (MEM_P (x)
5961 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5962 || (GET_CODE (XEXP (x, 0)) == PLUS
5963 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5964 return 1;
5965
5966   /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5967 find the underlying pseudo. */
5968 if (GET_CODE (x) == SUBREG)
5969 {
5970 x = SUBREG_REG (x);
5971 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5972 return 0;
5973 }
5974
5975 /* Now look at our tree code and possibly recurse. */
5976 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5977 {
5978 case tcc_declaration:
5979 exp_rtl = DECL_RTL_IF_SET (exp);
5980 break;
5981
5982 case tcc_constant:
5983 return 1;
5984
5985 case tcc_exceptional:
5986 if (TREE_CODE (exp) == TREE_LIST)
5987 {
5988 while (1)
5989 {
5990 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5991 return 0;
5992 exp = TREE_CHAIN (exp);
5993 if (!exp)
5994 return 1;
5995 if (TREE_CODE (exp) != TREE_LIST)
5996 return safe_from_p (x, exp, 0);
5997 }
5998 }
5999 else if (TREE_CODE (exp) == CONSTRUCTOR)
6000 {
6001 constructor_elt *ce;
6002 unsigned HOST_WIDE_INT idx;
6003
6004 for (idx = 0;
6005 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6006 idx++)
6007 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6008 || !safe_from_p (x, ce->value, 0))
6009 return 0;
6010 return 1;
6011 }
6012 else if (TREE_CODE (exp) == ERROR_MARK)
6013 return 1; /* An already-visited SAVE_EXPR? */
6014 else
6015 return 0;
6016
6017 case tcc_statement:
6018 /* The only case we look at here is the DECL_INITIAL inside a
6019 DECL_EXPR. */
6020 return (TREE_CODE (exp) != DECL_EXPR
6021 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6022 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6023 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6024
6025 case tcc_binary:
6026 case tcc_comparison:
6027 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6028 return 0;
6029 /* Fall through. */
6030
6031 case tcc_unary:
6032 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6033
6034 case tcc_expression:
6035 case tcc_reference:
6036 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6037 the expression. If it is set, we conflict iff we are that rtx or
6038 both are in memory. Otherwise, we check all operands of the
6039 expression recursively. */
6040
6041 switch (TREE_CODE (exp))
6042 {
6043 case ADDR_EXPR:
6044 /* If the operand is static or we are static, we can't conflict.
6045 Likewise if we don't conflict with the operand at all. */
6046 if (staticp (TREE_OPERAND (exp, 0))
6047 || TREE_STATIC (exp)
6048 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6049 return 1;
6050
6051 /* Otherwise, the only way this can conflict is if we are taking
6052 	     the address of a DECL whose address is part of X, which is
6053 very rare. */
6054 exp = TREE_OPERAND (exp, 0);
6055 if (DECL_P (exp))
6056 {
6057 if (!DECL_RTL_SET_P (exp)
6058 || !MEM_P (DECL_RTL (exp)))
6059 return 0;
6060 else
6061 exp_rtl = XEXP (DECL_RTL (exp), 0);
6062 }
6063 break;
6064
6065 case MISALIGNED_INDIRECT_REF:
6066 case ALIGN_INDIRECT_REF:
6067 case INDIRECT_REF:
6068 if (MEM_P (x)
6069 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6070 get_alias_set (exp)))
6071 return 0;
6072 break;
6073
6074 case CALL_EXPR:
6075 /* Assume that the call will clobber all hard registers and
6076 all of memory. */
6077 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6078 || MEM_P (x))
6079 return 0;
6080 break;
6081
6082 case WITH_CLEANUP_EXPR:
6083 case CLEANUP_POINT_EXPR:
6084 /* Lowered by gimplify.c. */
6085 gcc_unreachable ();
6086
6087 case SAVE_EXPR:
6088 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6089
6090 default:
6091 break;
6092 }
6093
6094 /* If we have an rtx, we do not need to scan our operands. */
6095 if (exp_rtl)
6096 break;
6097
6098 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6099 for (i = 0; i < nops; i++)
6100 if (TREE_OPERAND (exp, i) != 0
6101 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6102 return 0;
6103
6104 /* If this is a language-specific tree code, it may require
6105 special handling. */
6106 if ((unsigned int) TREE_CODE (exp)
6107 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6108 && !lang_hooks.safe_from_p (x, exp))
6109 return 0;
6110 break;
6111
6112 case tcc_type:
6113 /* Should never get a type here. */
6114 gcc_unreachable ();
6115 }
6116
6117 /* If we have an rtl, find any enclosed object. Then see if we conflict
6118 with it. */
6119 if (exp_rtl)
6120 {
6121 if (GET_CODE (exp_rtl) == SUBREG)
6122 {
6123 exp_rtl = SUBREG_REG (exp_rtl);
6124 if (REG_P (exp_rtl)
6125 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6126 return 0;
6127 }
6128
6129 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6130 are memory and they conflict. */
6131 return ! (rtx_equal_p (x, exp_rtl)
6132 || (MEM_P (x) && MEM_P (exp_rtl)
6133 && true_dependence (exp_rtl, VOIDmode, x,
6134 rtx_addr_varies_p)));
6135 }
6136
6137 /* If we reach here, it is safe. */
6138 return 1;
6139 }
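
/* The canonical use, as in expand_operands below: discard a suggested
   target before expanding an operand that might read it,

       if (! safe_from_p (target, exp1, 1))
         target = 0;

   A zero return only forfeits reusing TARGET; it can never cause
   incorrect code to be generated.  */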
6140
6141
6142 /* Return the highest power of two that EXP is known to be a multiple of.
6143 This is used in updating alignment of MEMs in array references. */
6144
6145 unsigned HOST_WIDE_INT
6146 highest_pow2_factor (tree exp)
6147 {
6148 unsigned HOST_WIDE_INT c0, c1;
6149
6150 switch (TREE_CODE (exp))
6151 {
6152 case INTEGER_CST:
6153 /* We can find the lowest bit that's a one. If the low
6154 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6155 We need to handle this case since we can find it in a COND_EXPR,
6156 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6157 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6158 later ICE. */
6159 if (TREE_CONSTANT_OVERFLOW (exp))
6160 return BIGGEST_ALIGNMENT;
6161 else
6162 {
6163 /* Note: tree_low_cst is intentionally not used here,
6164 we don't care about the upper bits. */
6165 c0 = TREE_INT_CST_LOW (exp);
6166 c0 &= -c0;
6167 return c0 ? c0 : BIGGEST_ALIGNMENT;
6168 }
6169 break;
6170
6171 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6172 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6173 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6174 return MIN (c0, c1);
6175
6176 case MULT_EXPR:
6177 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6178 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6179 return c0 * c1;
6180
6181 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6182 case CEIL_DIV_EXPR:
6183 if (integer_pow2p (TREE_OPERAND (exp, 1))
6184 && host_integerp (TREE_OPERAND (exp, 1), 1))
6185 {
6186 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6187 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6188 return MAX (1, c0 / c1);
6189 }
6190 break;
6191
6192 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6193 case SAVE_EXPR:
6194 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6195
6196 case COMPOUND_EXPR:
6197 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6198
6199 case COND_EXPR:
6200 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6201 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6202 return MIN (c0, c1);
6203
6204 default:
6205 break;
6206 }
6207
6208 return 1;
6209 }
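
/* Worked example: for the offset expression i * 12 + 4, the factor of
   the variable i defaults to 1 while 12 contributes its lowest set
   bit, 4, so the MULT_EXPR case yields 1 * 4 == 4; the PLUS_EXPR case
   then takes MIN (4, 4) and the whole expression is known to be a
   multiple of 4.  */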
6210
6211 /* Similar, except that the alignment requirements of TARGET are
6212 taken into account. Assume it is at least as aligned as its
6213 type, unless it is a COMPONENT_REF in which case the layout of
6214 the structure gives the alignment. */
6215
6216 static unsigned HOST_WIDE_INT
6217 highest_pow2_factor_for_target (tree target, tree exp)
6218 {
6219 unsigned HOST_WIDE_INT target_align, factor;
6220
6221 factor = highest_pow2_factor (exp);
6222 if (TREE_CODE (target) == COMPONENT_REF)
6223 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6224 else
6225 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6226 return MAX (factor, target_align);
6227 }
6228
6229 /* Expands variable VAR. */
6230
6231 void
6232 expand_var (tree var)
6233 {
6234 if (DECL_EXTERNAL (var))
6235 return;
6236
6237 if (TREE_STATIC (var))
6238 /* If this is an inlined copy of a static local variable,
6239 look up the original decl. */
6240 var = DECL_ORIGIN (var);
6241
6242 if (TREE_STATIC (var)
6243 ? !TREE_ASM_WRITTEN (var)
6244 : !DECL_RTL_SET_P (var))
6245 {
6246 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6247 /* Should be ignored. */;
6248 else if (lang_hooks.expand_decl (var))
6249 /* OK. */;
6250 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6251 expand_decl (var);
6252 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6253 rest_of_decl_compilation (var, 0, 0);
6254 else
6255 /* No expansion needed. */
6256 gcc_assert (TREE_CODE (var) == TYPE_DECL
6257 || TREE_CODE (var) == CONST_DECL
6258 || TREE_CODE (var) == FUNCTION_DECL
6259 || TREE_CODE (var) == LABEL_DECL);
6260 }
6261 }
6262
6263 /* Subroutine of expand_expr. Expand the two operands of a binary
6264 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6265 The value may be stored in TARGET if TARGET is nonzero. The
6266 MODIFIER argument is as documented by expand_expr. */
6267
6268 static void
6269 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6270 enum expand_modifier modifier)
6271 {
6272 if (! safe_from_p (target, exp1, 1))
6273 target = 0;
6274 if (operand_equal_p (exp0, exp1, 0))
6275 {
6276 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6277 *op1 = copy_rtx (*op0);
6278 }
6279 else
6280 {
6281 /* If we need to preserve evaluation order, copy exp0 into its own
6282 temporary variable so that it can't be clobbered by exp1. */
6283 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6284 exp0 = save_expr (exp0);
6285 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6286 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6287 }
6288 }
6289
6290
6291 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6292 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6293
6294 static rtx
6295 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6296 enum expand_modifier modifier)
6297 {
6298 rtx result, subtarget;
6299 tree inner, offset;
6300 HOST_WIDE_INT bitsize, bitpos;
6301 int volatilep, unsignedp;
6302 enum machine_mode mode1;
6303
6304 /* If we are taking the address of a constant and are at the top level,
6305 we have to use output_constant_def since we can't call force_const_mem
6306 at top level. */
6307 /* ??? This should be considered a front-end bug. We should not be
6308 generating ADDR_EXPR of something that isn't an LVALUE. The only
6309 exception here is STRING_CST. */
6310 if (TREE_CODE (exp) == CONSTRUCTOR
6311 || CONSTANT_CLASS_P (exp))
6312 return XEXP (output_constant_def (exp, 0), 0);
6313
6314 /* Everything must be something allowed by is_gimple_addressable. */
6315 switch (TREE_CODE (exp))
6316 {
6317 case INDIRECT_REF:
6318 /* This case will happen via recursion for &a->b. */
6319 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6320
6321 case CONST_DECL:
6322 /* Recurse and make the output_constant_def clause above handle this. */
6323 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6324 tmode, modifier);
6325
6326 case REALPART_EXPR:
6327 /* The real part of the complex number is always first, therefore
6328 the address is the same as the address of the parent object. */
6329 offset = 0;
6330 bitpos = 0;
6331 inner = TREE_OPERAND (exp, 0);
6332 break;
6333
6334 case IMAGPART_EXPR:
6335 /* The imaginary part of the complex number is always second.
6336 The expression is therefore always offset by the size of the
6337 scalar type. */
6338 offset = 0;
6339 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6340 inner = TREE_OPERAND (exp, 0);
6341 break;
6342
6343 default:
6344 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6345 	 expand_expr, as that can have various side effects; LABEL_DECLs, for
6346 example, may not have their DECL_RTL set yet. Assume language
6347 specific tree nodes can be expanded in some interesting way. */
6348 if (DECL_P (exp)
6349 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6350 {
6351 result = expand_expr (exp, target, tmode,
6352 modifier == EXPAND_INITIALIZER
6353 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6354
6355 /* If the DECL isn't in memory, then the DECL wasn't properly
6356 marked TREE_ADDRESSABLE, which will be either a front-end
6357 or a tree optimizer bug. */
6358 gcc_assert (MEM_P (result));
6359 result = XEXP (result, 0);
6360
6361 /* ??? Is this needed anymore? */
6362 	  if (DECL_P (exp) && ! TREE_USED (exp))
6363 {
6364 assemble_external (exp);
6365 TREE_USED (exp) = 1;
6366 }
6367
6368 if (modifier != EXPAND_INITIALIZER
6369 && modifier != EXPAND_CONST_ADDRESS)
6370 result = force_operand (result, target);
6371 return result;
6372 }
6373
6374 /* Pass FALSE as the last argument to get_inner_reference although
6375 we are expanding to RTL. The rationale is that we know how to
6376 handle "aligning nodes" here: we can just bypass them because
6377 they won't change the final object whose address will be returned
6378 (they actually exist only for that purpose). */
6379 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6380 &mode1, &unsignedp, &volatilep, false);
6381 break;
6382 }
6383
6384 /* We must have made progress. */
6385 gcc_assert (inner != exp);
6386
6387 subtarget = offset || bitpos ? NULL_RTX : target;
6388 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6389
6390 if (offset)
6391 {
6392 rtx tmp;
6393
6394 if (modifier != EXPAND_NORMAL)
6395 result = force_operand (result, NULL);
6396 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6397
6398 result = convert_memory_address (tmode, result);
6399 tmp = convert_memory_address (tmode, tmp);
6400
6401 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6402 result = gen_rtx_PLUS (tmode, result, tmp);
6403 else
6404 {
6405 subtarget = bitpos ? NULL_RTX : target;
6406 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6407 1, OPTAB_LIB_WIDEN);
6408 }
6409 }
6410
6411 if (bitpos)
6412 {
6413 /* Someone beforehand should have rejected taking the address
6414 of such an object. */
6415 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6416
6417 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6418 if (modifier < EXPAND_SUM)
6419 result = force_operand (result, target);
6420 }
6421
6422 return result;
6423 }
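
/* For example (a sketch): expanding &p->f decomposes the COMPONENT_REF
   via get_inner_reference into the INDIRECT_REF *p plus a constant
   BITPOS; the recursive call then reduces to expanding P itself
   through the INDIRECT_REF case above, and the field offset is added
   back by the plus_constant call at the end of this function.  */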
6424
6425 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6426 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6427
6428 static rtx
6429 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6430 enum expand_modifier modifier)
6431 {
6432 enum machine_mode rmode;
6433 rtx result;
6434
6435 /* Target mode of VOIDmode says "whatever's natural". */
6436 if (tmode == VOIDmode)
6437 tmode = TYPE_MODE (TREE_TYPE (exp));
6438
6439 /* We can get called with some Weird Things if the user does silliness
6440 like "(short) &a". In that case, convert_memory_address won't do
6441 the right thing, so ignore the given target mode. */
6442 if (tmode != Pmode && tmode != ptr_mode)
6443 tmode = Pmode;
6444
6445 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6446 tmode, modifier);
6447
6448   /* Despite expand_expr's claim that TMODE may be ignored when not
6449      strictly convenient, things break if we don't honor it.  Note
6450 that combined with the above, we only do this for pointer modes. */
6451 rmode = GET_MODE (result);
6452 if (rmode == VOIDmode)
6453 rmode = tmode;
6454 if (rmode != tmode)
6455 result = convert_memory_address (tmode, result);
6456
6457 return result;
6458 }
6459
6460
6461 /* expand_expr: generate code for computing expression EXP.
6462 An rtx for the computed value is returned. The value is never null.
6463 In the case of a void EXP, const0_rtx is returned.
6464
6465 The value may be stored in TARGET if TARGET is nonzero.
6466 TARGET is just a suggestion; callers must assume that
6467 the rtx returned may not be the same as TARGET.
6468
6469 If TARGET is CONST0_RTX, it means that the value will be ignored.
6470
6471 If TMODE is not VOIDmode, it suggests generating the
6472 result in mode TMODE. But this is done only when convenient.
6473    Otherwise, TMODE is ignored and the value is generated in its natural mode.
6474 TMODE is just a suggestion; callers must assume that
6475 the rtx returned may not have mode TMODE.
6476
6477 Note that TARGET may have neither TMODE nor MODE. In that case, it
6478 probably will not be used.
6479
6480 If MODIFIER is EXPAND_SUM then when EXP is an addition
6481 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6482 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6483 products as above, or REG or MEM, or constant.
6484 Ordinarily in such cases we would output mul or add instructions
6485 and then return a pseudo reg containing the sum.
6486
6487 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6488 it also marks a label as absolutely required (it can't be dead).
6489 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6490 This is used for outputting expressions used in initializers.
6491
6492 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6493 with a constant address even if that address is not normally legitimate.
6494 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6495
6496 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6497 a call parameter. Such targets require special care as we haven't yet
6498 marked TARGET so that it's safe from being trashed by libcalls. We
6499 don't want to use TARGET for anything but the final result;
6500    intermediate values must go elsewhere.  Additionally, calls to
6501 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6502
6503 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6504 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6505 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6506 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6507 recursively. */
6508
6509 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6510 enum expand_modifier, rtx *);
6511
6512 rtx
6513 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6514 enum expand_modifier modifier, rtx *alt_rtl)
6515 {
6516 int rn = -1;
6517 rtx ret, last = NULL;
6518
6519 /* Handle ERROR_MARK before anybody tries to access its type. */
6520 if (TREE_CODE (exp) == ERROR_MARK
6521 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6522 {
6523 ret = CONST0_RTX (tmode);
6524 return ret ? ret : const0_rtx;
6525 }
6526
6527 if (flag_non_call_exceptions)
6528 {
6529 rn = lookup_stmt_eh_region (exp);
6530 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6531 if (rn >= 0)
6532 last = get_last_insn ();
6533 }
6534
6535 /* If this is an expression of some kind and it has an associated line
6536 number, then emit the line number before expanding the expression.
6537
6538 We need to save and restore the file and line information so that
6539 errors discovered during expansion are emitted with the right
6540      information.  It would be better if the diagnostic routines
6541 used the file/line information embedded in the tree nodes rather
6542 than globals. */
6543 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6544 {
6545 location_t saved_location = input_location;
6546 input_location = EXPR_LOCATION (exp);
6547 emit_line_note (input_location);
6548
6549 /* Record where the insns produced belong. */
6550 record_block_change (TREE_BLOCK (exp));
6551
6552 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6553
6554 input_location = saved_location;
6555 }
6556 else
6557 {
6558 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6559 }
6560
6561 /* If using non-call exceptions, mark all insns that may trap.
6562 expand_call() will mark CALL_INSNs before we get to this code,
6563 but it doesn't handle libcalls, and these may trap. */
6564 if (rn >= 0)
6565 {
6566 rtx insn;
6567 for (insn = next_real_insn (last); insn;
6568 insn = next_real_insn (insn))
6569 {
6570 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6571 /* If we want exceptions for non-call insns, any
6572 may_trap_p instruction may throw. */
6573 && GET_CODE (PATTERN (insn)) != CLOBBER
6574 && GET_CODE (PATTERN (insn)) != USE
6575 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6576 {
6577 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6578 REG_NOTES (insn));
6579 }
6580 }
6581 }
6582
6583 return ret;
6584 }
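
/* A typical call goes through the expand_expr wrapper (see expr.h):

       rtx x = expand_expr (exp, target, mode, EXPAND_NORMAL);

   The caller must treat TARGET and MODE as suggestions only, per the
   commentary above expand_expr_real; with EXPAND_SUM or
   EXPAND_INITIALIZER the result may be a symbolic PLUS/MULT nest
   rather than a pseudo register.  */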
6585
6586 static rtx
6587 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6588 enum expand_modifier modifier, rtx *alt_rtl)
6589 {
6590 rtx op0, op1, temp;
6591 tree type = TREE_TYPE (exp);
6592 int unsignedp;
6593 enum machine_mode mode;
6594 enum tree_code code = TREE_CODE (exp);
6595 optab this_optab;
6596 rtx subtarget, original_target;
6597 int ignore;
6598 tree context;
6599 bool reduce_bit_field = false;
6600 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6601 ? reduce_to_bit_field_precision ((expr), \
6602 target, \
6603 type) \
6604 : (expr))
6605
6606 mode = TYPE_MODE (type);
6607 unsignedp = TYPE_UNSIGNED (type);
6608 if (lang_hooks.reduce_bit_field_operations
6609 && TREE_CODE (type) == INTEGER_TYPE
6610 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6611 {
6612 /* An operation in what may be a bit-field type needs the
6613 result to be reduced to the precision of the bit-field type,
6614 which is narrower than that of the type's mode. */
6615 reduce_bit_field = true;
6616 if (modifier == EXPAND_STACK_PARM)
6617 target = 0;
6618 }
6619
6620 /* Use subtarget as the target for operand 0 of a binary operation. */
6621 subtarget = get_subtarget (target);
6622 original_target = target;
6623 ignore = (target == const0_rtx
6624 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6625 || code == CONVERT_EXPR || code == COND_EXPR
6626 || code == VIEW_CONVERT_EXPR)
6627 && TREE_CODE (type) == VOID_TYPE));
6628
6629 /* If we are going to ignore this result, we need only do something
6630 if there is a side-effect somewhere in the expression. If there
6631 is, short-circuit the most common cases here. Note that we must
6632 not call expand_expr with anything but const0_rtx in case this
6633 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6634
6635 if (ignore)
6636 {
6637 if (! TREE_SIDE_EFFECTS (exp))
6638 return const0_rtx;
6639
6640 /* Ensure we reference a volatile object even if value is ignored, but
6641 don't do this if all we are doing is taking its address. */
6642 if (TREE_THIS_VOLATILE (exp)
6643 && TREE_CODE (exp) != FUNCTION_DECL
6644 && mode != VOIDmode && mode != BLKmode
6645 && modifier != EXPAND_CONST_ADDRESS)
6646 {
6647 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6648 if (MEM_P (temp))
6649 temp = copy_to_reg (temp);
6650 return const0_rtx;
6651 }
6652
6653 if (TREE_CODE_CLASS (code) == tcc_unary
6654 || code == COMPONENT_REF || code == INDIRECT_REF)
6655 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6656 modifier);
6657
6658 else if (TREE_CODE_CLASS (code) == tcc_binary
6659 || TREE_CODE_CLASS (code) == tcc_comparison
6660 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6661 {
6662 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6663 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6664 return const0_rtx;
6665 }
6666 else if (code == BIT_FIELD_REF)
6667 {
6668 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6669 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6670 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6671 return const0_rtx;
6672 }
6673
6674 target = 0;
6675 }
6676
6677
6678 switch (code)
6679 {
6680 case LABEL_DECL:
6681 {
6682 tree function = decl_function_context (exp);
6683
6684 temp = label_rtx (exp);
6685 temp = gen_rtx_LABEL_REF (Pmode, temp);
6686
6687 if (function != current_function_decl
6688 && function != 0)
6689 LABEL_REF_NONLOCAL_P (temp) = 1;
6690
6691 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6692 return temp;
6693 }
6694
6695 case SSA_NAME:
6696 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6697 NULL);
6698
6699 case PARM_DECL:
6700 case VAR_DECL:
6701 /* If a static var's type was incomplete when the decl was written,
6702 but the type is complete now, lay out the decl now. */
6703 if (DECL_SIZE (exp) == 0
6704 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6705 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6706 layout_decl (exp, 0);
6707
6708 /* ... fall through ... */
6709
6710 case FUNCTION_DECL:
6711 case RESULT_DECL:
6712 gcc_assert (DECL_RTL (exp));
6713
6714       /* Ensure the variable is marked as used even if it doesn't go
6715 	 through a parser.  If it hasn't been used yet, write out an
6716 	 external definition.  */
6717 if (! TREE_USED (exp))
6718 {
6719 assemble_external (exp);
6720 TREE_USED (exp) = 1;
6721 }
6722
6723 /* Show we haven't gotten RTL for this yet. */
6724 temp = 0;
6725
6726 /* Variables inherited from containing functions should have
6727 been lowered by this point. */
6728 context = decl_function_context (exp);
6729 gcc_assert (!context
6730 || context == current_function_decl
6731 || TREE_STATIC (exp)
6732 /* ??? C++ creates functions that are not TREE_STATIC. */
6733 || TREE_CODE (exp) == FUNCTION_DECL);
6734
6735 /* This is the case of an array whose size is to be determined
6736 from its initializer, while the initializer is still being parsed.
6737 See expand_decl. */
6738
6739 if (MEM_P (DECL_RTL (exp))
6740 && REG_P (XEXP (DECL_RTL (exp), 0)))
6741 temp = validize_mem (DECL_RTL (exp));
6742
6743 /* If DECL_RTL is memory, we are in the normal case and either
6744 the address is not valid or it is not a register and -fforce-addr
6745 is specified, get the address into a register. */
6746
6747 else if (MEM_P (DECL_RTL (exp))
6748 && modifier != EXPAND_CONST_ADDRESS
6749 && modifier != EXPAND_SUM
6750 && modifier != EXPAND_INITIALIZER
6751 && (! memory_address_p (DECL_MODE (exp),
6752 XEXP (DECL_RTL (exp), 0))
6753 || (flag_force_addr
6754 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6755 {
6756 if (alt_rtl)
6757 *alt_rtl = DECL_RTL (exp);
6758 temp = replace_equiv_address (DECL_RTL (exp),
6759 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6760 }
6761
6762 /* If we got something, return it. But first, set the alignment
6763 if the address is a register. */
6764 if (temp != 0)
6765 {
6766 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6767 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6768
6769 return temp;
6770 }
6771
6772 /* If the mode of DECL_RTL does not match that of the decl, it
6773 must be a promoted value. We return a SUBREG of the wanted mode,
6774 but mark it so that we know that it was already extended. */
6775
6776 if (REG_P (DECL_RTL (exp))
6777 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6778 {
6779 enum machine_mode pmode;
6780
6781 /* Get the signedness used for this variable. Ensure we get the
6782 same mode we got when the variable was declared. */
6783 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6784 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6785 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6786
6787 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6788 SUBREG_PROMOTED_VAR_P (temp) = 1;
6789 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6790 return temp;
6791 }
6792
6793 return DECL_RTL (exp);
6794
6795 case INTEGER_CST:
6796 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6797 TREE_INT_CST_HIGH (exp), mode);
6798
6799 /* ??? If overflow is set, fold will have done an incomplete job,
6800 which can result in (plus xx (const_int 0)), which can get
6801 simplified by validate_replace_rtx during virtual register
6802 instantiation, which can result in unrecognizable insns.
6803 Avoid this by forcing all overflows into registers. */
6804 if (TREE_CONSTANT_OVERFLOW (exp)
6805 && modifier != EXPAND_INITIALIZER)
6806 temp = force_reg (mode, temp);
6807
6808 return temp;
6809
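/* A vector constant whose mode is a vector integer or vector float
   mode can be emitted directly as a CONST_VECTOR; anything else is
   expanded element by element through an equivalent CONSTRUCTOR. */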
6810 case VECTOR_CST:
6811 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6812 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6813 return const_vector_from_tree (exp);
6814 else
6815 return expand_expr (build_constructor_from_list
6816 (TREE_TYPE (exp),
6817 TREE_VECTOR_CST_ELTS (exp)),
6818 ignore ? const0_rtx : target, tmode, modifier);
6819
6820 case CONST_DECL:
6821 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6822
6823 case REAL_CST:
6824 /* If optimized, generate immediate CONST_DOUBLE
6825 which will be turned into memory by reload if necessary.
6826
6827 We used to force a register so that loop.c could see it. But
6828 this does not allow gen_* patterns to perform optimizations with
6829 the constants. It also produces two insns in cases like "x = 1.0;".
6830 On most machines, floating-point constants are not permitted in
6831 many insns, so we'd end up copying it to a register in any case.
6832
6833 Now, we do the copying in expand_binop, if appropriate. */
6834 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6835 TYPE_MODE (TREE_TYPE (exp)));
6836
6837 case COMPLEX_CST:
6838 /* Handle evaluating a complex constant in a CONCAT target. */
6839 if (original_target && GET_CODE (original_target) == CONCAT)
6840 {
6841 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6842 rtx rtarg, itarg;
6843
6844 rtarg = XEXP (original_target, 0);
6845 itarg = XEXP (original_target, 1);
6846
6847 /* Move the real and imaginary parts separately. */
6848 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6849 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6850
6851 if (op0 != rtarg)
6852 emit_move_insn (rtarg, op0);
6853 if (op1 != itarg)
6854 emit_move_insn (itarg, op1);
6855
6856 return original_target;
6857 }
6858
6859 /* ... fall through ... */
6860
6861 case STRING_CST:
6862 temp = output_constant_def (exp, 1);
6863
6864 /* temp contains a constant address.
6865 On RISC machines where a constant address isn't valid,
6866 make some insns to get that address into a register. */
6867 if (modifier != EXPAND_CONST_ADDRESS
6868 && modifier != EXPAND_INITIALIZER
6869 && modifier != EXPAND_SUM
6870 && (! memory_address_p (mode, XEXP (temp, 0))
6871 || flag_force_addr))
6872 return replace_equiv_address (temp,
6873 copy_rtx (XEXP (temp, 0)));
6874 return temp;
6875
6876 case SAVE_EXPR:
6877 {
6878 tree val = TREE_OPERAND (exp, 0);
6879 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6880
6881 if (!SAVE_EXPR_RESOLVED_P (exp))
6882 {
6883 /* We can indeed still hit this case, typically via builtin
6884 expanders calling save_expr immediately before expanding
6885 something. Assume this means that we only have to deal
6886 with non-BLKmode values. */
6887 gcc_assert (GET_MODE (ret) != BLKmode);
6888
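/* Stash the expanded value in an artificial, ignored VAR_DECL and
   mark the SAVE_EXPR resolved, so that later expansions of this
   node reuse the same RTL instead of re-evaluating the operand. */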
6889 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6890 DECL_ARTIFICIAL (val) = 1;
6891 DECL_IGNORED_P (val) = 1;
6892 TREE_OPERAND (exp, 0) = val;
6893 SAVE_EXPR_RESOLVED_P (exp) = 1;
6894
6895 if (!CONSTANT_P (ret))
6896 ret = copy_to_reg (ret);
6897 SET_DECL_RTL (val, ret);
6898 }
6899
6900 return ret;
6901 }
6902
6903 case GOTO_EXPR:
6904 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6905 expand_goto (TREE_OPERAND (exp, 0));
6906 else
6907 expand_computed_goto (TREE_OPERAND (exp, 0));
6908 return const0_rtx;
6909
6910 case CONSTRUCTOR:
6911 /* If we don't need the result, just ensure we evaluate any
6912 subexpressions. */
6913 if (ignore)
6914 {
6915 unsigned HOST_WIDE_INT idx;
6916 tree value;
6917
6918 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6919 expand_expr (value, const0_rtx, VOIDmode, 0);
6920
6921 return const0_rtx;
6922 }
6923
6924 /* Try to avoid creating a temporary at all. This is possible
6925 if all of the initializer is zero.
6926 FIXME: try to handle any [0..255] initializer that can be
6927 done with memset. */
6928 else if (TREE_STATIC (exp)
6929 && !TREE_ADDRESSABLE (exp)
6930 && target != 0 && mode == BLKmode
6931 && all_zeros_p (exp))
6932 {
6933 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6934 return target;
6935 }
6936
6937 /* All elts simple constants => refer to a constant in memory. But
6938 if this is a non-BLKmode mode, let it store a field at a time
6939 since that should make a CONST_INT or CONST_DOUBLE when we
6940 fold. Likewise, if we have a target we can use, it is best to
6941 store directly into the target unless the type is large enough
6942 that memcpy will be used. If we are making an initializer and
6943 all operands are constant, put it in memory as well.
6944
6945 FIXME: Avoid trying to fill vector constructors piecemeal.
6946 Output them with output_constant_def below unless we're sure
6947 they're zeros. This should go away when vector initializers
6948 are treated like VECTOR_CST instead of arrays.
6949 */
6950 else if ((TREE_STATIC (exp)
6951 && ((mode == BLKmode
6952 && ! (target != 0 && safe_from_p (target, exp, 1)))
6953 || TREE_ADDRESSABLE (exp)
6954 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6955 && (! MOVE_BY_PIECES_P
6956 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6957 TYPE_ALIGN (type)))
6958 && ! mostly_zeros_p (exp))))
6959 || ((modifier == EXPAND_INITIALIZER
6960 || modifier == EXPAND_CONST_ADDRESS)
6961 && TREE_CONSTANT (exp)))
6962 {
6963 rtx constructor = output_constant_def (exp, 1);
6964
6965 if (modifier != EXPAND_CONST_ADDRESS
6966 && modifier != EXPAND_INITIALIZER
6967 && modifier != EXPAND_SUM)
6968 constructor = validize_mem (constructor);
6969
6970 return constructor;
6971 }
6972 else
6973 {
6974 /* Handle calls that pass values in multiple non-contiguous
6975 locations. The Irix 6 ABI has examples of this. */
6976 if (target == 0 || ! safe_from_p (target, exp, 1)
6977 || GET_CODE (target) == PARALLEL
6978 || modifier == EXPAND_STACK_PARM)
6979 target
6980 = assign_temp (build_qualified_type (type,
6981 (TYPE_QUALS (type)
6982 | (TREE_READONLY (exp)
6983 * TYPE_QUAL_CONST))),
6984 0, TREE_ADDRESSABLE (exp), 1);
6985
6986 store_constructor (exp, target, 0, int_expr_size (exp));
6987 return target;
6988 }
6989
6990 case MISALIGNED_INDIRECT_REF:
6991 case ALIGN_INDIRECT_REF:
6992 case INDIRECT_REF:
6993 {
6994 tree exp1 = TREE_OPERAND (exp, 0);
6995
6996 if (modifier != EXPAND_WRITE)
6997 {
6998 tree t;
6999
7000 t = fold_read_from_constant_string (exp);
7001 if (t)
7002 return expand_expr (t, target, tmode, modifier);
7003 }
7004
7005 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7006 op0 = memory_address (mode, op0);
7007
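/* An ALIGN_INDIRECT_REF promises a naturally aligned access: round
   the address down by masking off its low-order alignment bits. */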
7008 if (code == ALIGN_INDIRECT_REF)
7009 {
7010 int align = TYPE_ALIGN_UNIT (type);
7011 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7012 op0 = memory_address (mode, op0);
7013 }
7014
7015 temp = gen_rtx_MEM (mode, op0);
7016
7017 set_mem_attributes (temp, exp, 0);
7018
7019 /* Resolve the misalignment now, so that we don't have to remember
7020 to resolve it later. Of course, this only works for reads. */
7021 /* ??? When we get around to supporting writes, we'll have to handle
7022 this in store_expr directly. The vectorizer isn't generating
7023 those yet, however. */
7024 if (code == MISALIGNED_INDIRECT_REF)
7025 {
7026 int icode;
7027 rtx reg, insn;
7028
7029 gcc_assert (modifier == EXPAND_NORMAL
7030 || modifier == EXPAND_STACK_PARM);
7031
7032 /* The vectorizer should have already checked the mode. */
7033 icode = movmisalign_optab->handlers[mode].insn_code;
7034 gcc_assert (icode != CODE_FOR_nothing);
7035
7036 /* We've already validated the memory, and we're creating a
7037 new pseudo destination. The predicates really can't fail. */
7038 reg = gen_reg_rtx (mode);
7039
7040 /* Nor can the insn generator. */
7041 insn = GEN_FCN (icode) (reg, temp);
7042 emit_insn (insn);
7043
7044 return reg;
7045 }
7046
7047 return temp;
7048 }
7049
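/* A TARGET_MEM_REF describes an address that has already been
   lowered to match the target's addressing modes; rebuild the RTL
   address from that description instead of re-expanding a pointer
   expression. */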
7050 case TARGET_MEM_REF:
7051 {
7052 struct mem_address addr;
7053
7054 get_address_description (exp, &addr);
7055 op0 = addr_for_mem_ref (&addr, true);
7056 op0 = memory_address (mode, op0);
7057 temp = gen_rtx_MEM (mode, op0);
7058 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7059 }
7060 return temp;
7061
7062 case ARRAY_REF:
7063
7064 {
7065 tree array = TREE_OPERAND (exp, 0);
7066 tree index = TREE_OPERAND (exp, 1);
7067
7068 /* Fold an expression like: "foo"[2].
7069 This is not done in fold so it won't happen inside &.
7070 Don't fold if this is for wide characters since it's too
7071 difficult to do correctly and this is a very rare case. */
7072
7073 if (modifier != EXPAND_CONST_ADDRESS
7074 && modifier != EXPAND_INITIALIZER
7075 && modifier != EXPAND_MEMORY)
7076 {
7077 tree t = fold_read_from_constant_string (exp);
7078
7079 if (t)
7080 return expand_expr (t, target, tmode, modifier);
7081 }
7082
7083 /* If this is a constant index into a constant array,
7084 just get the value from the array. Handle both the cases when
7085 we have an explicit constructor and when our operand is a variable
7086 that was declared const. */
7087
7088 if (modifier != EXPAND_CONST_ADDRESS
7089 && modifier != EXPAND_INITIALIZER
7090 && modifier != EXPAND_MEMORY
7091 && TREE_CODE (array) == CONSTRUCTOR
7092 && ! TREE_SIDE_EFFECTS (array)
7093 && TREE_CODE (index) == INTEGER_CST)
7094 {
7095 unsigned HOST_WIDE_INT ix;
7096 tree field, value;
7097
7098 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7099 field, value)
7100 if (tree_int_cst_equal (field, index))
7101 {
7102 if (!TREE_SIDE_EFFECTS (value))
7103 return expand_expr (fold (value), target, tmode, modifier);
7104 break;
7105 }
7106 }
7107
7108 else if (optimize >= 1
7109 && modifier != EXPAND_CONST_ADDRESS
7110 && modifier != EXPAND_INITIALIZER
7111 && modifier != EXPAND_MEMORY
7112 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7113 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7114 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7115 && targetm.binds_local_p (array))
7116 {
7117 if (TREE_CODE (index) == INTEGER_CST)
7118 {
7119 tree init = DECL_INITIAL (array);
7120
7121 if (TREE_CODE (init) == CONSTRUCTOR)
7122 {
7123 unsigned HOST_WIDE_INT ix;
7124 tree field, value;
7125
7126 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7127 field, value)
7128 if (tree_int_cst_equal (field, index))
7129 {
7130 if (!TREE_SIDE_EFFECTS (value))
7131 return expand_expr (fold (value), target, tmode,
7132 modifier);
7133 break;
7134 }
7135 }
7136 else if (TREE_CODE (init) == STRING_CST)
7137 {
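/* E.g., indexing a string constant "abc" with constant index 1
   folds to the character constant 'b', provided the bias-adjusted
   index lies within the string. */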
7138 tree index1 = index;
7139 tree low_bound = array_ref_low_bound (exp);
7140 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7141
7142 /* Optimize the special-case of a zero lower bound.
7143
7144 We convert the low_bound to sizetype to avoid some problems
7145 with constant folding. (E.g. suppose the lower bound is 1,
7146 and its mode is QI. Without the conversion, (ARRAY
7147 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7148 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7149
7150 if (! integer_zerop (low_bound))
7151 index1 = size_diffop (index1, fold_convert (sizetype,
7152 low_bound));
7153
7154 if (0 > compare_tree_int (index1,
7155 TREE_STRING_LENGTH (init)))
7156 {
7157 tree type = TREE_TYPE (TREE_TYPE (init));
7158 enum machine_mode mode = TYPE_MODE (type);
7159
7160 if (GET_MODE_CLASS (mode) == MODE_INT
7161 && GET_MODE_SIZE (mode) == 1)
7162 return gen_int_mode (TREE_STRING_POINTER (init)
7163 [TREE_INT_CST_LOW (index1)],
7164 mode);
7165 }
7166 }
7167 }
7168 }
7169 }
7170 goto normal_inner_ref;
7171
7172 case COMPONENT_REF:
7173 /* If the operand is a CONSTRUCTOR, we can just extract the
7174 appropriate field if it is present. */
7175 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7176 {
7177 unsigned HOST_WIDE_INT idx;
7178 tree field, value;
7179
7180 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7181 idx, field, value)
7182 if (field == TREE_OPERAND (exp, 1)
7183 /* We can normally use the value of the field in the
7184 CONSTRUCTOR. However, if this is a bitfield in
7185 an integral mode that we can fit in a HOST_WIDE_INT,
7186 we must mask only the number of bits in the bitfield,
7187 since this is done implicitly by the constructor. If
7188 the bitfield does not meet either of those conditions,
7189 we can't do this optimization. */
7190 && (! DECL_BIT_FIELD (field)
7191 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7192 && (GET_MODE_BITSIZE (DECL_MODE (field))
7193 <= HOST_BITS_PER_WIDE_INT))))
7194 {
7195 if (DECL_BIT_FIELD (field)
7196 && modifier == EXPAND_STACK_PARM)
7197 target = 0;
7198 op0 = expand_expr (value, target, tmode, modifier);
7199 if (DECL_BIT_FIELD (field))
7200 {
7201 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7202 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7203
7204 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7205 {
7206 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7207 op0 = expand_and (imode, op0, op1, target);
7208 }
7209 else
7210 {
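/* Sign-extend the field value: shift it to the top of the mode,
   then arithmetic-shift it back down. */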
7211 tree count
7212 = build_int_cst (NULL_TREE,
7213 GET_MODE_BITSIZE (imode) - bitsize);
7214
7215 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7216 target, 0);
7217 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7218 target, 0);
7219 }
7220 }
7221
7222 return op0;
7223 }
7224 }
7225 goto normal_inner_ref;
7226
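/* Common path for all component references: decompose the
   reference into a base object, a constant bit position and size,
   and an optional variable offset, then access just that piece. */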
7227 case BIT_FIELD_REF:
7228 case ARRAY_RANGE_REF:
7229 normal_inner_ref:
7230 {
7231 enum machine_mode mode1;
7232 HOST_WIDE_INT bitsize, bitpos;
7233 tree offset;
7234 int volatilep = 0;
7235 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7236 &mode1, &unsignedp, &volatilep, true);
7237 rtx orig_op0;
7238
7239 /* If we got back the original object, something is wrong. Perhaps
7240 we are evaluating an expression too early. In any event, don't
7241 infinitely recurse. */
7242 gcc_assert (tem != exp);
7243
7244 /* If TEM's type is a union of variable size, pass TARGET to the inner
7245 computation, since it will need a temporary and TARGET is known
7246 to suffice. This occurs in unchecked conversion in Ada. */
7247
7248 orig_op0 = op0
7249 = expand_expr (tem,
7250 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7251 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7252 != INTEGER_CST)
7253 && modifier != EXPAND_STACK_PARM
7254 ? target : NULL_RTX),
7255 VOIDmode,
7256 (modifier == EXPAND_INITIALIZER
7257 || modifier == EXPAND_CONST_ADDRESS
7258 || modifier == EXPAND_STACK_PARM)
7259 ? modifier : EXPAND_NORMAL);
7260
7261 /* If this is a constant, put it into a register if it is a legitimate
7262 constant, OFFSET is 0, and we won't try to extract outside the
7263 register (in case we were passed a partially uninitialized object
7264 or a view_conversion to a larger size). Force the constant to
7265 memory otherwise. */
7266 if (CONSTANT_P (op0))
7267 {
7268 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7269 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7270 && offset == 0
7271 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7272 op0 = force_reg (mode, op0);
7273 else
7274 op0 = validize_mem (force_const_mem (mode, op0));
7275 }
7276
7277 /* Otherwise, if this object is not in memory and we either have an
7278 offset, a BLKmode result, or a reference outside the object, put it
7279 there. Such cases can occur in Ada if we have unchecked conversion
7280 of an expression from a scalar type to an array or record type or
7281 for an ARRAY_RANGE_REF whose type is BLKmode. */
7282 else if (!MEM_P (op0)
7283 && (offset != 0
7284 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7285 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7286 {
7287 tree nt = build_qualified_type (TREE_TYPE (tem),
7288 (TYPE_QUALS (TREE_TYPE (tem))
7289 | TYPE_QUAL_CONST));
7290 rtx memloc = assign_temp (nt, 1, 1, 1);
7291
7292 emit_move_insn (memloc, op0);
7293 op0 = memloc;
7294 }
7295
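/* Apply any variable byte offset reported by get_inner_reference,
   first widening or narrowing it to the pointer mode so it can be
   folded into the address. */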
7296 if (offset != 0)
7297 {
7298 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7299 EXPAND_SUM);
7300
7301 gcc_assert (MEM_P (op0));
7302
7303 #ifdef POINTERS_EXTEND_UNSIGNED
7304 if (GET_MODE (offset_rtx) != Pmode)
7305 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7306 #else
7307 if (GET_MODE (offset_rtx) != ptr_mode)
7308 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7309 #endif
7310
7311 if (GET_MODE (op0) == BLKmode
7312 /* A constant address in OP0 can have VOIDmode; we must
7313 not try to call force_reg in that case. */
7314 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7315 && bitsize != 0
7316 && (bitpos % bitsize) == 0
7317 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7318 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7319 {
7320 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7321 bitpos = 0;
7322 }
7323
7324 op0 = offset_address (op0, offset_rtx,
7325 highest_pow2_factor (offset));
7326 }
7327
7328 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7329 record its alignment as BIGGEST_ALIGNMENT. */
7330 if (MEM_P (op0) && bitpos == 0 && offset != 0
7331 && is_aligning_offset (offset, tem))
7332 set_mem_align (op0, BIGGEST_ALIGNMENT);
7333
7334 /* Don't forget about volatility even if this is a bitfield. */
7335 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7336 {
7337 if (op0 == orig_op0)
7338 op0 = copy_rtx (op0);
7339
7340 MEM_VOLATILE_P (op0) = 1;
7341 }
7342
7343 /* The following code doesn't handle CONCAT.
7344 Assume only bitpos == 0 can be used for CONCAT, due to
7345 one-element arrays having the same mode as their element. */
7346 if (GET_CODE (op0) == CONCAT)
7347 {
7348 gcc_assert (bitpos == 0
7349 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7350 return op0;
7351 }
7352
7353 /* In cases where an aligned union has an unaligned object
7354 as a field, we might be extracting a BLKmode value from
7355 an integer-mode (e.g., SImode) object. Handle this case
7356 by doing the extract into an object as wide as the field
7357 (which we know to be the width of a basic mode), then
7358 storing into memory, and changing the mode to BLKmode. */
7359 if (mode1 == VOIDmode
7360 || REG_P (op0) || GET_CODE (op0) == SUBREG
7361 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7362 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7363 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7364 && modifier != EXPAND_CONST_ADDRESS
7365 && modifier != EXPAND_INITIALIZER)
7366 /* If the field isn't aligned enough to fetch as a memref,
7367 fetch it as a bit field. */
7368 || (mode1 != BLKmode
7369 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7370 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7371 || (MEM_P (op0)
7372 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7373 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7374 && ((modifier == EXPAND_CONST_ADDRESS
7375 || modifier == EXPAND_INITIALIZER)
7376 ? STRICT_ALIGNMENT
7377 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7378 || (bitpos % BITS_PER_UNIT != 0)))
7379 /* If the type and the field are a constant size and the
7380 size of the type isn't the same size as the bitfield,
7381 we must use bitfield operations. */
7382 || (bitsize >= 0
7383 && TYPE_SIZE (TREE_TYPE (exp))
7384 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7385 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7386 bitsize)))
7387 {
7388 enum machine_mode ext_mode = mode;
7389
7390 if (ext_mode == BLKmode
7391 && ! (target != 0 && MEM_P (op0)
7392 && MEM_P (target)
7393 && bitpos % BITS_PER_UNIT == 0))
7394 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7395
7396 if (ext_mode == BLKmode)
7397 {
7398 if (target == 0)
7399 target = assign_temp (type, 0, 1, 1);
7400
7401 if (bitsize == 0)
7402 return target;
7403
7404 /* In this case, BITPOS must start at a byte boundary and
7405 TARGET, if specified, must be a MEM. */
7406 gcc_assert (MEM_P (op0)
7407 && (!target || MEM_P (target))
7408 && !(bitpos % BITS_PER_UNIT));
7409
7410 emit_block_move (target,
7411 adjust_address (op0, VOIDmode,
7412 bitpos / BITS_PER_UNIT),
7413 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7414 / BITS_PER_UNIT),
7415 (modifier == EXPAND_STACK_PARM
7416 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7417
7418 return target;
7419 }
7420
7421 op0 = validize_mem (op0);
7422
7423 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7424 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7425
7426 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7427 (modifier == EXPAND_STACK_PARM
7428 ? NULL_RTX : target),
7429 ext_mode, ext_mode);
7430
7431 /* If the result is a record type and BITSIZE is narrower than
7432 the mode of OP0, an integral mode, and this is a big-endian
7433 machine, we must put the field into the high-order bits. */
7434 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7435 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7436 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7437 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7438 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7439 - bitsize),
7440 op0, 1);
7441
7442 /* If the result type is BLKmode, store the data into a temporary
7443 of the appropriate type, but with the mode corresponding to the
7444 mode for the data we have (op0's mode). It's tempting to make
7445 this a constant type, since we know it's only being stored once,
7446 but that can cause problems if we are taking the address of this
7447 COMPONENT_REF because the MEM of any reference via that address
7448 will have flags corresponding to the type, which will not
7449 necessarily be constant. */
7450 if (mode == BLKmode)
7451 {
7452 rtx new
7453 = assign_stack_temp_for_type
7454 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7455
7456 emit_move_insn (new, op0);
7457 op0 = copy_rtx (new);
7458 PUT_MODE (op0, BLKmode);
7459 set_mem_attributes (op0, exp, 1);
7460 }
7461
7462 return op0;
7463 }
7464
7465 /* If the result is BLKmode, use that to access the object
7466 now as well. */
7467 if (mode == BLKmode)
7468 mode1 = BLKmode;
7469
7470 /* Get a reference to just this component. */
7471 if (modifier == EXPAND_CONST_ADDRESS
7472 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7473 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7474 else
7475 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7476
7477 if (op0 == orig_op0)
7478 op0 = copy_rtx (op0);
7479
7480 set_mem_attributes (op0, exp, 0);
7481 if (REG_P (XEXP (op0, 0)))
7482 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7483
7484 MEM_VOLATILE_P (op0) |= volatilep;
7485 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7486 || modifier == EXPAND_CONST_ADDRESS
7487 || modifier == EXPAND_INITIALIZER)
7488 return op0;
7489 else if (target == 0)
7490 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7491
7492 convert_move (target, op0, unsignedp);
7493 return target;
7494 }
7495
7496 case OBJ_TYPE_REF:
7497 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7498
7499 case CALL_EXPR:
7500 /* Check for a built-in function. */
7501 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7502 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7503 == FUNCTION_DECL)
7504 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7505 {
7506 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7507 == BUILT_IN_FRONTEND)
7508 return lang_hooks.expand_expr (exp, original_target,
7509 tmode, modifier,
7510 alt_rtl);
7511 else
7512 return expand_builtin (exp, target, subtarget, tmode, ignore);
7513 }
7514
7515 return expand_call (exp, target, ignore);
7516
7517 case NON_LVALUE_EXPR:
7518 case NOP_EXPR:
7519 case CONVERT_EXPR:
7520 if (TREE_OPERAND (exp, 0) == error_mark_node)
7521 return const0_rtx;
7522
7523 if (TREE_CODE (type) == UNION_TYPE)
7524 {
7525 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7526
7527 /* If both input and output are BLKmode, this conversion isn't doing
7528 anything except possibly changing the memory attributes. */
7529 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7530 {
7531 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7532 modifier);
7533
7534 result = copy_rtx (result);
7535 set_mem_attributes (result, exp, 0);
7536 return result;
7537 }
7538
7539 if (target == 0)
7540 {
7541 if (TYPE_MODE (type) != BLKmode)
7542 target = gen_reg_rtx (TYPE_MODE (type));
7543 else
7544 target = assign_temp (type, 0, 1, 1);
7545 }
7546
7547 if (MEM_P (target))
7548 /* Store data into beginning of memory target. */
7549 store_expr (TREE_OPERAND (exp, 0),
7550 adjust_address (target, TYPE_MODE (valtype), 0),
7551 modifier == EXPAND_STACK_PARM);
7552
7553 else
7554 {
7555 gcc_assert (REG_P (target));
7556
7557 /* Store this field into a union of the proper type. */
7558 store_field (target,
7559 MIN ((int_size_in_bytes (TREE_TYPE
7560 (TREE_OPERAND (exp, 0)))
7561 * BITS_PER_UNIT),
7562 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7563 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7564 type, 0);
7565 }
7566
7567 /* Return the entire union. */
7568 return target;
7569 }
7570
7571 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7572 {
7573 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7574 modifier);
7575
7576 /* If the signedness of the conversion differs and OP0 is
7577 a promoted SUBREG, clear that indication since we now
7578 have to do the proper extension. */
7579 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7580 && GET_CODE (op0) == SUBREG)
7581 SUBREG_PROMOTED_VAR_P (op0) = 0;
7582
7583 return REDUCE_BIT_FIELD (op0);
7584 }
7585
7586 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7587 if (GET_MODE (op0) == mode)
7588 ;
7589
7590 /* If OP0 is a constant, just convert it into the proper mode. */
7591 else if (CONSTANT_P (op0))
7592 {
7593 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7594 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7595
7596 if (modifier == EXPAND_INITIALIZER)
7597 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7598 subreg_lowpart_offset (mode,
7599 inner_mode));
7600 else
7601 op0 = convert_modes (mode, inner_mode, op0,
7602 TYPE_UNSIGNED (inner_type));
7603 }
7604
7605 else if (modifier == EXPAND_INITIALIZER)
7606 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7607
7608 else if (target == 0)
7609 op0 = convert_to_mode (mode, op0,
7610 TYPE_UNSIGNED (TREE_TYPE
7611 (TREE_OPERAND (exp, 0))));
7612 else
7613 {
7614 convert_move (target, op0,
7615 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7616 op0 = target;
7617 }
7618
7619 return REDUCE_BIT_FIELD (op0);
7620
7621 case VIEW_CONVERT_EXPR:
7622 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7623
7624 /* If the input and output modes are both the same, we are done. */
7625 if (TYPE_MODE (type) == GET_MODE (op0))
7626 ;
7627 /* If neither mode is BLKmode, and both modes are the same size
7628 then we can use gen_lowpart. */
7629 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7630 && GET_MODE_SIZE (TYPE_MODE (type))
7631 == GET_MODE_SIZE (GET_MODE (op0)))
7632 {
7633 if (GET_CODE (op0) == SUBREG)
7634 op0 = force_reg (GET_MODE (op0), op0);
7635 op0 = gen_lowpart (TYPE_MODE (type), op0);
7636 }
7637 /* If both modes are integral, then we can convert from one to the
7638 other. */
7639 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7640 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7641 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7642 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7643 /* As a last resort, spill op0 to memory, and reload it in a
7644 different mode. */
7645 else if (!MEM_P (op0))
7646 {
7647 /* If the operand is not a MEM, force it into memory. Since we
7648 are going to be changing the mode of the MEM, don't call
7649 force_const_mem for constants because we don't allow pool
7650 constants to change mode. */
7651 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7652
7653 gcc_assert (!TREE_ADDRESSABLE (exp));
7654
7655 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7656 target
7657 = assign_stack_temp_for_type
7658 (TYPE_MODE (inner_type),
7659 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7660
7661 emit_move_insn (target, op0);
7662 op0 = target;
7663 }
7664
7665 /* At this point, OP0 is in the correct mode. If the output type is such
7666 that the operand is known to be aligned, indicate that it is.
7667 Otherwise, we need only be concerned about alignment for non-BLKmode
7668 results. */
7669 if (MEM_P (op0))
7670 {
7671 op0 = copy_rtx (op0);
7672
7673 if (TYPE_ALIGN_OK (type))
7674 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7675 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7676 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7677 {
7678 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7679 HOST_WIDE_INT temp_size
7680 = MAX (int_size_in_bytes (inner_type),
7681 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7682 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7683 temp_size, 0, type);
7684 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7685
7686 gcc_assert (!TREE_ADDRESSABLE (exp));
7687
7688 if (GET_MODE (op0) == BLKmode)
7689 emit_block_move (new_with_op0_mode, op0,
7690 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7691 (modifier == EXPAND_STACK_PARM
7692 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7693 else
7694 emit_move_insn (new_with_op0_mode, op0);
7695
7696 op0 = new;
7697 }
7698
7699 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7700 }
7701
7702 return op0;
7703
7704 case PLUS_EXPR:
7705 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7706 something else, make sure we add the register to the constant and
7707 then to the other thing. This case can occur during strength
7708 reduction and doing it this way will produce better code if the
7709 frame pointer or argument pointer is eliminated.
7710
7711 fold-const.c will ensure that the constant is always in the inner
7712 PLUS_EXPR, so the only case we need to do anything about is if
7713 sp, ap, or fp is our second argument, in which case we must swap
7714 the innermost first argument and our second argument. */
7715
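/* For example, (A + 4) + FP is rearranged into (FP + 4) + A so
   that plus_constant can combine the eliminable register with the
   constant. */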
7716 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7717 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7718 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7719 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7720 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7721 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7722 {
7723 tree t = TREE_OPERAND (exp, 1);
7724
7725 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7726 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7727 }
7728
7729 /* If the result is to be ptr_mode and we are adding an integer to
7730 something, we might be forming a constant. So try to use
7731 plus_constant. If it produces a sum and we can't accept it,
7732 use force_operand. This allows P = &ARR[const] to generate
7733 efficient code on machines where a SYMBOL_REF is not a valid
7734 address.
7735
7736 If this is an EXPAND_SUM call, always return the sum. */
7737 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7738 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7739 {
7740 if (modifier == EXPAND_STACK_PARM)
7741 target = 0;
7742 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7743 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7744 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7745 {
7746 rtx constant_part;
7747
7748 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7749 EXPAND_SUM);
7750 /* Use immed_double_const to ensure that the constant is
7751 truncated according to the mode of OP1, then sign extended
7752 to a HOST_WIDE_INT. Using the constant directly can result
7753 in non-canonical RTL in a 64x32 cross compile. */
7754 constant_part
7755 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7756 (HOST_WIDE_INT) 0,
7757 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7758 op1 = plus_constant (op1, INTVAL (constant_part));
7759 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7760 op1 = force_operand (op1, target);
7761 return REDUCE_BIT_FIELD (op1);
7762 }
7763
7764 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7765 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7766 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7767 {
7768 rtx constant_part;
7769
7770 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7771 (modifier == EXPAND_INITIALIZER
7772 ? EXPAND_INITIALIZER : EXPAND_SUM));
7773 if (! CONSTANT_P (op0))
7774 {
7775 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7776 VOIDmode, modifier);
7777 /* Return a PLUS if modifier says it's OK. */
7778 if (modifier == EXPAND_SUM
7779 || modifier == EXPAND_INITIALIZER)
7780 return simplify_gen_binary (PLUS, mode, op0, op1);
7781 goto binop2;
7782 }
7783 /* Use immed_double_const to ensure that the constant is
7784 truncated according to the mode of OP0, then sign extended
7785 to a HOST_WIDE_INT. Using the constant directly can result
7786 in non-canonical RTL in a 64x32 cross compile. */
7787 constant_part
7788 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7789 (HOST_WIDE_INT) 0,
7790 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7791 op0 = plus_constant (op0, INTVAL (constant_part));
7792 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7793 op0 = force_operand (op0, target);
7794 return REDUCE_BIT_FIELD (op0);
7795 }
7796 }
7797
7798 /* No sense saving up arithmetic to be done
7799 if it's all in the wrong mode to form part of an address.
7800 And force_operand won't know whether to sign-extend or
7801 zero-extend. */
7802 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7803 || mode != ptr_mode)
7804 {
7805 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7806 subtarget, &op0, &op1, 0);
7807 if (op0 == const0_rtx)
7808 return op1;
7809 if (op1 == const0_rtx)
7810 return op0;
7811 goto binop2;
7812 }
7813
7814 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7815 subtarget, &op0, &op1, modifier);
7816 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7817
7818 case MINUS_EXPR:
7819 /* For initializers, we are allowed to return a MINUS of two
7820 symbolic constants. Here we handle all cases when both operands
7821 are constant. */
7822 /* Handle difference of two symbolic constants,
7823 for the sake of an initializer. */
7824 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7825 && really_constant_p (TREE_OPERAND (exp, 0))
7826 && really_constant_p (TREE_OPERAND (exp, 1)))
7827 {
7828 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7829 NULL_RTX, &op0, &op1, modifier);
7830
7831 /* If the last operand is a CONST_INT, use plus_constant of
7832 the negated constant. Else make the MINUS. */
7833 if (GET_CODE (op1) == CONST_INT)
7834 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7835 else
7836 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7837 }
7838
7839 /* No sense saving up arithmetic to be done
7840 if it's all in the wrong mode to form part of an address.
7841 And force_operand won't know whether to sign-extend or
7842 zero-extend. */
7843 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7844 || mode != ptr_mode)
7845 goto binop;
7846
7847 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7848 subtarget, &op0, &op1, modifier);
7849
7850 /* Convert A - const to A + (-const). */
7851 if (GET_CODE (op1) == CONST_INT)
7852 {
7853 op1 = negate_rtx (mode, op1);
7854 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7855 }
7856
7857 goto binop2;
7858
7859 case MULT_EXPR:
7860 /* If first operand is constant, swap them.
7861 Thus the following special case checks need only
7862 check the second operand. */
7863 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7864 {
7865 tree t1 = TREE_OPERAND (exp, 0);
7866 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7867 TREE_OPERAND (exp, 1) = t1;
7868 }
7869
7870 /* Attempt to return something suitable for generating an
7871 indexed address, for machines that support that. */
7872
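/* E.g., a multiply by 4 under EXPAND_SUM can come back as a bare
   (mult reg 4), which the caller may fold into a scaled-index
   address. */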
7873 if (modifier == EXPAND_SUM && mode == ptr_mode
7874 && host_integerp (TREE_OPERAND (exp, 1), 0))
7875 {
7876 tree exp1 = TREE_OPERAND (exp, 1);
7877
7878 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7879 EXPAND_SUM);
7880
7881 if (!REG_P (op0))
7882 op0 = force_operand (op0, NULL_RTX);
7883 if (!REG_P (op0))
7884 op0 = copy_to_mode_reg (mode, op0);
7885
7886 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7887 gen_int_mode (tree_low_cst (exp1, 0),
7888 TYPE_MODE (TREE_TYPE (exp1)))));
7889 }
7890
7891 if (modifier == EXPAND_STACK_PARM)
7892 target = 0;
7893
7894 /* Check for multiplying things that have been extended
7895 from a narrower type. If this machine supports multiplying
7896 in that narrower type with a result in the desired type,
7897 do it that way, and avoid the explicit type-conversion. */
7898 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7899 && TREE_CODE (type) == INTEGER_TYPE
7900 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7901 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7902 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7903 && int_fits_type_p (TREE_OPERAND (exp, 1),
7904 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7905 /* Don't use a widening multiply if a shift will do. */
7906 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7907 > HOST_BITS_PER_WIDE_INT)
7908 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7909 ||
7910 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7911 && (TYPE_PRECISION (TREE_TYPE
7912 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7913 == TYPE_PRECISION (TREE_TYPE
7914 (TREE_OPERAND
7915 (TREE_OPERAND (exp, 0), 0))))
7916 /* If both operands are extended, they must either both
7917 be zero-extended or both be sign-extended. */
7918 && (TYPE_UNSIGNED (TREE_TYPE
7919 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7920 == TYPE_UNSIGNED (TREE_TYPE
7921 (TREE_OPERAND
7922 (TREE_OPERAND (exp, 0), 0)))))))
7923 {
7924 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7925 enum machine_mode innermode = TYPE_MODE (op0type);
7926 bool zextend_p = TYPE_UNSIGNED (op0type);
7927 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7928 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7929
7930 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7931 {
7932 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7933 {
7934 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7935 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7936 TREE_OPERAND (exp, 1),
7937 NULL_RTX, &op0, &op1, 0);
7938 else
7939 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7940 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7941 NULL_RTX, &op0, &op1, 0);
7942 goto binop3;
7943 }
7944 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7945 && innermode == word_mode)
7946 {
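/* Only the widening multiply of the opposite signedness is
   available. Use it, then fix up the high half of the product
   with expand_mult_highpart_adjust. */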
7947 rtx htem, hipart;
7948 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7949 NULL_RTX, VOIDmode, 0);
7950 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7951 op1 = convert_modes (innermode, mode,
7952 expand_expr (TREE_OPERAND (exp, 1),
7953 NULL_RTX, VOIDmode, 0),
7954 unsignedp);
7955 else
7956 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7957 NULL_RTX, VOIDmode, 0);
7958 temp = expand_binop (mode, other_optab, op0, op1, target,
7959 unsignedp, OPTAB_LIB_WIDEN);
7960 hipart = gen_highpart (innermode, temp);
7961 htem = expand_mult_highpart_adjust (innermode, hipart,
7962 op0, op1, hipart,
7963 zextend_p);
7964 if (htem != hipart)
7965 emit_move_insn (hipart, htem);
7966 return REDUCE_BIT_FIELD (temp);
7967 }
7968 }
7969 }
7970 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7971 subtarget, &op0, &op1, 0);
7972 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7973
7974 case TRUNC_DIV_EXPR:
7975 case FLOOR_DIV_EXPR:
7976 case CEIL_DIV_EXPR:
7977 case ROUND_DIV_EXPR:
7978 case EXACT_DIV_EXPR:
7979 if (modifier == EXPAND_STACK_PARM)
7980 target = 0;
7981 /* Possible optimization: compute the dividend with EXPAND_SUM;
7982 then, if the divisor is constant, we can optimize the case
7983 where some terms of the dividend have coefficients divisible by it. */
7984 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7985 subtarget, &op0, &op1, 0);
7986 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7987
7988 case RDIV_EXPR:
7989 goto binop;
7990
7991 case TRUNC_MOD_EXPR:
7992 case FLOOR_MOD_EXPR:
7993 case CEIL_MOD_EXPR:
7994 case ROUND_MOD_EXPR:
7995 if (modifier == EXPAND_STACK_PARM)
7996 target = 0;
7997 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7998 subtarget, &op0, &op1, 0);
7999 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8000
8001 case FIX_ROUND_EXPR:
8002 case FIX_FLOOR_EXPR:
8003 case FIX_CEIL_EXPR:
8004 gcc_unreachable (); /* Not used for C. */
8005
8006 case FIX_TRUNC_EXPR:
8007 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8008 if (target == 0 || modifier == EXPAND_STACK_PARM)
8009 target = gen_reg_rtx (mode);
8010 expand_fix (target, op0, unsignedp);
8011 return target;
8012
8013 case FLOAT_EXPR:
8014 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8015 if (target == 0 || modifier == EXPAND_STACK_PARM)
8016 target = gen_reg_rtx (mode);
8017 /* expand_float can't figure out what to do if FROM has VOIDmode.
8018 So give it the correct mode. With -O, cse will optimize this. */
8019 if (GET_MODE (op0) == VOIDmode)
8020 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8021 op0);
8022 expand_float (target, op0,
8023 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8024 return target;
8025
8026 case NEGATE_EXPR:
8027 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8028 if (modifier == EXPAND_STACK_PARM)
8029 target = 0;
8030 temp = expand_unop (mode,
8031 optab_for_tree_code (NEGATE_EXPR, type),
8032 op0, target, 0);
8033 gcc_assert (temp);
8034 return REDUCE_BIT_FIELD (temp);
8035
8036 case ABS_EXPR:
8037 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8038 if (modifier == EXPAND_STACK_PARM)
8039 target = 0;
8040
8041 /* ABS_EXPR is not valid for complex arguments. */
8042 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8043 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8044
8045 /* Unsigned abs is simply the operand. Testing here means we don't
8046 risk generating incorrect code below. */
8047 if (TYPE_UNSIGNED (type))
8048 return op0;
8049
8050 return expand_abs (mode, op0, target, unsignedp,
8051 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8052
8053 case MAX_EXPR:
8054 case MIN_EXPR:
8055 target = original_target;
8056 if (target == 0
8057 || modifier == EXPAND_STACK_PARM
8058 || (MEM_P (target) && MEM_VOLATILE_P (target))
8059 || GET_MODE (target) != mode
8060 || (REG_P (target)
8061 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8062 target = gen_reg_rtx (mode);
8063 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8064 target, &op0, &op1, 0);
8065
8066 /* First try to do it with a special MIN or MAX instruction.
8067 If that does not win, use a conditional jump to select the proper
8068 value. */
8069 this_optab = optab_for_tree_code (code, type);
8070 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8071 OPTAB_WIDEN);
8072 if (temp != 0)
8073 return temp;
8074
8075 /* At this point, a MEM target is no longer useful; we will get better
8076 code without it. */
8077
8078 if (! REG_P (target))
8079 target = gen_reg_rtx (mode);
8080
8081 /* If op1 was placed in target, swap op0 and op1. */
8082 if (target != op0 && target == op1)
8083 {
8084 temp = op0;
8085 op0 = op1;
8086 op1 = temp;
8087 }
8088
8089 /* We generate better code and avoid problems with op1 mentioning
8090 target by forcing op1 into a pseudo if it isn't a constant. */
8091 if (! CONSTANT_P (op1))
8092 op1 = force_reg (mode, op1);
8093
8094 {
8095 enum rtx_code comparison_code;
8096 rtx cmpop1 = op1;
8097
8098 if (code == MAX_EXPR)
8099 comparison_code = unsignedp ? GEU : GE;
8100 else
8101 comparison_code = unsignedp ? LEU : LE;
8102
8103 /* Canonicalize to comparisons against 0. */
8104 if (op1 == const1_rtx)
8105 {
8106 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8107 or (a != 0 ? a : 1) for unsigned.
8108 For MIN we are safe converting (a <= 1 ? a : 1)
8109 into (a <= 0 ? a : 1) */
8110 cmpop1 = const0_rtx;
8111 if (code == MAX_EXPR)
8112 comparison_code = unsignedp ? NE : GT;
8113 }
8114 if (op1 == constm1_rtx && !unsignedp)
8115 {
8116 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8117 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8118 cmpop1 = const0_rtx;
8119 if (code == MIN_EXPR)
8120 comparison_code = LT;
8121 }
8122 #ifdef HAVE_conditional_move
8123 /* Use a conditional move if possible. */
8124 if (can_conditionally_move_p (mode))
8125 {
8126 rtx insn;
8127
8128 /* ??? Same problem as in expmed.c: emit_conditional_move
8129 forces a stack adjustment via compare_from_rtx, and we
8130 lose the stack adjustment if the sequence we are about
8131 to create is discarded. */
8132 do_pending_stack_adjust ();
8133
8134 start_sequence ();
8135
8136 /* Try to emit the conditional move. */
8137 insn = emit_conditional_move (target, comparison_code,
8138 op0, cmpop1, mode,
8139 op0, op1, mode,
8140 unsignedp);
8141
8142 /* If we could do the conditional move, emit the sequence,
8143 and return. */
8144 if (insn)
8145 {
8146 rtx seq = get_insns ();
8147 end_sequence ();
8148 emit_insn (seq);
8149 return target;
8150 }
8151
8152 /* Otherwise discard the sequence and fall back to code with
8153 branches. */
8154 end_sequence ();
8155 }
8156 #endif
8157 if (target != op0)
8158 emit_move_insn (target, op0);
8159
8160 temp = gen_label_rtx ();
8161
8162 /* If this mode is an integer too wide to compare properly,
8163 compare word by word. Rely on cse to optimize constant cases. */
8164 if (GET_MODE_CLASS (mode) == MODE_INT
8165 && ! can_compare_p (GE, mode, ccp_jump))
8166 {
8167 if (code == MAX_EXPR)
8168 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8169 NULL_RTX, temp);
8170 else
8171 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8172 NULL_RTX, temp);
8173 }
8174 else
8175 {
8176 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8177 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8178 }
8179 }
8180 emit_move_insn (target, op1);
8181 emit_label (temp);
8182 return target;
8183
8184 case BIT_NOT_EXPR:
8185 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8186 if (modifier == EXPAND_STACK_PARM)
8187 target = 0;
8188 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8189 gcc_assert (temp);
8190 return temp;
8191
8192 /* ??? Can optimize bitwise operations with one arg constant.
8193 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8194 and (a bitwise1 b) bitwise2 b (etc)
8195 but that is probably not worthwhile. */
8196
8197 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8198 boolean values when we want in all cases to compute both of them. In
8199 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8200 as actual zero-or-1 values and then bitwise anding. In cases where
8201 there cannot be any side effects, better code would be made by
8202 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8203 how to recognize those cases. */
8204
8205 case TRUTH_AND_EXPR:
8206 code = BIT_AND_EXPR;
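/* FALLTHRU */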
8207 case BIT_AND_EXPR:
8208 goto binop;
8209
8210 case TRUTH_OR_EXPR:
8211 code = BIT_IOR_EXPR;
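/* FALLTHRU */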
8212 case BIT_IOR_EXPR:
8213 goto binop;
8214
8215 case TRUTH_XOR_EXPR:
8216 code = BIT_XOR_EXPR;
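/* FALLTHRU */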
8217 case BIT_XOR_EXPR:
8218 goto binop;
8219
8220 case LSHIFT_EXPR:
8221 case RSHIFT_EXPR:
8222 case LROTATE_EXPR:
8223 case RROTATE_EXPR:
8224 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8225 subtarget = 0;
8226 if (modifier == EXPAND_STACK_PARM)
8227 target = 0;
8228 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8229 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8230 unsignedp);
8231
8232 /* Could determine the answer when only additive constants differ. Also,
8233 the addition of one can be handled by changing the condition. */
8234 case LT_EXPR:
8235 case LE_EXPR:
8236 case GT_EXPR:
8237 case GE_EXPR:
8238 case EQ_EXPR:
8239 case NE_EXPR:
8240 case UNORDERED_EXPR:
8241 case ORDERED_EXPR:
8242 case UNLT_EXPR:
8243 case UNLE_EXPR:
8244 case UNGT_EXPR:
8245 case UNGE_EXPR:
8246 case UNEQ_EXPR:
8247 case LTGT_EXPR:
8248 temp = do_store_flag (exp,
8249 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8250 tmode != VOIDmode ? tmode : mode, 0);
8251 if (temp != 0)
8252 return temp;
8253
8254 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8255 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8256 && original_target
8257 && REG_P (original_target)
8258 && (GET_MODE (original_target)
8259 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8260 {
8261 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8262 VOIDmode, 0);
8263
8264 /* If temp is constant, we can just compute the result. */
8265 if (GET_CODE (temp) == CONST_INT)
8266 {
8267 if (INTVAL (temp) != 0)
8268 emit_move_insn (target, const1_rtx);
8269 else
8270 emit_move_insn (target, const0_rtx);
8271
8272 return target;
8273 }
8274
8275 if (temp != original_target)
8276 {
8277 enum machine_mode mode1 = GET_MODE (temp);
8278 if (mode1 == VOIDmode)
8279 mode1 = tmode != VOIDmode ? tmode : mode;
8280
8281 temp = copy_to_mode_reg (mode1, temp);
8282 }
8283
8284 op1 = gen_label_rtx ();
8285 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8286 GET_MODE (temp), unsignedp, op1);
8287 emit_move_insn (temp, const1_rtx);
8288 emit_label (op1);
8289 return temp;
8290 }
8291
8292 /* If no set-flag instruction, must generate a conditional store
8293 into a temporary variable. Drop through and handle this
8294 like && and ||. */
8295
8296 if (! ignore
8297 && (target == 0
8298 || modifier == EXPAND_STACK_PARM
8299 || ! safe_from_p (target, exp, 1)
8300 /* Make sure we don't have a hard reg (such as the function's
8301 return value) live across basic blocks, if not optimizing. */
8302 || (!optimize && REG_P (target)
8303 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8304 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8305
8306 if (target)
8307 emit_move_insn (target, const0_rtx);
8308
8309 op1 = gen_label_rtx ();
8310 jumpifnot (exp, op1);
8311
8312 if (target)
8313 emit_move_insn (target, const1_rtx);
8314
8315 emit_label (op1);
8316 return ignore ? const0_rtx : target;
8317
8318 case TRUTH_NOT_EXPR:
8319 if (modifier == EXPAND_STACK_PARM)
8320 target = 0;
8321 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8322 /* The parser is careful to generate TRUTH_NOT_EXPR
8323 only with operands that are always zero or one. */
8324 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8325 target, 1, OPTAB_LIB_WIDEN);
8326 gcc_assert (temp);
8327 return temp;
8328
8329 case STATEMENT_LIST:
8330 {
8331 tree_stmt_iterator iter;
8332
8333 gcc_assert (ignore);
8334
8335 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8336 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8337 }
8338 return const0_rtx;
8339
8340 case COND_EXPR:
8341 /* A COND_EXPR with its type being VOID_TYPE represents a
8342 conditional jump and is handled in
8343 expand_gimple_cond_expr. */
8344 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8345
8346 /* Note that COND_EXPRs whose type is a structure or union
8347 are required to be constructed to contain assignments to
8348 a temporary variable, so that we can evaluate them here
8349 for side effect only. If type is void, we must do likewise. */
8350
8351 gcc_assert (!TREE_ADDRESSABLE (type)
8352 && !ignore
8353 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8354 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8355
8356 /* If we are not to produce a result, we have no target. Otherwise,
8357 if a target was specified use it; it will not be used as an
8358 intermediate target unless it is safe. If no target, use a
8359 temporary. */
8360
8361 if (modifier != EXPAND_STACK_PARM
8362 && original_target
8363 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8364 && GET_MODE (original_target) == mode
8365 #ifdef HAVE_conditional_move
8366 && (! can_conditionally_move_p (mode)
8367 || REG_P (original_target))
8368 #endif
8369 && !MEM_P (original_target))
8370 temp = original_target;
8371 else
8372 temp = assign_temp (type, 0, 0, 1);
8373
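/* Expand as a diamond: jump to OP0 (the else branch) when the
   condition is false, store the then-value into TEMP, jump over
   the else-store to OP1, and join there. */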
8374 do_pending_stack_adjust ();
8375 NO_DEFER_POP;
8376 op0 = gen_label_rtx ();
8377 op1 = gen_label_rtx ();
8378 jumpifnot (TREE_OPERAND (exp, 0), op0);
8379 store_expr (TREE_OPERAND (exp, 1), temp,
8380 modifier == EXPAND_STACK_PARM);
8381
8382 emit_jump_insn (gen_jump (op1));
8383 emit_barrier ();
8384 emit_label (op0);
8385 store_expr (TREE_OPERAND (exp, 2), temp,
8386 modifier == EXPAND_STACK_PARM);
8387
8388 emit_label (op1);
8389 OK_DEFER_POP;
8390 return temp;
8391
8392 case VEC_COND_EXPR:
8393 target = expand_vec_cond_expr (exp, target);
8394 return target;
8395
8396 case MODIFY_EXPR:
8397 {
8398 tree lhs = TREE_OPERAND (exp, 0);
8399 tree rhs = TREE_OPERAND (exp, 1);
8400
8401 gcc_assert (ignore);
8402
8403 /* Check for |= or &= of a bitfield of size one into another bitfield
8404 of size 1. In this case, (unless we need the result of the
8405 assignment) we can do this more efficiently with a
8406 test followed by an assignment, if necessary.
8407
8408 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8409 things change so we do, this code should be enhanced to
8410 support it. */
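/* I.e., for single-bit fields, A |= B becomes "if (B) A = 1;" and
   A &= B becomes "if (!B) A = 0;". */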
8411 if (TREE_CODE (lhs) == COMPONENT_REF
8412 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8413 || TREE_CODE (rhs) == BIT_AND_EXPR)
8414 && TREE_OPERAND (rhs, 0) == lhs
8415 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8416 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8417 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8418 {
8419 rtx label = gen_label_rtx ();
8420
8421 do_jump (TREE_OPERAND (rhs, 1),
8422 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8423 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8424 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8425 (TREE_CODE (rhs) == BIT_IOR_EXPR
8426 ? integer_one_node
8427 : integer_zero_node)));
8428 do_pending_stack_adjust ();
8429 emit_label (label);
8430 return const0_rtx;
8431 }
8432
8433 expand_assignment (lhs, rhs);
8434
8435 return const0_rtx;
8436 }
8437
8438 case RETURN_EXPR:
8439 if (!TREE_OPERAND (exp, 0))
8440 expand_null_return ();
8441 else
8442 expand_return (TREE_OPERAND (exp, 0));
8443 return const0_rtx;
8444
8445 case ADDR_EXPR:
8446 return expand_expr_addr_expr (exp, target, tmode, modifier);
8447
8448 case COMPLEX_EXPR:
8449 /* Expand the real and imaginary operands to rtx. */
8450 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8451 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8452
8453 if (!target)
8454 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8455
8456 /* Move the real (op0) and imaginary (op1) parts to their location. */
8457 write_complex_part (target, op0, false);
8458 write_complex_part (target, op1, true);
8459
8460 return target;
8461
8462 case REALPART_EXPR:
8463 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8464 return read_complex_part (op0, false);
8465
8466 case IMAGPART_EXPR:
8467 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8468 return read_complex_part (op0, true);
8469
8470 case RESX_EXPR:
8471 expand_resx_expr (exp);
8472 return const0_rtx;
8473
8474 case TRY_CATCH_EXPR:
8475 case CATCH_EXPR:
8476 case EH_FILTER_EXPR:
8477 case TRY_FINALLY_EXPR:
8478 /* Lowered by tree-eh.c. */
8479 gcc_unreachable ();
8480
8481 case WITH_CLEANUP_EXPR:
8482 case CLEANUP_POINT_EXPR:
8483 case TARGET_EXPR:
8484 case CASE_LABEL_EXPR:
8485 case VA_ARG_EXPR:
8486 case BIND_EXPR:
8487 case INIT_EXPR:
8488 case CONJ_EXPR:
8489 case COMPOUND_EXPR:
8490 case PREINCREMENT_EXPR:
8491 case PREDECREMENT_EXPR:
8492 case POSTINCREMENT_EXPR:
8493 case POSTDECREMENT_EXPR:
8494 case LOOP_EXPR:
8495 case EXIT_EXPR:
8496 case TRUTH_ANDIF_EXPR:
8497 case TRUTH_ORIF_EXPR:
8498 /* Lowered by gimplify.c. */
8499 gcc_unreachable ();
8500
8501 case EXC_PTR_EXPR:
8502 return get_exception_pointer (cfun);
8503
8504 case FILTER_EXPR:
8505 return get_exception_filter (cfun);
8506
8507 case FDESC_EXPR:
8508 /* Function descriptors are not valid except as initialization
8509 constants, and should not be expanded.  */
8510 gcc_unreachable ();
8511
8512 case SWITCH_EXPR:
8513 expand_case (exp);
8514 return const0_rtx;
8515
8516 case LABEL_EXPR:
8517 expand_label (TREE_OPERAND (exp, 0));
8518 return const0_rtx;
8519
8520 case ASM_EXPR:
8521 expand_asm_expr (exp);
8522 return const0_rtx;
8523
8524 case WITH_SIZE_EXPR:
8525 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8526 have pulled out the size to use in whatever context it needed. */
8527 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8528 modifier, alt_rtl);
8529
8530 case REALIGN_LOAD_EXPR:
8531 {
8532 tree oprnd0 = TREE_OPERAND (exp, 0);
8533 tree oprnd1 = TREE_OPERAND (exp, 1);
8534 tree oprnd2 = TREE_OPERAND (exp, 2);
8535 rtx op2;
8536
8537 this_optab = optab_for_tree_code (code, type);
8538 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8539 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8540 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8541 target, unsignedp);
8542 gcc_assert (temp);
8543 return temp;
8544 }
8545
8546 case REDUC_MAX_EXPR:
8547 case REDUC_MIN_EXPR:
8548 case REDUC_PLUS_EXPR:
8549 {
8550 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8551 this_optab = optab_for_tree_code (code, type);
8552 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8553 gcc_assert (temp);
8554 return temp;
8555 }
8556
8557 case VEC_LSHIFT_EXPR:
8558 case VEC_RSHIFT_EXPR:
8559 {
8560 target = expand_vec_shift_expr (exp, target);
8561 return target;
8562 }
8563
8564 default:
8565 return lang_hooks.expand_expr (exp, original_target, tmode,
8566 modifier, alt_rtl);
8567 }
8568
8569 /* Here to do an ordinary binary operator. */
8570 binop:
8571 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8572 subtarget, &op0, &op1, 0);
8573 binop2:
8574 this_optab = optab_for_tree_code (code, type);
8575 binop3:
8576 if (modifier == EXPAND_STACK_PARM)
8577 target = 0;
8578 temp = expand_binop (mode, this_optab, op0, op1, target,
8579 unsignedp, OPTAB_LIB_WIDEN);
8580 gcc_assert (temp);
8581 return REDUCE_BIT_FIELD (temp);
8582 }
8583 #undef REDUCE_BIT_FIELD
8584
8585 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8586 signedness of TYPE), possibly returning the result in TARGET. */
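/* For example (illustrative only): reducing EXP to a 3-bit precision in
   a 32-bit mode either ANDs it with the mask 7 (for an unsigned TYPE)
   or shifts it left by 29 and then arithmetic-shifts it right by 29
   (for a signed TYPE), truncating or sign-extending to 3 bits.  */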
8587 static rtx
8588 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8589 {
8590 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8591 if (target && GET_MODE (target) != GET_MODE (exp))
8592 target = 0;
8593 if (TYPE_UNSIGNED (type))
8594 {
8595 rtx mask;
8596 if (prec < HOST_BITS_PER_WIDE_INT)
8597 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8598 GET_MODE (exp));
8599 else
8600 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8601 ((unsigned HOST_WIDE_INT) 1
8602 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8603 GET_MODE (exp));
8604 return expand_and (GET_MODE (exp), exp, mask, target);
8605 }
8606 else
8607 {
8608 tree count = build_int_cst (NULL_TREE,
8609 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8610 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8611 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8612 }
8613 }
8614
8615 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
8616 when applied to the address of EXP, produces an address known to be
8617 aligned to more than BIGGEST_ALIGNMENT.  */
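/* Illustrative example (not from the original source): an OFFSET of the
   form ((- (sizetype) &EXP) & (ALIGN - 1)), with ALIGN a power of 2
   larger than BIGGEST_ALIGNMENT, matches this pattern; adding such an
   offset to &EXP rounds the address up to an ALIGN-byte boundary.  */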
8618
8619 static int
8620 is_aligning_offset (tree offset, tree exp)
8621 {
8622 /* Strip off any conversions. */
8623 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8624 || TREE_CODE (offset) == NOP_EXPR
8625 || TREE_CODE (offset) == CONVERT_EXPR)
8626 offset = TREE_OPERAND (offset, 0);
8627
8628 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8629 a power of 2 and larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */
8630 if (TREE_CODE (offset) != BIT_AND_EXPR
8631 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8632 || compare_tree_int (TREE_OPERAND (offset, 1),
8633 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8634 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8635 return 0;
8636
8637 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8638 It must be NEGATE_EXPR. Then strip any more conversions. */
8639 offset = TREE_OPERAND (offset, 0);
8640 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8641 || TREE_CODE (offset) == NOP_EXPR
8642 || TREE_CODE (offset) == CONVERT_EXPR)
8643 offset = TREE_OPERAND (offset, 0);
8644
8645 if (TREE_CODE (offset) != NEGATE_EXPR)
8646 return 0;
8647
8648 offset = TREE_OPERAND (offset, 0);
8649 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8650 || TREE_CODE (offset) == NOP_EXPR
8651 || TREE_CODE (offset) == CONVERT_EXPR)
8652 offset = TREE_OPERAND (offset, 0);
8653
8654 /* This must now be the address of EXP. */
8655 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8656 }
8657
8658 /* Return the tree node if ARG corresponds to a string constant, or zero
8659 if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
8660 in bytes within the string that ARG is accessing.  The type of the
8661 offset will be `sizetype'.  */
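/* For example (illustrative only): given ARG == &"hello"[2], or the
   equivalent "hello" + 2, this returns the STRING_CST "hello" and sets
   *PTR_OFFSET to 2.  A read-only VAR_DECL whose DECL_INITIAL is a
   string literal is handled the same way.  */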
8662
8663 tree
8664 string_constant (tree arg, tree *ptr_offset)
8665 {
8666 tree array, offset;
8667 STRIP_NOPS (arg);
8668
8669 if (TREE_CODE (arg) == ADDR_EXPR)
8670 {
8671 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8672 {
8673 *ptr_offset = size_zero_node;
8674 return TREE_OPERAND (arg, 0);
8675 }
8676 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8677 {
8678 array = TREE_OPERAND (arg, 0);
8679 offset = size_zero_node;
8680 }
8681 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8682 {
8683 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8684 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8685 if (TREE_CODE (array) != STRING_CST
8686 && TREE_CODE (array) != VAR_DECL)
8687 return 0;
8688 }
8689 else
8690 return 0;
8691 }
8692 else if (TREE_CODE (arg) == PLUS_EXPR)
8693 {
8694 tree arg0 = TREE_OPERAND (arg, 0);
8695 tree arg1 = TREE_OPERAND (arg, 1);
8696
8697 STRIP_NOPS (arg0);
8698 STRIP_NOPS (arg1);
8699
8700 if (TREE_CODE (arg0) == ADDR_EXPR
8701 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8702 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8703 {
8704 array = TREE_OPERAND (arg0, 0);
8705 offset = arg1;
8706 }
8707 else if (TREE_CODE (arg1) == ADDR_EXPR
8708 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8709 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8710 {
8711 array = TREE_OPERAND (arg1, 0);
8712 offset = arg0;
8713 }
8714 else
8715 return 0;
8716 }
8717 else
8718 return 0;
8719
8720 if (TREE_CODE (array) == STRING_CST)
8721 {
8722 *ptr_offset = convert (sizetype, offset);
8723 return array;
8724 }
8725 else if (TREE_CODE (array) == VAR_DECL)
8726 {
8727 int length;
8728
8729 /* Variables initialized to string literals can be handled too. */
8730 if (DECL_INITIAL (array) == NULL_TREE
8731 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8732 return 0;
8733
8734 /* The variable must be read-only, non-volatile, and bind locally.  */
8735 if (! TREE_READONLY (array)
8736 || TREE_SIDE_EFFECTS (array)
8737 || ! targetm.binds_local_p (array))
8738 return 0;
8739
8740 /* Avoid const char foo[4] = "abcde"; */
8741 if (DECL_SIZE_UNIT (array) == NULL_TREE
8742 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8743 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8744 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8745 return 0;
8746
8747 /* If the variable is bigger than the string literal, OFFSET must be
8748 constant and within the bounds of the string literal.  */
8749 offset = convert (sizetype, offset);
8750 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8751 && (! host_integerp (offset, 1)
8752 || compare_tree_int (offset, length) >= 0))
8753 return 0;
8754
8755 *ptr_offset = offset;
8756 return DECL_INITIAL (array);
8757 }
8758
8759 return 0;
8760 }
8761
8762 /* Generate code to calculate EXP using a store-flag instruction
8763 and return an rtx for the result. EXP is either a comparison
8764 or a TRUTH_NOT_EXPR whose operand is a comparison.
8765
8766 If TARGET is nonzero, store the result there if convenient.
8767
8768 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8769 cheap.
8770
8771 Return zero if there is no suitable set-flag instruction
8772 available on this machine.
8773
8774 Once expand_expr has been called on the arguments of the comparison,
8775 we are committed to doing the store flag, since it is not safe to
8776 re-evaluate the expression. We emit the store-flag insn by calling
8777 emit_store_flag, but only expand the arguments if we have a reason
8778 to believe that emit_store_flag will be successful. If we think that
8779 it will, but it isn't, we have to simulate the store-flag with a
8780 set/jump/set sequence. */
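/* For example (illustrative only): on a target with an SImode scc
   pattern, "flag = (a < b);" can expand to a single store-flag insn
   that deposits 0 or 1 into FLAG instead of a compare, a branch and
   two moves.  */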
8781
8782 static rtx
8783 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8784 {
8785 enum rtx_code code;
8786 tree arg0, arg1, type;
8787 tree tem;
8788 enum machine_mode operand_mode;
8789 int invert = 0;
8790 int unsignedp;
8791 rtx op0, op1;
8792 enum insn_code icode;
8793 rtx subtarget = target;
8794 rtx result, label;
8795
8796 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8797 result at the end. We can't simply invert the test since it would
8798 have already been inverted if it were valid. This case occurs for
8799 some floating-point comparisons. */
8800
8801 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8802 invert = 1, exp = TREE_OPERAND (exp, 0);
8803
8804 arg0 = TREE_OPERAND (exp, 0);
8805 arg1 = TREE_OPERAND (exp, 1);
8806
8807 /* Don't crash if the comparison was erroneous. */
8808 if (arg0 == error_mark_node || arg1 == error_mark_node)
8809 return const0_rtx;
8810
8811 type = TREE_TYPE (arg0);
8812 operand_mode = TYPE_MODE (type);
8813 unsignedp = TYPE_UNSIGNED (type);
8814
8815 /* We won't bother with BLKmode store-flag operations because it would mean
8816 passing a lot of information to emit_store_flag. */
8817 if (operand_mode == BLKmode)
8818 return 0;
8819
8820 /* We won't bother with store-flag operations involving function pointers
8821 when function pointers must be canonicalized before comparisons. */
8822 #ifdef HAVE_canonicalize_funcptr_for_compare
8823 if (HAVE_canonicalize_funcptr_for_compare
8824 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8825 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8826 == FUNCTION_TYPE))
8827 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8828 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8829 == FUNCTION_TYPE))))
8830 return 0;
8831 #endif
8832
8833 STRIP_NOPS (arg0);
8834 STRIP_NOPS (arg1);
8835
8836 /* Get the rtx comparison code to use. We know that EXP is a comparison
8837 operation of some type. Some comparisons against 1 and -1 can be
8838 converted to comparisons with zero. Do so here so that the tests
8839 below will be aware that we have a comparison with zero. These
8840 tests will not catch constants in the first operand, but constants
8841 are rarely passed as the first operand. */
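/* E.g. (illustrative): for signed operands, "x < 1" becomes "x <= 0"
   and "x > -1" becomes "x >= 0", so the zero-comparison special cases
   below apply to them as well.  */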
8842
8843 switch (TREE_CODE (exp))
8844 {
8845 case EQ_EXPR:
8846 code = EQ;
8847 break;
8848 case NE_EXPR:
8849 code = NE;
8850 break;
8851 case LT_EXPR:
8852 if (integer_onep (arg1))
8853 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8854 else
8855 code = unsignedp ? LTU : LT;
8856 break;
8857 case LE_EXPR:
8858 if (! unsignedp && integer_all_onesp (arg1))
8859 arg1 = integer_zero_node, code = LT;
8860 else
8861 code = unsignedp ? LEU : LE;
8862 break;
8863 case GT_EXPR:
8864 if (! unsignedp && integer_all_onesp (arg1))
8865 arg1 = integer_zero_node, code = GE;
8866 else
8867 code = unsignedp ? GTU : GT;
8868 break;
8869 case GE_EXPR:
8870 if (integer_onep (arg1))
8871 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8872 else
8873 code = unsignedp ? GEU : GE;
8874 break;
8875
8876 case UNORDERED_EXPR:
8877 code = UNORDERED;
8878 break;
8879 case ORDERED_EXPR:
8880 code = ORDERED;
8881 break;
8882 case UNLT_EXPR:
8883 code = UNLT;
8884 break;
8885 case UNLE_EXPR:
8886 code = UNLE;
8887 break;
8888 case UNGT_EXPR:
8889 code = UNGT;
8890 break;
8891 case UNGE_EXPR:
8892 code = UNGE;
8893 break;
8894 case UNEQ_EXPR:
8895 code = UNEQ;
8896 break;
8897 case LTGT_EXPR:
8898 code = LTGT;
8899 break;
8900
8901 default:
8902 gcc_unreachable ();
8903 }
8904
8905 /* Put a constant second. */
8906 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8907 {
8908 tem = arg0; arg0 = arg1; arg1 = tem;
8909 code = swap_condition (code);
8910 }
8911
8912 /* If this is an equality or inequality test of a single bit, we can
8913 do this by shifting the bit being tested to the low-order bit and
8914 masking the result with the constant 1. If the condition was EQ,
8915 we xor it with 1. This does not require an scc insn and is faster
8916 than an scc insn even if we have it.
8917
8918 The code to make this transformation was moved into fold_single_bit_test,
8919 so we just call into the folder and expand its result. */
8920
8921 if ((code == NE || code == EQ)
8922 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8923 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8924 {
8925 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8926 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8927 arg0, arg1, type),
8928 target, VOIDmode, EXPAND_NORMAL);
8929 }
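/* Illustrative example of the transformation above: with CODE == NE,
   "(x & 8) != 0" is folded by fold_single_bit_test into the equivalent
   of "(x >> 3) & 1", which needs no scc instruction.  */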
8930
8931 /* Now see if we are likely to be able to do this. Return if not. */
8932 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8933 return 0;
8934
8935 icode = setcc_gen_code[(int) code];
8936 if (icode == CODE_FOR_nothing
8937 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8938 {
8939 /* We can only do this if it is one of the special cases that
8940 can be handled without an scc insn. */
8941 if ((code == LT && integer_zerop (arg1))
8942 || (! only_cheap && code == GE && integer_zerop (arg1)))
8943 ;
8944 else if (! only_cheap && (code == NE || code == EQ)
8945 && TREE_CODE (type) != REAL_TYPE
8946 && ((abs_optab->handlers[(int) operand_mode].insn_code
8947 != CODE_FOR_nothing)
8948 || (ffs_optab->handlers[(int) operand_mode].insn_code
8949 != CODE_FOR_nothing)))
8950 ;
8951 else
8952 return 0;
8953 }
8954
8955 if (! get_subtarget (target)
8956 || GET_MODE (subtarget) != operand_mode)
8957 subtarget = 0;
8958
8959 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8960
8961 if (target == 0)
8962 target = gen_reg_rtx (mode);
8963
8964 result = emit_store_flag (target, code, op0, op1,
8965 operand_mode, unsignedp, 1);
8966
8967 if (result)
8968 {
8969 if (invert)
8970 result = expand_binop (mode, xor_optab, result, const1_rtx,
8971 result, 0, OPTAB_LIB_WIDEN);
8972 return result;
8973 }
8974
8975 /* If this failed, we have to do this with set/compare/jump/set code. */
8976 if (!REG_P (target)
8977 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8978 target = gen_reg_rtx (GET_MODE (target));
8979
8980 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8981 result = compare_from_rtx (op0, op1, code, unsignedp,
8982 operand_mode, NULL_RTX);
8983 if (GET_CODE (result) == CONST_INT)
8984 return (((result == const0_rtx && ! invert)
8985 || (result != const0_rtx && invert))
8986 ? const0_rtx : const1_rtx);
8987
8988 /* The code of RESULT may not match CODE if compare_from_rtx
8989 decided to swap its operands and reverse the original code.
8990
8991 We know that compare_from_rtx returns either a CONST_INT or
8992 a new comparison code, so it is safe to just extract the
8993 code from RESULT. */
8994 code = GET_CODE (result);
8995
8996 label = gen_label_rtx ();
8997 gcc_assert (bcc_gen_fctn[(int) code]);
8998
8999 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9000 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9001 emit_label (label);
9002
9003 return target;
9004 }
9005
9006
9007 /* Stubs in case we haven't got a casesi insn. */
9008 #ifndef HAVE_casesi
9009 # define HAVE_casesi 0
9010 # define gen_casesi(a, b, c, d, e) (0)
9011 # define CODE_FOR_casesi CODE_FOR_nothing
9012 #endif
9013
9014 /* If the machine does not have a case insn that compares the bounds,
9015 this means extra overhead for dispatch tables, which raises the
9016 threshold for using them. */
9017 #ifndef CASE_VALUES_THRESHOLD
9018 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9019 #endif /* CASE_VALUES_THRESHOLD */
9020
9021 unsigned int
9022 case_values_threshold (void)
9023 {
9024 return CASE_VALUES_THRESHOLD;
9025 }
9026
9027 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9028 0 otherwise (i.e. if there is no casesi instruction). */
9029 int
9030 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9031 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9032 {
9033 enum machine_mode index_mode = SImode;
9034 int index_bits = GET_MODE_BITSIZE (index_mode);
9035 rtx op1, op2, index;
9036 enum machine_mode op_mode;
9037
9038 if (! HAVE_casesi)
9039 return 0;
9040
9041 /* Convert the index to SImode. */
9042 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9043 {
9044 enum machine_mode omode = TYPE_MODE (index_type);
9045 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9046
9047 /* We must handle the endpoints in the original mode. */
9048 index_expr = build2 (MINUS_EXPR, index_type,
9049 index_expr, minval);
9050 minval = integer_zero_node;
9051 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9052 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9053 omode, 1, default_label);
9054 /* Now we can safely truncate. */
9055 index = convert_to_mode (index_mode, index, 0);
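/* (Illustrative) With a DImode index on a target whose casesi operand
   is SImode, INDEX_EXPR - MINVAL was computed and compared against
   RANGE while still in DImode; out-of-range values have jumped to
   DEFAULT_LABEL, so the truncation just performed cannot discard
   significant bits.  */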
9056 }
9057 else
9058 {
9059 if (TYPE_MODE (index_type) != index_mode)
9060 {
9061 index_expr = convert (lang_hooks.types.type_for_size
9062 (index_bits, 0), index_expr);
9063 index_type = TREE_TYPE (index_expr);
9064 }
9065
9066 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9067 }
9068
9069 do_pending_stack_adjust ();
9070
9071 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9072 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9073 (index, op_mode))
9074 index = copy_to_mode_reg (op_mode, index);
9075
9076 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9077
9078 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9079 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9080 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9081 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9082 (op1, op_mode))
9083 op1 = copy_to_mode_reg (op_mode, op1);
9084
9085 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9086
9087 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9088 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9089 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9090 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9091 (op2, op_mode))
9092 op2 = copy_to_mode_reg (op_mode, op2);
9093
9094 emit_jump_insn (gen_casesi (index, op1, op2,
9095 table_label, default_label));
9096 return 1;
9097 }
9098
9099 /* Attempt to generate a tablejump instruction; same concept. */
9100 #ifndef HAVE_tablejump
9101 #define HAVE_tablejump 0
9102 #define gen_tablejump(x, y) (0)
9103 #endif
9104
9105 /* Subroutine of the next function.
9106
9107 INDEX is the value being switched on, with the lowest value
9108 in the table already subtracted.
9109 MODE is its expected mode (needed if INDEX is constant).
9110 RANGE is the length of the jump table.
9111 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9112
9113 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9114 index value is out of range. */
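/* For example (illustrative only): a switch whose cases cover 3..10
   typically reaches here with INDEX holding x - 3 and RANGE holding 7;
   the code below emits a bounds check against RANGE followed by an
   indirect jump through the dispatch table at TABLE_LABEL.  */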
9115
9116 static void
9117 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9118 rtx default_label)
9119 {
9120 rtx temp, vector;
9121
9122 if (INTVAL (range) > cfun->max_jumptable_ents)
9123 cfun->max_jumptable_ents = INTVAL (range);
9124
9125 /* Do an unsigned comparison (in the proper mode) between the index
9126 expression and the value which represents the length of the range.
9127 Since we just finished subtracting the lower bound of the range
9128 from the index expression, this comparison allows us to simultaneously
9129 check that the original index expression value is both greater than
9130 or equal to the minimum value of the range and less than or equal to
9131 the maximum value of the range. */
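/* (Illustrative) If the case values span LO..HI, INDEX now holds
   x - LO and RANGE holds HI - LO, so the single unsigned test
   "INDEX > RANGE" rejects both x > HI and x < LO, the latter having
   wrapped around to a large unsigned value.  */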
9132
9133 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9134 default_label);
9135
9136 /* If index is in range, it must fit in Pmode.
9137 Convert to Pmode so we can index with it. */
9138 if (mode != Pmode)
9139 index = convert_to_mode (Pmode, index, 1);
9140
9141 /* Don't let a MEM slip through, because then INDEX that comes
9142 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9143 and break_out_memory_refs will go to work on it and mess it up. */
9144 #ifdef PIC_CASE_VECTOR_ADDRESS
9145 if (flag_pic && !REG_P (index))
9146 index = copy_to_mode_reg (Pmode, index);
9147 #endif
9148
9149 /* If flag_force_addr were to affect this address
9150 it could interfere with the tricky assumptions made
9151 about addresses that contain label-refs,
9152 which may be valid only very near the tablejump itself. */
9153 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9154 GET_MODE_SIZE, because this indicates how large insns are. The other
9155 uses should all be Pmode, because they are addresses. This code
9156 could fail if addresses and insns are not the same size. */
9157 index = gen_rtx_PLUS (Pmode,
9158 gen_rtx_MULT (Pmode, index,
9159 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9160 gen_rtx_LABEL_REF (Pmode, table_label));
9161 #ifdef PIC_CASE_VECTOR_ADDRESS
9162 if (flag_pic)
9163 index = PIC_CASE_VECTOR_ADDRESS (index);
9164 else
9165 #endif
9166 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9167 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9168 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9169 convert_move (temp, vector, 0);
9170
9171 emit_jump_insn (gen_tablejump (temp, table_label));
9172
9173 /* If we are generating PIC code or if the table is PC-relative, the
9174 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9175 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9176 emit_barrier ();
9177 }
9178
9179 int
9180 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9181 rtx table_label, rtx default_label)
9182 {
9183 rtx index;
9184
9185 if (! HAVE_tablejump)
9186 return 0;
9187
9188 index_expr = fold_build2 (MINUS_EXPR, index_type,
9189 convert (index_type, index_expr),
9190 convert (index_type, minval));
9191 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9192 do_pending_stack_adjust ();
9193
9194 do_tablejump (index, TYPE_MODE (index_type),
9195 convert_modes (TYPE_MODE (index_type),
9196 TYPE_MODE (TREE_TYPE (range)),
9197 expand_expr (range, NULL_RTX,
9198 VOIDmode, 0),
9199 TYPE_UNSIGNED (TREE_TYPE (range))),
9200 table_label, default_label);
9201 return 1;
9202 }
9203
9204 /* Nonzero if the mode is a valid vector mode for this architecture.
9205 This returns nonzero even if there is no hardware support for the
9206 vector mode, but we can emulate with narrower modes. */
9207
9208 int
9209 vector_mode_valid_p (enum machine_mode mode)
9210 {
9211 enum mode_class class = GET_MODE_CLASS (mode);
9212 enum machine_mode innermode;
9213
9214 /* Doh! What's going on? */
9215 if (class != MODE_VECTOR_INT
9216 && class != MODE_VECTOR_FLOAT)
9217 return 0;
9218
9219 /* Hardware support. Woo hoo! */
9220 if (targetm.vector_mode_supported_p (mode))
9221 return 1;
9222
9223 innermode = GET_MODE_INNER (mode);
9224
9225 /* We should probably return 1 if requesting V4DI when we have no DI
9226 but do have V2DI; however, that case is probably very unlikely.  */
9227
9228 /* If we have support for the inner mode, we can safely emulate it.
9229 We may not have V2DI, but we can emulate it with a pair of DIs.  */
9230 return targetm.scalar_mode_supported_p (innermode);
9231 }
9232
9233 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9234 static rtx
9235 const_vector_from_tree (tree exp)
9236 {
9237 rtvec v;
9238 int units, i;
9239 tree link, elt;
9240 enum machine_mode inner, mode;
9241
9242 mode = TYPE_MODE (TREE_TYPE (exp));
9243
9244 if (initializer_zerop (exp))
9245 return CONST0_RTX (mode);
9246
9247 units = GET_MODE_NUNITS (mode);
9248 inner = GET_MODE_INNER (mode);
9249
9250 v = rtvec_alloc (units);
9251
9252 link = TREE_VECTOR_CST_ELTS (exp);
9253 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9254 {
9255 elt = TREE_VALUE (link);
9256
9257 if (TREE_CODE (elt) == REAL_CST)
9258 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9259 inner);
9260 else
9261 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9262 TREE_INT_CST_HIGH (elt),
9263 inner);
9264 }
9265
9266 /* Initialize remaining elements to 0. */
9267 for (; i < units; ++i)
9268 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9269
9270 return gen_rtx_CONST_VECTOR (mode, v);
9271 }
9272 #include "gt-expr.h"
9273