xref: /openbsd/gnu/gcc/gcc/expr.c (revision 0f748e8d)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2    Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3    2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4    Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING.  If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "output.h"
45 #include "typeclass.h"
46 #include "toplev.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "intl.h"
50 #include "tm_p.h"
51 #include "tree-iterator.h"
52 #include "tree-pass.h"
53 #include "tree-flow.h"
54 #include "target.h"
55 #include "timevar.h"
56 
57 /* Decide whether a function's arguments should be processed
58    from first to last or from last to first.
59 
60    They should be processed from last to first if the stack and args grow
61    in opposite directions, but only if we have push insns.  */
62 
63 #ifdef PUSH_ROUNDING
64 
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED	/* If it's last to first.  */
68 #endif
69 #endif
70 
71 #endif
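/* Editor's note: an illustrative reading of the logic above, not part of
   the original source.  Given PUSH_ROUNDING (i.e. push insns exist), on a
   hypothetical target where STACK_GROWS_DOWNWARD is defined but
   ARGS_GROW_DOWNWARD is not, the two defined() tests differ, so
   PUSH_ARGS_REVERSED is defined and arguments are pushed from last to
   first.  If both are defined, or neither, the tests match and arguments
   stay first to last.  */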
72 
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
80 
81 
82 /* If this is nonzero, we do not bother generating VOLATILE
83    around volatile memory references, and we are willing to
84    output indirect addresses.  If cse is to follow, we reject
85    indirect addresses so a useful potential cse is generated;
86    if it is used only once, instruction combination will produce
87    the same indirect address eventually.  */
88 int cse_not_expected;
89 
90 /* This structure is used by move_by_pieces to describe the move to
91    be performed.  */
92 struct move_by_pieces
93 {
94   rtx to;
95   rtx to_addr;
96   int autinc_to;
97   int explicit_inc_to;
98   rtx from;
99   rtx from_addr;
100   int autinc_from;
101   int explicit_inc_from;
102   unsigned HOST_WIDE_INT len;
103   HOST_WIDE_INT offset;
104   int reverse;
105 };
106 
107 /* This structure is used by store_by_pieces to describe the clear to
108    be performed.  */
109 
110 struct store_by_pieces
111 {
112   rtx to;
113   rtx to_addr;
114   int autinc_to;
115   int explicit_inc_to;
116   unsigned HOST_WIDE_INT len;
117   HOST_WIDE_INT offset;
118   rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119   void *constfundata;
120   int reverse;
121 };
122 
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 						     unsigned int,
125 						     unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 			      struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
130 static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
136 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 			       struct store_by_pieces *);
138 static rtx clear_storage_via_libcall (rtx, rtx, bool);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 				     HOST_WIDE_INT, enum machine_mode,
144 				     tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 			tree, tree, int);
148 
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
150 
151 static int is_aligning_offset (tree, tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 			     enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
162 
163 /* Record for each mode whether we can move a register directly to or
164    from an object of that mode in memory.  If we can't, we won't try
165    to use that mode directly when accessing a field of that mode.  */
166 
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
169 
170 /* Record for each mode whether we can float-extend from memory.  */
171 
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173 
174 /* This macro is used to determine whether move_by_pieces should be called
175    to perform a structure copy.  */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178   (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179    < (unsigned int) MOVE_RATIO)
180 #endif
181 
182 /* This macro is used to determine whether clear_by_pieces should be
183    called to clear storage.  */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187    < (unsigned int) CLEAR_RATIO)
188 #endif
189 
190 /* This macro is used to determine whether store_by_pieces should be
191    called to "memset" storage with byte values other than zero, or
192    to "memcpy" storage when the source is a constant string.  */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196    < (unsigned int) MOVE_RATIO)
197 #endif
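/* Editor's note: a worked example of the heuristic above, under assumed
   values that are not part of the original source.  Suppose MOVE_MAX_PIECES
   is 4, MOVE_RATIO is 3 and the copy is 8 well-aligned bytes: then
   move_by_pieces_ninsns (8, align, 5) counts two SImode moves, 2 < 3 holds,
   and MOVE_BY_PIECES_P is true, so the copy is expanded inline rather than
   through memcpy.  A 64-byte copy would need 16 such moves and fail the
   test.  */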
198 
199 /* This array records the insn_code of insns to perform block moves.  */
200 enum insn_code movmem_optab[NUM_MACHINE_MODES];
201 
202 /* This array records the insn_code of insns to perform block sets.  */
203 enum insn_code setmem_optab[NUM_MACHINE_MODES];
204 
205 /* These arrays record the insn_code of three different kinds of insns
206    to perform block compares.  */
207 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
209 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210 
211 /* Synchronization primitives.  */
212 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
231 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
232 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
233 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
234 
235 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */
236 
237 #ifndef SLOW_UNALIGNED_ACCESS
238 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
239 #endif
240 
241 /* This is run once per compilation to set up which modes can be used
242    directly in memory and to initialize the block move optab.  */
243 
244 void
245 init_expr_once (void)
246 {
247   rtx insn, pat;
248   enum machine_mode mode;
249   int num_clobbers;
250   rtx mem, mem1;
251   rtx reg;
252 
253   /* Try indexing by frame ptr and try by stack ptr.
254      It is known that on the Convex the stack ptr isn't a valid index.
255      With luck, one or the other is valid on any machine.  */
256   mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
257   mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
258 
259   /* A scratch register we can modify in-place below to avoid
260      useless RTL allocations.  */
261   reg = gen_rtx_REG (VOIDmode, -1);
262 
263   insn = rtx_alloc (INSN);
264   pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
265   PATTERN (insn) = pat;
266 
267   for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
268        mode = (enum machine_mode) ((int) mode + 1))
269     {
270       int regno;
271 
272       direct_load[(int) mode] = direct_store[(int) mode] = 0;
273       PUT_MODE (mem, mode);
274       PUT_MODE (mem1, mode);
275       PUT_MODE (reg, mode);
276 
277       /* See if there is some register that can be used in this mode and
278 	 directly loaded or stored from memory.  */
279 
280       if (mode != VOIDmode && mode != BLKmode)
281 	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
282 	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
283 	     regno++)
284 	  {
285 	    if (! HARD_REGNO_MODE_OK (regno, mode))
286 	      continue;
287 
288 	    REGNO (reg) = regno;
289 
290 	    SET_SRC (pat) = mem;
291 	    SET_DEST (pat) = reg;
292 	    if (recog (pat, insn, &num_clobbers) >= 0)
293 	      direct_load[(int) mode] = 1;
294 
295 	    SET_SRC (pat) = mem1;
296 	    SET_DEST (pat) = reg;
297 	    if (recog (pat, insn, &num_clobbers) >= 0)
298 	      direct_load[(int) mode] = 1;
299 
300 	    SET_SRC (pat) = reg;
301 	    SET_DEST (pat) = mem;
302 	    if (recog (pat, insn, &num_clobbers) >= 0)
303 	      direct_store[(int) mode] = 1;
304 
305 	    SET_SRC (pat) = reg;
306 	    SET_DEST (pat) = mem1;
307 	    if (recog (pat, insn, &num_clobbers) >= 0)
308 	      direct_store[(int) mode] = 1;
309 	  }
310     }
311 
312   mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
313 
314   for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
315        mode = GET_MODE_WIDER_MODE (mode))
316     {
317       enum machine_mode srcmode;
318       for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
319 	   srcmode = GET_MODE_WIDER_MODE (srcmode))
320 	{
321 	  enum insn_code ic;
322 
323 	  ic = can_extend_p (mode, srcmode, 0);
324 	  if (ic == CODE_FOR_nothing)
325 	    continue;
326 
327 	  PUT_MODE (mem, srcmode);
328 
329 	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
330 	    float_extend_from_mem[mode][srcmode] = true;
331 	}
332     }
333 }
334 
335 /* This is run at the start of compiling a function.  */
336 
337 void
338 init_expr (void)
339 {
340   cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
341 }
342 
343 /* Copy data from FROM to TO, where the machine modes are not the same.
344    Both modes may be integer, or both may be floating.
345    UNSIGNEDP should be nonzero if FROM is an unsigned type.
346    This causes zero-extension instead of sign-extension.  */
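/* Editor's note: a minimal usage sketch, not part of the original source;
   the modes and pseudos are made up for illustration:

       rtx src = gen_reg_rtx (SImode);
       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, 1);

   Because UNSIGNEDP is nonzero, this emits whatever zero-extension
   sequence the target provides; with UNSIGNEDP == 0 it would sign-extend
   instead.  */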
347 
348 void
349 convert_move (rtx to, rtx from, int unsignedp)
350 {
351   enum machine_mode to_mode = GET_MODE (to);
352   enum machine_mode from_mode = GET_MODE (from);
353   int to_real = SCALAR_FLOAT_MODE_P (to_mode);
354   int from_real = SCALAR_FLOAT_MODE_P (from_mode);
355   enum insn_code code;
356   rtx libcall;
357 
358   /* rtx code for making an equivalent value.  */
359   enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
360 			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
361 
362 
363   gcc_assert (to_real == from_real);
364 
365   /* If the source and destination are already the same, then there's
366      nothing to do.  */
367   if (to == from)
368     return;
369 
370   /* If FROM is a SUBREG that indicates that we have already done at least
371      the required extension, strip it.  We don't handle such SUBREGs as
372      TO here.  */
373 
374   if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
375       && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
376 	  >= GET_MODE_SIZE (to_mode))
377       && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
378     from = gen_lowpart (to_mode, from), from_mode = to_mode;
379 
380   gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
381 
382   if (to_mode == from_mode
383       || (from_mode == VOIDmode && CONSTANT_P (from)))
384     {
385       emit_move_insn (to, from);
386       return;
387     }
388 
389   if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
390     {
391       gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
392 
393       if (VECTOR_MODE_P (to_mode))
394 	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
395       else
396 	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
397 
398       emit_move_insn (to, from);
399       return;
400     }
401 
402   if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
403     {
404       convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
405       convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
406       return;
407     }
408 
409   if (to_real)
410     {
411       rtx value, insns;
412       convert_optab tab;
413 
414       gcc_assert ((GET_MODE_PRECISION (from_mode)
415 		   != GET_MODE_PRECISION (to_mode))
416 		  || (DECIMAL_FLOAT_MODE_P (from_mode)
417 		      != DECIMAL_FLOAT_MODE_P (to_mode)));
418 
419       if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
420 	/* Conversion between decimal float and binary float, same size.  */
421 	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
422       else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
423 	tab = sext_optab;
424       else
425 	tab = trunc_optab;
426 
427       /* Try converting directly if the insn is supported.  */
428 
429       code = tab->handlers[to_mode][from_mode].insn_code;
430       if (code != CODE_FOR_nothing)
431 	{
432 	  emit_unop_insn (code, to, from,
433 			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
434 	  return;
435 	}
436 
437       /* Otherwise use a libcall.  */
438       libcall = tab->handlers[to_mode][from_mode].libfunc;
439 
440       /* Is this conversion implemented yet?  */
441       gcc_assert (libcall);
442 
443       start_sequence ();
444       value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
445 				       1, from, from_mode);
446       insns = get_insns ();
447       end_sequence ();
448       emit_libcall_block (insns, to, value,
449 			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
450 								       from)
451 			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
452       return;
453     }
454 
455   /* Handle pointer conversion.  */			/* SPEE 900220.  */
456   /* Targets are expected to provide conversion insns between PxImode and
457      xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
458   if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
459     {
460       enum machine_mode full_mode
461 	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
462 
463       gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
464 		  != CODE_FOR_nothing);
465 
466       if (full_mode != from_mode)
467 	from = convert_to_mode (full_mode, from, unsignedp);
468       emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
469 		      to, from, UNKNOWN);
470       return;
471     }
472   if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
473     {
474       rtx new_from;
475       enum machine_mode full_mode
476 	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
477 
478       gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
479 		  != CODE_FOR_nothing);
480 
481       if (to_mode == full_mode)
482 	{
483 	  emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
484 			  to, from, UNKNOWN);
485 	  return;
486 	}
487 
488       new_from = gen_reg_rtx (full_mode);
489       emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
490 		      new_from, from, UNKNOWN);
491 
492       /* else proceed to integer conversions below.  */
493       from_mode = full_mode;
494       from = new_from;
495     }
496 
497   /* Now both modes are integers.  */
498 
499   /* Handle expanding beyond a word.  */
500   if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
501       && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
502     {
503       rtx insns;
504       rtx lowpart;
505       rtx fill_value;
506       rtx lowfrom;
507       int i;
508       enum machine_mode lowpart_mode;
509       int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
510 
511       /* Try converting directly if the insn is supported.  */
512       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
513 	  != CODE_FOR_nothing)
514 	{
515 	  /* If FROM is a SUBREG, put it into a register.  Do this
516 	     so that we always generate the same set of insns for
517 	     better cse'ing; if an intermediate assignment occurred,
518 	     we won't be doing the operation directly on the SUBREG.  */
519 	  if (optimize > 0 && GET_CODE (from) == SUBREG)
520 	    from = force_reg (from_mode, from);
521 	  emit_unop_insn (code, to, from, equiv_code);
522 	  return;
523 	}
524       /* Next, try converting via full word.  */
525       else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
526 	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
527 		   != CODE_FOR_nothing))
528 	{
529 	  if (REG_P (to))
530 	    {
531 	      if (reg_overlap_mentioned_p (to, from))
532 		from = force_reg (from_mode, from);
533 	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
534 	    }
535 	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
536 	  emit_unop_insn (code, to,
537 			  gen_lowpart (word_mode, to), equiv_code);
538 	  return;
539 	}
540 
541       /* No special multiword conversion insn; do it by hand.  */
542       start_sequence ();
543 
544       /* Since we will turn this into a no conflict block, we must ensure
545 	 that the source does not overlap the target.  */
546 
547       if (reg_overlap_mentioned_p (to, from))
548 	from = force_reg (from_mode, from);
549 
550       /* Get a copy of FROM widened to a word, if necessary.  */
551       if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
552 	lowpart_mode = word_mode;
553       else
554 	lowpart_mode = from_mode;
555 
556       lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
557 
558       lowpart = gen_lowpart (lowpart_mode, to);
559       emit_move_insn (lowpart, lowfrom);
560 
561       /* Compute the value to put in each remaining word.  */
562       if (unsignedp)
563 	fill_value = const0_rtx;
564       else
565 	{
566 #ifdef HAVE_slt
567 	  if (HAVE_slt
568 	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
569 	      && STORE_FLAG_VALUE == -1)
570 	    {
571 	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
572 			     lowpart_mode, 0);
573 	      fill_value = gen_reg_rtx (word_mode);
574 	      emit_insn (gen_slt (fill_value));
575 	    }
576 	  else
577 #endif
578 	    {
579 	      fill_value
580 		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
581 				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
582 				NULL_RTX, 0);
583 	      fill_value = convert_to_mode (word_mode, fill_value, 1);
584 	    }
585 	}
586 
587       /* Fill the remaining words.  */
588       for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
589 	{
590 	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
591 	  rtx subword = operand_subword (to, index, 1, to_mode);
592 
593 	  gcc_assert (subword);
594 
595 	  if (fill_value != subword)
596 	    emit_move_insn (subword, fill_value);
597 	}
598 
599       insns = get_insns ();
600       end_sequence ();
601 
602       emit_no_conflict_block (insns, to, from, NULL_RTX,
603 			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
604       return;
605     }
606 
607   /* Truncating multi-word to a word or less.  */
608   if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
609       && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
610     {
611       if (!((MEM_P (from)
612 	     && ! MEM_VOLATILE_P (from)
613 	     && direct_load[(int) to_mode]
614 	     && ! mode_dependent_address_p (XEXP (from, 0)))
615 	    || REG_P (from)
616 	    || GET_CODE (from) == SUBREG))
617 	from = force_reg (from_mode, from);
618       convert_move (to, gen_lowpart (word_mode, from), 0);
619       return;
620     }
621 
622   /* Now follow all the conversions between integers
623      no more than a word long.  */
624 
625   /* For truncation, usually we can just refer to FROM in a narrower mode.  */
626   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
627       && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
628 				GET_MODE_BITSIZE (from_mode)))
629     {
630       if (!((MEM_P (from)
631 	     && ! MEM_VOLATILE_P (from)
632 	     && direct_load[(int) to_mode]
633 	     && ! mode_dependent_address_p (XEXP (from, 0)))
634 	    || REG_P (from)
635 	    || GET_CODE (from) == SUBREG))
636 	from = force_reg (from_mode, from);
637       if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
638 	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
639 	from = copy_to_reg (from);
640       emit_move_insn (to, gen_lowpart (to_mode, from));
641       return;
642     }
643 
644   /* Handle extension.  */
645   if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
646     {
647       /* Convert directly if that works.  */
648       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
649 	  != CODE_FOR_nothing)
650 	{
651 	  emit_unop_insn (code, to, from, equiv_code);
652 	  return;
653 	}
654       else
655 	{
656 	  enum machine_mode intermediate;
657 	  rtx tmp;
658 	  tree shift_amount;
659 
660 	  /* Search for a mode to convert via.  */
661 	  for (intermediate = from_mode; intermediate != VOIDmode;
662 	       intermediate = GET_MODE_WIDER_MODE (intermediate))
663 	    if (((can_extend_p (to_mode, intermediate, unsignedp)
664 		  != CODE_FOR_nothing)
665 		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
666 		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
667 					       GET_MODE_BITSIZE (intermediate))))
668 		&& (can_extend_p (intermediate, from_mode, unsignedp)
669 		    != CODE_FOR_nothing))
670 	      {
671 		convert_move (to, convert_to_mode (intermediate, from,
672 						   unsignedp), unsignedp);
673 		return;
674 	      }
675 
676 	  /* No suitable intermediate mode.
677 	     Generate what we need with	shifts.  */
678 	  shift_amount = build_int_cst (NULL_TREE,
679 					GET_MODE_BITSIZE (to_mode)
680 					- GET_MODE_BITSIZE (from_mode));
681 	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
682 	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
683 			      to, unsignedp);
684 	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
685 			      to, unsignedp);
686 	  if (tmp != to)
687 	    emit_move_insn (to, tmp);
688 	  return;
689 	}
690     }
691 
692   /* Support special truncate insns for certain modes.  */
693   if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
694     {
695       emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
696 		      to, from, UNKNOWN);
697       return;
698     }
699 
700   /* Handle truncation of volatile memrefs, and so on;
701      the things that couldn't be truncated directly,
702      and for which there was no special instruction.
703 
704      ??? Code above formerly short-circuited this, for most integer
705      mode pairs, with a force_reg in from_mode followed by a recursive
706      call to this routine.  Appears always to have been wrong.  */
707   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
708     {
709       rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
710       emit_move_insn (to, temp);
711       return;
712     }
713 
714   /* Mode combination is not recognized.  */
715   gcc_unreachable ();
716 }
717 
718 /* Return an rtx for a value that would result
719    from converting X to mode MODE.
720    Both X and MODE may be floating, or both integer.
721    UNSIGNEDP is nonzero if X is an unsigned value.
722    This can be done by referring to a part of X in place
723    or by copying to a new temporary with conversion.  */
724 
725 rtx
726 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
727 {
728   return convert_modes (mode, VOIDmode, x, unsignedp);
729 }
730 
731 /* Return an rtx for a value that would result
732    from converting X from mode OLDMODE to mode MODE.
733    Both modes may be floating, or both integer.
734    UNSIGNEDP is nonzero if X is an unsigned value.
735 
736    This can be done by referring to a part of X in place
737    or by copying to a new temporary with conversion.
738 
739    You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
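/* Editor's note: an illustrative contrast with convert_to_mode, not part
   of the original source.  Passing OLDMODE matters mainly for constants,
   which carry VOIDmode themselves; for example

       rtx x = GEN_INT (-1);
       rtx y = convert_modes (DImode, QImode, x, 1);

   treats the constant as an unsigned QImode value, so Y ends up holding
   255 rather than a sign-extended -1.  The modes here are only an
   example.  */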
740 
741 rtx
742 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
743 {
744   rtx temp;
745 
746   /* If FROM is a SUBREG that indicates that we have already done at least
747      the required extension, strip it.  */
748 
749   if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
750       && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
751       && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
752     x = gen_lowpart (mode, x);
753 
754   if (GET_MODE (x) != VOIDmode)
755     oldmode = GET_MODE (x);
756 
757   if (mode == oldmode)
758     return x;
759 
760   /* There is one case that we must handle specially: If we are converting
761      a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
762      we are to interpret the constant as unsigned, gen_lowpart will do
763      the wrong thing if the constant appears negative.  What we want to do is
764      make the high-order word of the constant zero, not all ones.  */
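  /* Editor's note: a concrete instance of the case described above, not
     part of the original source.  With a 64-bit HOST_WIDE_INT, converting
     (const_int -1), known to be an unsigned 64-bit value, to 128-bit TImode
     must yield a constant whose high word is zero and whose low word is all
     ones (the value 2**64 - 1); gen_lowpart would instead hand back
     (const_int -1), which reads back as all 128 bits set.  */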
765 
766   if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
767       && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
768       && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
769     {
770       HOST_WIDE_INT val = INTVAL (x);
771 
772       if (oldmode != VOIDmode
773 	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
774 	{
775 	  int width = GET_MODE_BITSIZE (oldmode);
776 
777 	  /* We need to zero extend VAL.  */
778 	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
779 	}
780 
781       return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
782     }
783 
784   /* We can do this with a gen_lowpart if both desired and current modes
785      are integer, and this is either a constant integer, a register, or a
786      non-volatile MEM.  Except for the constant case where MODE is no
787      wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
788 
789   if ((GET_CODE (x) == CONST_INT
790        && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
791       || (GET_MODE_CLASS (mode) == MODE_INT
792 	  && GET_MODE_CLASS (oldmode) == MODE_INT
793 	  && (GET_CODE (x) == CONST_DOUBLE
794 	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
795 		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
796 		       && direct_load[(int) mode])
797 		      || (REG_P (x)
798 			  && (! HARD_REGISTER_P (x)
799 			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
800 			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
801 						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
802     {
803       /* ?? If we don't know OLDMODE, we have to assume here that
804 	 X does not need sign- or zero-extension.   This may not be
805 	 the case, but it's the best we can do.  */
806       if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
807 	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
808 	{
809 	  HOST_WIDE_INT val = INTVAL (x);
810 	  int width = GET_MODE_BITSIZE (oldmode);
811 
812 	  /* We must sign or zero-extend in this case.  Start by
813 	     zero-extending, then sign extend if we need to.  */
814 	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
815 	  if (! unsignedp
816 	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
817 	    val |= (HOST_WIDE_INT) (-1) << width;
818 
819 	  return gen_int_mode (val, mode);
820 	}
821 
822       return gen_lowpart (mode, x);
823     }
824 
825   /* Converting from integer constant into mode is always equivalent to an
826      subreg operation.  */
827   if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
828     {
829       gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
830       return simplify_gen_subreg (mode, x, oldmode, 0);
831     }
832 
833   temp = gen_reg_rtx (mode);
834   convert_move (temp, x, unsignedp);
835   return temp;
836 }
837 
838 /* STORE_MAX_PIECES is the number of bytes at a time that we can
839    store efficiently.  Due to internal GCC limitations, this is
840    MOVE_MAX_PIECES limited by the number of bytes GCC can represent
841    for an immediate constant.  */
842 
843 #define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
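/* Editor's note: a worked instance of the definition above, under assumed
   values that are not part of the original source.  On a host with a
   64-bit HOST_WIDE_INT and a target whose MOVE_MAX_PIECES is 16,
   STORE_MAX_PIECES is MIN (16, 16) = 16; with MOVE_MAX_PIECES of 4 it
   would be 4.  The 2 * sizeof (HOST_WIDE_INT) bound reflects the largest
   immediate constant GCC can synthesize (a pair of HOST_WIDE_INTs).  */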
844 
845 /* Determine whether the LEN bytes can be moved by using several move
846    instructions.  Return nonzero if a call to move_by_pieces should
847    succeed.  */
848 
849 int
850 can_move_by_pieces (unsigned HOST_WIDE_INT len,
851 		    unsigned int align ATTRIBUTE_UNUSED)
852 {
853   return MOVE_BY_PIECES_P (len, align);
854 }
855 
856 /* Generate several move instructions to copy LEN bytes from block FROM to
857    block TO.  (These are MEM rtx's with BLKmode).
858 
859    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
860    used to push FROM to the stack.
861 
862    ALIGN is maximum stack alignment we can assume.
863 
864    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
865    mempcpy, and if ENDP is 2 return memory the end minus one byte ala
866    stpcpy.  */
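/* Editor's note: an illustrative call, not part of the original source;
   TO and FROM stand for any BLKmode MEMs, and 16 and 128 are made-up
   numbers:

       rtx end = move_by_pieces (to, from, 16, 128, 1);

   copies 16 bytes assuming 128-bit alignment and, because ENDP is 1,
   returns a QImode MEM addressing the byte just past the copied block,
   the way mempcpy reports its result.  With ENDP == 0 it would simply
   return TO.  */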
867 
868 rtx
869 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
870 		unsigned int align, int endp)
871 {
872   struct move_by_pieces data;
873   rtx to_addr, from_addr = XEXP (from, 0);
874   unsigned int max_size = MOVE_MAX_PIECES + 1;
875   enum machine_mode mode = VOIDmode, tmode;
876   enum insn_code icode;
877 
878   align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
879 
880   data.offset = 0;
881   data.from_addr = from_addr;
882   if (to)
883     {
884       to_addr = XEXP (to, 0);
885       data.to = to;
886       data.autinc_to
887 	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
888 	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
889       data.reverse
890 	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
891     }
892   else
893     {
894       to_addr = NULL_RTX;
895       data.to = NULL_RTX;
896       data.autinc_to = 1;
897 #ifdef STACK_GROWS_DOWNWARD
898       data.reverse = 1;
899 #else
900       data.reverse = 0;
901 #endif
902     }
903   data.to_addr = to_addr;
904   data.from = from;
905   data.autinc_from
906     = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
907        || GET_CODE (from_addr) == POST_INC
908        || GET_CODE (from_addr) == POST_DEC);
909 
910   data.explicit_inc_from = 0;
911   data.explicit_inc_to = 0;
912   if (data.reverse) data.offset = len;
913   data.len = len;
914 
915   /* If copying requires more than two move insns,
916      copy addresses to registers (to make displacements shorter)
917      and use post-increment if available.  */
918   if (!(data.autinc_from && data.autinc_to)
919       && move_by_pieces_ninsns (len, align, max_size) > 2)
920     {
921       /* Find the mode of the largest move...  */
922       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
923 	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
924 	if (GET_MODE_SIZE (tmode) < max_size)
925 	  mode = tmode;
926 
927       if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
928 	{
929 	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
930 	  data.autinc_from = 1;
931 	  data.explicit_inc_from = -1;
932 	}
933       if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
934 	{
935 	  data.from_addr = copy_addr_to_reg (from_addr);
936 	  data.autinc_from = 1;
937 	  data.explicit_inc_from = 1;
938 	}
939       if (!data.autinc_from && CONSTANT_P (from_addr))
940 	data.from_addr = copy_addr_to_reg (from_addr);
941       if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
942 	{
943 	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
944 	  data.autinc_to = 1;
945 	  data.explicit_inc_to = -1;
946 	}
947       if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
948 	{
949 	  data.to_addr = copy_addr_to_reg (to_addr);
950 	  data.autinc_to = 1;
951 	  data.explicit_inc_to = 1;
952 	}
953       if (!data.autinc_to && CONSTANT_P (to_addr))
954 	data.to_addr = copy_addr_to_reg (to_addr);
955     }
956 
957   tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
958   if (align >= GET_MODE_ALIGNMENT (tmode))
959     align = GET_MODE_ALIGNMENT (tmode);
960   else
961     {
962       enum machine_mode xmode;
963 
964       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
965 	   tmode != VOIDmode;
966 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
967 	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
968 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
969 	  break;
970 
971       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
972     }
973 
974   /* First move what we can in the largest integer mode, then go to
975      successively smaller modes.  */
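  /* Editor's note: a worked example of the loop below, with made-up
     numbers, not part of the original source.  For LEN == 15 on a target
     whose usable MODE_INT pieces are 8, 4, 2 and 1 bytes, with sufficient
     alignment, the passes emit one DImode move (8 bytes), one SImode move
     (4), one HImode move (2) and one QImode move (1), leaving data.len at
     zero.  */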
976 
977   while (max_size > 1)
978     {
979       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
980 	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
981 	if (GET_MODE_SIZE (tmode) < max_size)
982 	  mode = tmode;
983 
984       if (mode == VOIDmode)
985 	break;
986 
987       icode = mov_optab->handlers[(int) mode].insn_code;
988       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
989 	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
990 
991       max_size = GET_MODE_SIZE (mode);
992     }
993 
994   /* The code above should have handled everything.  */
995   gcc_assert (!data.len);
996 
997   if (endp)
998     {
999       rtx to1;
1000 
1001       gcc_assert (!data.reverse);
1002       if (data.autinc_to)
1003 	{
1004 	  if (endp == 2)
1005 	    {
1006 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1007 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1008 	      else
1009 		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1010 								-1));
1011 	    }
1012 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1013 					   data.offset);
1014 	}
1015       else
1016 	{
1017 	  if (endp == 2)
1018 	    --data.offset;
1019 	  to1 = adjust_address (data.to, QImode, data.offset);
1020 	}
1021       return to1;
1022     }
1023   else
1024     return data.to;
1025 }
1026 
1027 /* Return number of insns required to move L bytes by pieces.
1028    ALIGN (in bits) is maximum alignment we can assume.  */
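/* Editor's note: an illustrative count, not part of the original source.
   Assuming pieces of 8, 4, 2 and 1 bytes and ample alignment, as in the
   worked example above, move_by_pieces_ninsns (15, align, 9) returns 4,
   while move_by_pieces_ninsns (16, align, 9) returns 2, since 16 splits
   evenly into two 8-byte moves.  */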
1029 
1030 static unsigned HOST_WIDE_INT
1031 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1032 		       unsigned int max_size)
1033 {
1034   unsigned HOST_WIDE_INT n_insns = 0;
1035   enum machine_mode tmode;
1036 
1037   tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1038   if (align >= GET_MODE_ALIGNMENT (tmode))
1039     align = GET_MODE_ALIGNMENT (tmode);
1040   else
1041     {
1042       enum machine_mode tmode, xmode;
1043 
1044       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1045 	   tmode != VOIDmode;
1046 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1047 	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1048 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
1049 	  break;
1050 
1051       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1052     }
1053 
1054   while (max_size > 1)
1055     {
1056       enum machine_mode mode = VOIDmode;
1057       enum insn_code icode;
1058 
1059       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1060 	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1061 	if (GET_MODE_SIZE (tmode) < max_size)
1062 	  mode = tmode;
1063 
1064       if (mode == VOIDmode)
1065 	break;
1066 
1067       icode = mov_optab->handlers[(int) mode].insn_code;
1068       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1069 	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1070 
1071       max_size = GET_MODE_SIZE (mode);
1072     }
1073 
1074   gcc_assert (!l);
1075   return n_insns;
1076 }
1077 
1078 /* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1079    with move instructions for mode MODE.  GENFUN is the gen_... function
1080    to make a move insn for that mode.  DATA has all the other info.  */
1081 
1082 static void
1083 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1084 		  struct move_by_pieces *data)
1085 {
1086   unsigned int size = GET_MODE_SIZE (mode);
1087   rtx to1 = NULL_RTX, from1;
1088 
1089   while (data->len >= size)
1090     {
1091       if (data->reverse)
1092 	data->offset -= size;
1093 
1094       if (data->to)
1095 	{
1096 	  if (data->autinc_to)
1097 	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1098 					     data->offset);
1099 	  else
1100 	    to1 = adjust_address (data->to, mode, data->offset);
1101 	}
1102 
1103       if (data->autinc_from)
1104 	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1105 					   data->offset);
1106       else
1107 	from1 = adjust_address (data->from, mode, data->offset);
1108 
1109       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1110 	emit_insn (gen_add2_insn (data->to_addr,
1111 				  GEN_INT (-(HOST_WIDE_INT)size)));
1112       if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1113 	emit_insn (gen_add2_insn (data->from_addr,
1114 				  GEN_INT (-(HOST_WIDE_INT)size)));
1115 
1116       if (data->to)
1117 	emit_insn ((*genfun) (to1, from1));
1118       else
1119 	{
1120 #ifdef PUSH_ROUNDING
1121 	  emit_single_push_insn (mode, from1, NULL);
1122 #else
1123 	  gcc_unreachable ();
1124 #endif
1125 	}
1126 
1127       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1128 	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1129       if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1130 	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1131 
1132       if (! data->reverse)
1133 	data->offset += size;
1134 
1135       data->len -= size;
1136     }
1137 }
1138 
1139 /* Emit code to move a block Y to a block X.  This may be done with
1140    string-move instructions, with multiple scalar move instructions,
1141    or with a library call.
1142 
1143    Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1144    SIZE is an rtx that says how long they are.
1145    ALIGN is the maximum alignment we can assume they have.
1146    METHOD describes what kind of copy this is, and what mechanisms may be used.
1147 
1148    Return the address of the new block, if memcpy is called and returns it,
1149    0 otherwise.  */
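/* Editor's note: a minimal usage sketch, not part of the original source;
   DST and SRC stand for any BLKmode MEMs covering at least 32 bytes:

       emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);

   may expand inline via move_by_pieces, via a movmem pattern, or fall back
   to calling memcpy, in that order of preference.  BLOCK_OP_NO_LIBCALL
   forbids the memcpy fallback, and BLOCK_OP_CALL_PARM is for copies that
   build up outgoing call arguments, where a libcall is used only if it
   cannot clobber them.  */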
1150 
1151 rtx
1152 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1153 {
1154   bool may_use_call;
1155   rtx retval = 0;
1156   unsigned int align;
1157 
1158   switch (method)
1159     {
1160     case BLOCK_OP_NORMAL:
1161     case BLOCK_OP_TAILCALL:
1162       may_use_call = true;
1163       break;
1164 
1165     case BLOCK_OP_CALL_PARM:
1166       may_use_call = block_move_libcall_safe_for_call_parm ();
1167 
1168       /* Make inhibit_defer_pop nonzero around the library call
1169 	 to force it to pop the arguments right away.  */
1170       NO_DEFER_POP;
1171       break;
1172 
1173     case BLOCK_OP_NO_LIBCALL:
1174       may_use_call = false;
1175       break;
1176 
1177     default:
1178       gcc_unreachable ();
1179     }
1180 
1181   align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1182 
1183   gcc_assert (MEM_P (x));
1184   gcc_assert (MEM_P (y));
1185   gcc_assert (size);
1186 
1187   /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1188      block copy is more efficient for other large modes, e.g. DCmode.  */
1189   x = adjust_address (x, BLKmode, 0);
1190   y = adjust_address (y, BLKmode, 0);
1191 
1192   /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1193      can be incorrect is coming from __builtin_memcpy.  */
1194   if (GET_CODE (size) == CONST_INT)
1195     {
1196       if (INTVAL (size) == 0)
1197 	return 0;
1198 
1199       x = shallow_copy_rtx (x);
1200       y = shallow_copy_rtx (y);
1201       set_mem_size (x, size);
1202       set_mem_size (y, size);
1203     }
1204 
1205   if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1206     move_by_pieces (x, y, INTVAL (size), align, 0);
1207   else if (emit_block_move_via_movmem (x, y, size, align))
1208     ;
1209   else if (may_use_call)
1210     retval = emit_block_move_via_libcall (x, y, size,
1211 					  method == BLOCK_OP_TAILCALL);
1212   else
1213     emit_block_move_via_loop (x, y, size, align);
1214 
1215   if (method == BLOCK_OP_CALL_PARM)
1216     OK_DEFER_POP;
1217 
1218   return retval;
1219 }
1220 
1221 /* A subroutine of emit_block_move.  Returns true if calling the
1222    block move libcall will not clobber any parameters which may have
1223    already been placed on the stack.  */
1224 
1225 static bool
1226 block_move_libcall_safe_for_call_parm (void)
1227 {
1228   /* If arguments are pushed on the stack, then they're safe.  */
1229   if (PUSH_ARGS)
1230     return true;
1231 
1232   /* If registers go on the stack anyway, any argument is sure to clobber
1233      an outgoing argument.  */
1234 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1235   {
1236     tree fn = emit_block_move_libcall_fn (false);
1237     (void) fn;
1238     if (REG_PARM_STACK_SPACE (fn) != 0)
1239       return false;
1240   }
1241 #endif
1242 
1243   /* If any argument goes in memory, then it might clobber an outgoing
1244      argument.  */
1245   {
1246     CUMULATIVE_ARGS args_so_far;
1247     tree fn, arg;
1248 
1249     fn = emit_block_move_libcall_fn (false);
1250     INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1251 
1252     arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1253     for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1254       {
1255 	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1256 	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1257 	if (!tmp || !REG_P (tmp))
1258 	  return false;
1259 	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1260 	  return false;
1261 	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1262       }
1263   }
1264   return true;
1265 }
1266 
1267 /* A subroutine of emit_block_move.  Expand a movmem pattern;
1268    return true if successful.  */
1269 
1270 static bool
1271 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1272 {
1273   rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1274   int save_volatile_ok = volatile_ok;
1275   enum machine_mode mode;
1276 
1277   /* Since this is a move insn, we don't care about volatility.  */
1278   volatile_ok = 1;
1279 
1280   /* Try the most limited insn first, because there's no point
1281      including more than one in the machine description unless
1282      the more limited one has some advantage.  */
1283 
1284   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1285        mode = GET_MODE_WIDER_MODE (mode))
1286     {
1287       enum insn_code code = movmem_optab[(int) mode];
1288       insn_operand_predicate_fn pred;
1289 
1290       if (code != CODE_FOR_nothing
1291 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1292 	     here because if SIZE is less than the mode mask, as it is
1293 	     returned by the macro, it will definitely be less than the
1294 	     actual mode mask.  */
1295 	  && ((GET_CODE (size) == CONST_INT
1296 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1297 		   <= (GET_MODE_MASK (mode) >> 1)))
1298 	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1299 	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1300 	      || (*pred) (x, BLKmode))
1301 	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1302 	      || (*pred) (y, BLKmode))
1303 	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1304 	      || (*pred) (opalign, VOIDmode)))
1305 	{
1306 	  rtx op2;
1307 	  rtx last = get_last_insn ();
1308 	  rtx pat;
1309 
1310 	  op2 = convert_to_mode (mode, size, 1);
1311 	  pred = insn_data[(int) code].operand[2].predicate;
1312 	  if (pred != 0 && ! (*pred) (op2, mode))
1313 	    op2 = copy_to_mode_reg (mode, op2);
1314 
1315 	  /* ??? When called via emit_block_move_for_call, it'd be
1316 	     nice if there were some way to inform the backend, so
1317 	     that it doesn't fail the expansion because it thinks
1318 	     emitting the libcall would be more efficient.  */
1319 
1320 	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1321 	  if (pat)
1322 	    {
1323 	      emit_insn (pat);
1324 	      volatile_ok = save_volatile_ok;
1325 	      return true;
1326 	    }
1327 	  else
1328 	    delete_insns_since (last);
1329 	}
1330     }
1331 
1332   volatile_ok = save_volatile_ok;
1333   return false;
1334 }
1335 
1336 /* A subroutine of emit_block_move.  Expand a call to memcpy.
1337    Return the return value from memcpy, 0 otherwise.  */
1338 
1339 static rtx
1340 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1341 {
1342   rtx dst_addr, src_addr;
1343   tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1344   enum machine_mode size_mode;
1345   rtx retval;
1346 
1347   /* Emit code to copy the addresses of DST and SRC and SIZE into new
1348      pseudos.  We can then place those new pseudos into a VAR_DECL and
1349      use them later.  */
1350 
1351   dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1352   src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1353 
1354   dst_addr = convert_memory_address (ptr_mode, dst_addr);
1355   src_addr = convert_memory_address (ptr_mode, src_addr);
1356 
1357   dst_tree = make_tree (ptr_type_node, dst_addr);
1358   src_tree = make_tree (ptr_type_node, src_addr);
1359 
1360   size_mode = TYPE_MODE (sizetype);
1361 
1362   size = convert_to_mode (size_mode, size, 1);
1363   size = copy_to_mode_reg (size_mode, size);
1364 
1365   /* It is incorrect to use the libcall calling conventions to call
1366      memcpy in this context.  This could be a user call to memcpy and
1367      the user may wish to examine the return value from memcpy.  For
1368      targets where libcalls and normal calls have different conventions
1369      for returning pointers, we could end up generating incorrect code.  */
1370 
1371   size_tree = make_tree (sizetype, size);
1372 
1373   fn = emit_block_move_libcall_fn (true);
1374   arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1375   arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1376   arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1377 
1378   /* Now we have to build up the CALL_EXPR itself.  */
1379   call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1380   call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1381 		      call_expr, arg_list, NULL_TREE);
1382   CALL_EXPR_TAILCALL (call_expr) = tailcall;
1383 
1384   retval = expand_normal (call_expr);
1385 
1386   return retval;
1387 }
1388 
1389 /* A subroutine of emit_block_move_via_libcall.  Create the tree node
1390    for the function we use for block copies.  The first time FOR_CALL
1391    is true, we call assemble_external.  */
1392 
1393 static GTY(()) tree block_move_fn;
1394 
1395 void
1396 init_block_move_fn (tree decl, const char *asmspec)
1397 {
1398   if (!block_move_fn)
1399     {
1400       tree args, fn;
1401 
1402       fn = get_identifier ("memcpy");
1403       args = build_function_type_list (ptr_type_node, ptr_type_node,
1404 				       const_ptr_type_node, sizetype,
1405 				       NULL_TREE);
1406 
1407       fn = build_decl (FUNCTION_DECL, fn, args);
1408       DECL_EXTERNAL (fn) = 1;
1409       TREE_PUBLIC (fn) = 1;
1410       DECL_ARTIFICIAL (fn) = 1;
1411       TREE_NOTHROW (fn) = 1;
1412       if (decl != NULL_TREE && DECL_VISIBILITY_SPECIFIED (decl))
1413 	DECL_VISIBILITY (fn) = DECL_VISIBILITY (decl);
1414       else
1415 	DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1416       DECL_VISIBILITY_SPECIFIED (fn) = 1;
1417 
1418       block_move_fn = fn;
1419     }
1420 
1421   if (asmspec)
1422     set_user_assembler_name (block_move_fn, asmspec);
1423 }
1424 
1425 static tree
1426 emit_block_move_libcall_fn (int for_call)
1427 {
1428   static bool emitted_extern;
1429 
1430   if (!block_move_fn)
1431     init_block_move_fn (NULL_TREE, NULL);
1432 
1433   if (for_call && !emitted_extern)
1434     {
1435       emitted_extern = true;
1436       make_decl_rtl (block_move_fn);
1437       assemble_external (block_move_fn);
1438     }
1439 
1440   return block_move_fn;
1441 }
1442 
1443 /* A subroutine of emit_block_move.  Copy the data via an explicit
1444    loop.  This is used only when libcalls are forbidden.  */
1445 /* ??? It'd be nice to copy in hunks larger than QImode.  */
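/* Editor's note: the RTL emitted below behaves like this C sketch (an
   illustration, not part of the original source; ITER and SIZE are
   unsigned):

       iter = 0;
       goto cmp;
     top:
       ((char *) x)[iter] = ((char *) y)[iter];
       iter++;
     cmp:
       if (iter < size)
         goto top;

   i.e. a byte-at-a-time copy with the test at the bottom, entered through
   an initial jump so that a zero SIZE copies nothing.  */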
1446 
1447 static void
1448 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1449 			  unsigned int align ATTRIBUTE_UNUSED)
1450 {
1451   rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1452   enum machine_mode iter_mode;
1453 
1454   iter_mode = GET_MODE (size);
1455   if (iter_mode == VOIDmode)
1456     iter_mode = word_mode;
1457 
1458   top_label = gen_label_rtx ();
1459   cmp_label = gen_label_rtx ();
1460   iter = gen_reg_rtx (iter_mode);
1461 
1462   emit_move_insn (iter, const0_rtx);
1463 
1464   x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1465   y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1466   do_pending_stack_adjust ();
1467 
1468   emit_jump (cmp_label);
1469   emit_label (top_label);
1470 
1471   tmp = convert_modes (Pmode, iter_mode, iter, true);
1472   x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1473   y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1474   x = change_address (x, QImode, x_addr);
1475   y = change_address (y, QImode, y_addr);
1476 
1477   emit_move_insn (x, y);
1478 
1479   tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1480 			     true, OPTAB_LIB_WIDEN);
1481   if (tmp != iter)
1482     emit_move_insn (iter, tmp);
1483 
1484   emit_label (cmp_label);
1485 
1486   emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1487 			   true, top_label);
1488 }
1489 
1490 /* Copy all or part of a value X into registers starting at REGNO.
1491    The number of registers to be filled is NREGS.  */
1492 
1493 void
1494 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1495 {
1496   int i;
1497 #ifdef HAVE_load_multiple
1498   rtx pat;
1499   rtx last;
1500 #endif
1501 
1502   if (nregs == 0)
1503     return;
1504 
1505   if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1506     x = validize_mem (force_const_mem (mode, x));
1507 
1508   /* See if the machine can do this with a load multiple insn.  */
1509 #ifdef HAVE_load_multiple
1510   if (HAVE_load_multiple)
1511     {
1512       last = get_last_insn ();
1513       pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1514 			       GEN_INT (nregs));
1515       if (pat)
1516 	{
1517 	  emit_insn (pat);
1518 	  return;
1519 	}
1520       else
1521 	delete_insns_since (last);
1522     }
1523 #endif
1524 
1525   for (i = 0; i < nregs; i++)
1526     emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1527 		    operand_subword_force (x, i, mode));
1528 }
1529 
1530 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1531    The number of registers to be filled is NREGS.  */
1532 
1533 void
1534 move_block_from_reg (int regno, rtx x, int nregs)
1535 {
1536   int i;
1537 
1538   if (nregs == 0)
1539     return;
1540 
1541   /* See if the machine can do this with a store multiple insn.  */
1542 #ifdef HAVE_store_multiple
1543   if (HAVE_store_multiple)
1544     {
1545       rtx last = get_last_insn ();
1546       rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1547 				    GEN_INT (nregs));
1548       if (pat)
1549 	{
1550 	  emit_insn (pat);
1551 	  return;
1552 	}
1553       else
1554 	delete_insns_since (last);
1555     }
1556 #endif
1557 
1558   for (i = 0; i < nregs; i++)
1559     {
1560       rtx tem = operand_subword (x, i, 1, BLKmode);
1561 
1562       gcc_assert (tem);
1563 
1564       emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1565     }
1566 }
1567 
1568 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1569    ORIG, where ORIG is a non-consecutive group of registers represented by
1570    a PARALLEL.  The clone is identical to the original except in that the
1571    original set of registers is replaced by a new set of pseudo registers.
1572    The new set has the same modes as the original set.  */
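/* Editor's note: an illustration of the PARALLEL shape involved, not part
   of the original source; the register numbers are made up.  A value
   passed in two registers might look like

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   where each CONST_INT is the byte offset of that piece; gen_group_rtx
   returns the same structure with each hard register replaced by a fresh
   pseudo of the same mode.  */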
1573 
1574 rtx
1575 gen_group_rtx (rtx orig)
1576 {
1577   int i, length;
1578   rtx *tmps;
1579 
1580   gcc_assert (GET_CODE (orig) == PARALLEL);
1581 
1582   length = XVECLEN (orig, 0);
1583   tmps = alloca (sizeof (rtx) * length);
1584 
1585   /* Skip a NULL entry in first slot.  */
1586   i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1587 
1588   if (i)
1589     tmps[0] = 0;
1590 
1591   for (; i < length; i++)
1592     {
1593       enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1594       rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1595 
1596       tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1597     }
1598 
1599   return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1600 }
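
/* Usage sketch (hypothetical caller, for illustration only): given a
   PARALLEL describing the hard registers holding a value, make an
   identically shaped PARALLEL of fresh pseudos and copy the hard
   registers into it so the value survives later register usage.  */

static rtx ATTRIBUTE_UNUSED
example_copy_group_to_pseudos (rtx hard_group)
{
  rtx pseudos = gen_group_rtx (hard_group);

  /* Both PARALLELs have the same length, modes and offsets by
     construction, which is what emit_group_move requires.  */
  emit_group_move (pseudos, hard_group);
  return pseudos;
}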
1601 
1602 /* A subroutine of emit_group_load.  Arguments as for emit_group_load,
1603    except that values are placed in TMPS[i], and must later be moved
1604    into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1605 
1606 static void
1607 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1608 {
1609   rtx src;
1610   int start, i;
1611   enum machine_mode m = GET_MODE (orig_src);
1612 
1613   gcc_assert (GET_CODE (dst) == PARALLEL);
1614 
1615   if (m != VOIDmode
1616       && !SCALAR_INT_MODE_P (m)
1617       && !MEM_P (orig_src)
1618       && GET_CODE (orig_src) != CONCAT)
1619     {
1620       enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1621       if (imode == BLKmode)
1622 	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1623       else
1624 	src = gen_reg_rtx (imode);
1625       if (imode != BLKmode)
1626 	src = gen_lowpart (GET_MODE (orig_src), src);
1627       emit_move_insn (src, orig_src);
1628       /* ...and back again.  */
1629       if (imode != BLKmode)
1630 	src = gen_lowpart (imode, src);
1631       emit_group_load_1 (tmps, dst, src, type, ssize);
1632       return;
1633     }
1634 
1635   /* Check for a NULL entry, used to indicate that the parameter goes
1636      both on the stack and in registers.  */
1637   if (XEXP (XVECEXP (dst, 0, 0), 0))
1638     start = 0;
1639   else
1640     start = 1;
1641 
1642   /* Process the pieces.  */
1643   for (i = start; i < XVECLEN (dst, 0); i++)
1644     {
1645       enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1646       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1647       unsigned int bytelen = GET_MODE_SIZE (mode);
1648       int shift = 0;
1649 
1650       /* Handle trailing fragments that run over the size of the struct.  */
1651       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1652 	{
1653 	  /* Arrange to shift the fragment to where it belongs.
1654 	     extract_bit_field loads to the lsb of the reg.  */
1655 	  if (
1656 #ifdef BLOCK_REG_PADDING
1657 	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1658 	      == (BYTES_BIG_ENDIAN ? upward : downward)
1659 #else
1660 	      BYTES_BIG_ENDIAN
1661 #endif
1662 	      )
1663 	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1664 	  bytelen = ssize - bytepos;
1665 	  gcc_assert (bytelen > 0);
1666 	}
1667 
1668       /* If we won't be loading directly from memory, protect the real source
1669 	 from strange tricks we might play; but make sure that the source can
1670 	 be loaded directly into the destination.  */
1671       src = orig_src;
1672       if (!MEM_P (orig_src)
1673 	  && (!CONSTANT_P (orig_src)
1674 	      || (GET_MODE (orig_src) != mode
1675 		  && GET_MODE (orig_src) != VOIDmode)))
1676 	{
1677 	  if (GET_MODE (orig_src) == VOIDmode)
1678 	    src = gen_reg_rtx (mode);
1679 	  else
1680 	    src = gen_reg_rtx (GET_MODE (orig_src));
1681 
1682 	  emit_move_insn (src, orig_src);
1683 	}
1684 
1685       /* Optimize the access just a bit.  */
1686       if (MEM_P (src)
1687 	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1688 	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1689 	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1690 	  && bytelen == GET_MODE_SIZE (mode))
1691 	{
1692 	  tmps[i] = gen_reg_rtx (mode);
1693 	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1694 	}
1695       else if (COMPLEX_MODE_P (mode)
1696 	       && GET_MODE (src) == mode
1697 	       && bytelen == GET_MODE_SIZE (mode))
1698 	/* Let emit_move_complex do the bulk of the work.  */
1699 	tmps[i] = src;
1700       else if (GET_CODE (src) == CONCAT)
1701 	{
1702 	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1703 	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1704 
1705 	  if ((bytepos == 0 && bytelen == slen0)
1706 	      || (bytepos != 0 && bytepos + bytelen <= slen))
1707 	    {
1708 	      /* The following assumes that the concatenated objects all
1709 		 have the same size.  In this case, a simple calculation
1710 		 can be used to determine the object and the bit field
1711 		 to be extracted.  */
1712 	      tmps[i] = XEXP (src, bytepos / slen0);
1713 	      if (! CONSTANT_P (tmps[i])
1714 		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1715 		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1716 					     (bytepos % slen0) * BITS_PER_UNIT,
1717 					     1, NULL_RTX, mode, mode);
1718 	    }
1719 	  else
1720 	    {
1721 	      rtx mem;
1722 
1723 	      gcc_assert (!bytepos);
1724 	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
1725 	      emit_move_insn (mem, src);
1726 	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1727 					   0, 1, NULL_RTX, mode, mode);
1728 	    }
1729 	}
1730       /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1731 	 SIMD register, which is currently broken.  While we get GCC
1732 	 to emit proper RTL for these cases, let's dump to memory.  */
1733       else if (VECTOR_MODE_P (GET_MODE (dst))
1734 	       && REG_P (src))
1735 	{
1736 	  int slen = GET_MODE_SIZE (GET_MODE (src));
1737 	  rtx mem;
1738 
1739 	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
1740 	  emit_move_insn (mem, src);
1741 	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
1742 	}
1743       else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1744                && XVECLEN (dst, 0) > 1)
1745         tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1746       else if (CONSTANT_P (src)
1747 	       || (REG_P (src) && GET_MODE (src) == mode))
1748 	tmps[i] = src;
1749       else
1750 	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1751 				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1752 				     mode, mode);
1753 
1754       if (shift)
1755 	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1756 				build_int_cst (NULL_TREE, shift), tmps[i], 0);
1757     }
1758 }
1759 
1760 /* Emit code to move a block SRC of type TYPE to a block DST,
1761    where DST is non-consecutive registers represented by a PARALLEL.
1762    SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1763    if not known.  */
1764 
1765 void
1766 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1767 {
1768   rtx *tmps;
1769   int i;
1770 
1771   tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1772   emit_group_load_1 (tmps, dst, src, type, ssize);
1773 
1774   /* Copy the extracted pieces into the proper (probable) hard regs.  */
1775   for (i = 0; i < XVECLEN (dst, 0); i++)
1776     {
1777       rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1778       if (d == NULL)
1779 	continue;
1780       emit_move_insn (d, tmps[i]);
1781     }
1782 }
1783 
1784 /* Similar, but load SRC into new pseudos in a format that looks like
1785    PARALLEL.  This can later be fed to emit_group_move to get things
1786    in the right place.  */
1787 
1788 rtx
1789 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1790 {
1791   rtvec vec;
1792   int i;
1793 
1794   vec = rtvec_alloc (XVECLEN (parallel, 0));
1795   emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1796 
1797   /* Convert the vector to look just like the original PARALLEL, except
1798      with the computed values.  */
1799   for (i = 0; i < XVECLEN (parallel, 0); i++)
1800     {
1801       rtx e = XVECEXP (parallel, 0, i);
1802       rtx d = XEXP (e, 0);
1803 
1804       if (d)
1805 	{
1806 	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1807 	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1808 	}
1809       RTVEC_ELT (vec, i) = e;
1810     }
1811 
1812   return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1813 }
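
/* Usage sketch (hypothetical caller, for illustration only): load an
   aggregate into temporaries shaped like the hard-register PARALLEL
   REGS, emit any other setup code, and only then fill the hard
   registers themselves, mirroring how call argument setup delays
   clobbering outgoing registers.  */

static void ATTRIBUTE_UNUSED
example_two_phase_group_load (rtx regs, rtx src, tree type, int ssize)
{
  rtx temps = emit_group_load_into_temps (regs, src, type, ssize);

  /* ... other expansion that must not clobber REGS would go here ...  */

  emit_group_move (regs, temps);
}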
1814 
1815 /* Emit code to move a block SRC to block DST, where SRC and DST are
1816    non-consecutive groups of registers, each represented by a PARALLEL.  */
1817 
1818 void
1819 emit_group_move (rtx dst, rtx src)
1820 {
1821   int i;
1822 
1823   gcc_assert (GET_CODE (src) == PARALLEL
1824 	      && GET_CODE (dst) == PARALLEL
1825 	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
1826 
1827   /* Skip first entry if NULL.  */
1828   for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1829     emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1830 		    XEXP (XVECEXP (src, 0, i), 0));
1831 }
1832 
1833 /* Move a group of registers represented by a PARALLEL into pseudos.  */
1834 
1835 rtx
1836 emit_group_move_into_temps (rtx src)
1837 {
1838   rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1839   int i;
1840 
1841   for (i = 0; i < XVECLEN (src, 0); i++)
1842     {
1843       rtx e = XVECEXP (src, 0, i);
1844       rtx d = XEXP (e, 0);
1845 
1846       if (d)
1847 	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1848       RTVEC_ELT (vec, i) = e;
1849     }
1850 
1851   return gen_rtx_PARALLEL (GET_MODE (src), vec);
1852 }
1853 
1854 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1855    where SRC is non-consecutive registers represented by a PARALLEL.
1856    SSIZE represents the total size of block ORIG_DST, or -1 if not
1857    known.  */
1858 
1859 void
1860 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1861 {
1862   rtx *tmps, dst;
1863   int start, finish, i;
1864   enum machine_mode m = GET_MODE (orig_dst);
1865 
1866   gcc_assert (GET_CODE (src) == PARALLEL);
1867 
1868   if (!SCALAR_INT_MODE_P (m)
1869       && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1870     {
1871       enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1872       if (imode == BLKmode)
1873         dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1874       else
1875         dst = gen_reg_rtx (imode);
1876       emit_group_store (dst, src, type, ssize);
1877       if (imode != BLKmode)
1878         dst = gen_lowpart (GET_MODE (orig_dst), dst);
1879       emit_move_insn (orig_dst, dst);
1880       return;
1881     }
1882 
1883   /* Check for a NULL entry, used to indicate that the parameter goes
1884      both on the stack and in registers.  */
1885   if (XEXP (XVECEXP (src, 0, 0), 0))
1886     start = 0;
1887   else
1888     start = 1;
1889   finish = XVECLEN (src, 0);
1890 
1891   tmps = alloca (sizeof (rtx) * finish);
1892 
1893   /* Copy the (probable) hard regs into pseudos.  */
1894   for (i = start; i < finish; i++)
1895     {
1896       rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1897       if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1898 	{
1899 	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
1900 	  emit_move_insn (tmps[i], reg);
1901 	}
1902       else
1903 	tmps[i] = reg;
1904     }
1905 
1906   /* If we won't be storing directly into memory, protect the real destination
1907      from strange tricks we might play.  */
1908   dst = orig_dst;
1909   if (GET_CODE (dst) == PARALLEL)
1910     {
1911       rtx temp;
1912 
1913       /* We can get a PARALLEL dst if there is a conditional expression in
1914 	 a return statement.  In that case, the dst and src are the same,
1915 	 so no action is necessary.  */
1916       if (rtx_equal_p (dst, src))
1917 	return;
1918 
1919       /* It is unclear if we can ever reach here, but we may as well handle
1920 	 it.  Allocate a temporary, and split this into a store/load to/from
1921 	 the temporary.  */
1922 
1923       temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1924       emit_group_store (temp, src, type, ssize);
1925       emit_group_load (dst, temp, type, ssize);
1926       return;
1927     }
1928   else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1929     {
1930       enum machine_mode outer = GET_MODE (dst);
1931       enum machine_mode inner;
1932       HOST_WIDE_INT bytepos;
1933       bool done = false;
1934       rtx temp;
1935 
1936       if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1937 	dst = gen_reg_rtx (outer);
1938 
1939       /* Make life a bit easier for combine.  */
1940       /* If the first element of the vector is the low part
1941 	 of the destination mode, use a paradoxical subreg to
1942 	 initialize the destination.  */
1943       if (start < finish)
1944 	{
1945 	  inner = GET_MODE (tmps[start]);
1946 	  bytepos = subreg_lowpart_offset (inner, outer);
1947 	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1948 	    {
1949 	      temp = simplify_gen_subreg (outer, tmps[start],
1950 					  inner, 0);
1951 	      if (temp)
1952 		{
1953 		  emit_move_insn (dst, temp);
1954 		  done = true;
1955 		  start++;
1956 		}
1957 	    }
1958 	}
1959 
1960       /* If the first element wasn't the low part, try the last.  */
1961       if (!done
1962 	  && start < finish - 1)
1963 	{
1964 	  inner = GET_MODE (tmps[finish - 1]);
1965 	  bytepos = subreg_lowpart_offset (inner, outer);
1966 	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1967 	    {
1968 	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
1969 					  inner, 0);
1970 	      if (temp)
1971 		{
1972 		  emit_move_insn (dst, temp);
1973 		  done = true;
1974 		  finish--;
1975 		}
1976 	    }
1977 	}
1978 
1979       /* Otherwise, simply initialize the result to zero.  */
1980       if (!done)
1981         emit_move_insn (dst, CONST0_RTX (outer));
1982     }
1983 
1984   /* Process the pieces.  */
1985   for (i = start; i < finish; i++)
1986     {
1987       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1988       enum machine_mode mode = GET_MODE (tmps[i]);
1989       unsigned int bytelen = GET_MODE_SIZE (mode);
1990       rtx dest = dst;
1991 
1992       /* Handle trailing fragments that run over the size of the struct.  */
1993       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1994 	{
1995 	  /* store_bit_field always takes its value from the lsb.
1996 	     Move the fragment to the lsb if it's not already there.  */
1997 	  if (
1998 #ifdef BLOCK_REG_PADDING
1999 	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2000 	      == (BYTES_BIG_ENDIAN ? upward : downward)
2001 #else
2002 	      BYTES_BIG_ENDIAN
2003 #endif
2004 	      )
2005 	    {
2006 	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2007 	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2008 				      build_int_cst (NULL_TREE, shift),
2009 				      tmps[i], 0);
2010 	    }
2011 	  bytelen = ssize - bytepos;
2012 	}
2013 
2014       if (GET_CODE (dst) == CONCAT)
2015 	{
2016 	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2017 	    dest = XEXP (dst, 0);
2018 	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2019 	    {
2020 	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2021 	      dest = XEXP (dst, 1);
2022 	    }
2023 	  else
2024 	    {
2025 	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2026 	      dest = assign_stack_temp (GET_MODE (dest),
2027 				        GET_MODE_SIZE (GET_MODE (dest)), 0);
2028 	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2029 			      tmps[i]);
2030 	      dst = dest;
2031 	      break;
2032 	    }
2033 	}
2034 
2035       /* Optimize the access just a bit.  */
2036       if (MEM_P (dest)
2037 	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2038 	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2039 	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2040 	  && bytelen == GET_MODE_SIZE (mode))
2041 	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2042       else
2043 	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2044 			 mode, tmps[i]);
2045     }
2046 
2047   /* Copy from the pseudo into the (probable) hard reg.  */
2048   if (orig_dst != dst)
2049     emit_move_insn (orig_dst, dst);
2050 }
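
/* Usage sketch (hypothetical caller, for illustration only): copy a
   value living in the non-consecutive register group SRC (a PARALLEL)
   into a stack temporary of TYPE, assuming TYPE has a known constant
   size, and return the temporary.  */

static rtx ATTRIBUTE_UNUSED
example_store_group_to_temp (rtx src, tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  rtx temp = assign_stack_temp (BLKmode, size, 0);

  /* Each piece of SRC lands at its recorded byte offset; SIZE tells
     emit_group_store where the aggregate really ends.  */
  emit_group_store (temp, src, type, size);
  return temp;
}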
2051 
2052 /* Generate code to copy a BLKmode object of TYPE out of a
2053    set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2054    is null, a stack temporary is created.  TGTBLK is returned.
2055 
2056    The purpose of this routine is to handle functions that return
2057    BLKmode structures in registers.  Some machines (the PA for example)
2058    want to return all small structures in registers regardless of the
2059    structure's alignment.  */
2060 
2061 rtx
2062 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2063 {
2064   unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2065   rtx src = NULL, dst = NULL;
2066   unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2067   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2068 
2069   if (tgtblk == 0)
2070     {
2071       tgtblk = assign_temp (build_qualified_type (type,
2072 						  (TYPE_QUALS (type)
2073 						   | TYPE_QUAL_CONST)),
2074 			    0, 1, 1);
2075       preserve_temp_slots (tgtblk);
2076     }
2077 
2078   /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2079      into a new pseudo which is a full word.  */
2080 
2081   if (GET_MODE (srcreg) != BLKmode
2082       && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2083     srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2084 
2085   /* If the structure doesn't take up a whole number of words, see whether
2086      SRCREG is padded on the left or on the right.  If it's on the left,
2087      set PADDING_CORRECTION to the number of bits to skip.
2088 
2089      In most ABIs, the structure will be returned at the least significant end of
2090      the register, which translates to right padding on little-endian
2091      targets and left padding on big-endian targets.  The opposite
2092      holds if the structure is returned at the most significant
2093      end of the register.  */
2094   if (bytes % UNITS_PER_WORD != 0
2095       && (targetm.calls.return_in_msb (type)
2096 	  ? !BYTES_BIG_ENDIAN
2097 	  : BYTES_BIG_ENDIAN))
2098     padding_correction
2099       = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2100 
2101   /* Copy the structure BITSIZE bits at a time.
2102 
2103      We could probably emit more efficient code for machines which do not use
2104      strict alignment, but it doesn't seem worth the effort at the current
2105      time.  */
2106   for (bitpos = 0, xbitpos = padding_correction;
2107        bitpos < bytes * BITS_PER_UNIT;
2108        bitpos += bitsize, xbitpos += bitsize)
2109     {
2110       /* We need a new source operand each time xbitpos is on a
2111 	 word boundary and when xbitpos == padding_correction
2112 	 (the first time through).  */
2113       if (xbitpos % BITS_PER_WORD == 0
2114 	  || xbitpos == padding_correction)
2115 	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2116 				     GET_MODE (srcreg));
2117 
2118       /* We need a new destination operand each time bitpos is on
2119 	 a word boundary.  */
2120       if (bitpos % BITS_PER_WORD == 0)
2121 	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2122 
2123       /* Use xbitpos for the source extraction (right justified) and
2124 	 bitpos for the destination store (left justified).  */
2125       store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2126 		       extract_bit_field (src, bitsize,
2127 					  xbitpos % BITS_PER_WORD, 1,
2128 					  NULL_RTX, word_mode, word_mode));
2129     }
2130 
2131   return tgtblk;
2132 }
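
/* Usage sketch (hypothetical caller, for illustration only): a call
   returned a small BLKmode structure of TYPE in the register RETREG;
   copy it somewhere addressable.  */

static rtx ATTRIBUTE_UNUSED
example_extract_blkmode_return (rtx retreg, tree type)
{
  /* A null target makes copy_blkmode_from_reg allocate a suitably
     typed stack temporary itself.  */
  return copy_blkmode_from_reg (NULL_RTX, retreg, type);
}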
2133 
2134 /* Add a USE expression for REG to the (possibly empty) list pointed
2135    to by CALL_FUSAGE.  REG must denote a hard register.  */
2136 
2137 void
2138 use_reg (rtx *call_fusage, rtx reg)
2139 {
2140   gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2141 
2142   *call_fusage
2143     = gen_rtx_EXPR_LIST (VOIDmode,
2144 			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2145 }
2146 
2147 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2148    starting at REGNO.  All of these registers must be hard registers.  */
2149 
2150 void
2151 use_regs (rtx *call_fusage, int regno, int nregs)
2152 {
2153   int i;
2154 
2155   gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2156 
2157   for (i = 0; i < nregs; i++)
2158     use_reg (call_fusage, regno_reg_rtx[regno + i]);
2159 }
2160 
2161 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2162    PARALLEL REGS.  This is for calls that pass values in multiple
2163    non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2164 
2165 void
2166 use_group_regs (rtx *call_fusage, rtx regs)
2167 {
2168   int i;
2169 
2170   for (i = 0; i < XVECLEN (regs, 0); i++)
2171     {
2172       rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2173 
2174       /* A NULL entry means the parameter goes both on the stack and in
2175 	 registers.  This can also be a MEM for targets that pass values
2176 	 partially on the stack and partially in registers.  */
2177       if (reg != 0 && REG_P (reg))
2178 	use_reg (call_fusage, reg);
2179     }
2180 }
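
/* Usage sketch (hypothetical caller, for illustration only): build the
   list recording that a call reads the argument registers in the
   PARALLEL REGS plus the single hard register EXTRA, as attached to
   CALL_INSN_FUNCTION_USAGE when emitting a call.  */

static rtx ATTRIBUTE_UNUSED
example_build_call_fusage (rtx regs, rtx extra)
{
  rtx call_fusage = NULL_RTX;

  use_group_regs (&call_fusage, regs);
  /* EXTRA must be a hard register; use_reg asserts this.  */
  use_reg (&call_fusage, extra);
  return call_fusage;
}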
2181 
2182 
2183 /* Determine whether the LEN bytes generated by CONSTFUN can be
2184    stored to memory using several move instructions.  CONSTFUNDATA is
2185    a pointer which will be passed as argument in every CONSTFUN call.
2186    ALIGN is maximum alignment we can assume.  Return nonzero if a
2187    call to store_by_pieces should succeed.  */
2188 
2189 int
2190 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2191 		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2192 		     void *constfundata, unsigned int align)
2193 {
2194   unsigned HOST_WIDE_INT l;
2195   unsigned int max_size;
2196   HOST_WIDE_INT offset = 0;
2197   enum machine_mode mode, tmode;
2198   enum insn_code icode;
2199   int reverse;
2200   rtx cst;
2201 
2202   if (len == 0)
2203     return 1;
2204 
2205   if (! STORE_BY_PIECES_P (len, align))
2206     return 0;
2207 
2208   tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2209   if (align >= GET_MODE_ALIGNMENT (tmode))
2210     align = GET_MODE_ALIGNMENT (tmode);
2211   else
2212     {
2213       enum machine_mode xmode;
2214 
2215       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2216 	   tmode != VOIDmode;
2217 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2218 	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2219 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
2220 	  break;
2221 
2222       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2223     }
2224 
2225   /* We would first store what we can in the largest integer mode, then go to
2226      successively smaller modes.  */
2227 
2228   for (reverse = 0;
2229        reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2230        reverse++)
2231     {
2232       l = len;
2233       mode = VOIDmode;
2234       max_size = STORE_MAX_PIECES + 1;
2235       while (max_size > 1)
2236 	{
2237 	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2238 	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2239 	    if (GET_MODE_SIZE (tmode) < max_size)
2240 	      mode = tmode;
2241 
2242 	  if (mode == VOIDmode)
2243 	    break;
2244 
2245 	  icode = mov_optab->handlers[(int) mode].insn_code;
2246 	  if (icode != CODE_FOR_nothing
2247 	      && align >= GET_MODE_ALIGNMENT (mode))
2248 	    {
2249 	      unsigned int size = GET_MODE_SIZE (mode);
2250 
2251 	      while (l >= size)
2252 		{
2253 		  if (reverse)
2254 		    offset -= size;
2255 
2256 		  cst = (*constfun) (constfundata, offset, mode);
2257 		  if (!LEGITIMATE_CONSTANT_P (cst))
2258 		    return 0;
2259 
2260 		  if (!reverse)
2261 		    offset += size;
2262 
2263 		  l -= size;
2264 		}
2265 	    }
2266 
2267 	  max_size = GET_MODE_SIZE (mode);
2268 	}
2269 
2270       /* The code above should have handled everything.  */
2271       gcc_assert (!l);
2272     }
2273 
2274   return 1;
2275 }
2276 
2277 /* Generate several move instructions to store LEN bytes generated by
2278    CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2279    pointer which will be passed as argument in every CONSTFUN call.
2280    ALIGN is maximum alignment we can assume.
2281    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2282    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2283    stpcpy.  */
2284 
2285 rtx
2286 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2287 		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2288 		 void *constfundata, unsigned int align, int endp)
2289 {
2290   struct store_by_pieces data;
2291 
2292   if (len == 0)
2293     {
2294       gcc_assert (endp != 2);
2295       return to;
2296     }
2297 
2298   gcc_assert (STORE_BY_PIECES_P (len, align));
2299   data.constfun = constfun;
2300   data.constfundata = constfundata;
2301   data.len = len;
2302   data.to = to;
2303   store_by_pieces_1 (&data, align);
2304   if (endp)
2305     {
2306       rtx to1;
2307 
2308       gcc_assert (!data.reverse);
2309       if (data.autinc_to)
2310 	{
2311 	  if (endp == 2)
2312 	    {
2313 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2314 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2315 	      else
2316 		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2317 								-1));
2318 	    }
2319 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2320 					   data.offset);
2321 	}
2322       else
2323 	{
2324 	  if (endp == 2)
2325 	    --data.offset;
2326 	  to1 = adjust_address (data.to, QImode, data.offset);
2327 	}
2328       return to1;
2329     }
2330   else
2331     return data.to;
2332 }
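
/* Usage sketch (hypothetical helper, for illustration only): zero LEN
   bytes of the BLKmode MEM TO by piecewise stores when the target can
   do that cheaply, returning false so the caller can fall back to a
   libcall otherwise.  The callback follows the constfun contract of
   producing the constant for a given offset and mode.  */

static rtx
example_zero_cst (void *data ATTRIBUTE_UNUSED,
                  HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                  enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

static bool ATTRIBUTE_UNUSED
example_zero_by_pieces (rtx to, unsigned HOST_WIDE_INT len)
{
  unsigned int align = MEM_ALIGN (to);

  if (!can_store_by_pieces (len, example_zero_cst, NULL, align))
    return false;

  /* ENDP == 0: we want TO itself back, not the address past the
     last byte stored.  */
  store_by_pieces (to, len, example_zero_cst, NULL, align, 0);
  return true;
}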
2333 
2334 /* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2335    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2336 
2337 static void
2338 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2339 {
2340   struct store_by_pieces data;
2341 
2342   if (len == 0)
2343     return;
2344 
2345   data.constfun = clear_by_pieces_1;
2346   data.constfundata = NULL;
2347   data.len = len;
2348   data.to = to;
2349   store_by_pieces_1 (&data, align);
2350 }
2351 
2352 /* Callback routine for clear_by_pieces.
2353    Return const0_rtx unconditionally.  */
2354 
2355 static rtx
2356 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2357 		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2358 		   enum machine_mode mode ATTRIBUTE_UNUSED)
2359 {
2360   return const0_rtx;
2361 }
2362 
2363 /* Subroutine of clear_by_pieces and store_by_pieces.
2364    Generate several move instructions to store LEN bytes of block TO.  (A MEM
2365    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2366 
2367 static void
2368 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2369 		   unsigned int align ATTRIBUTE_UNUSED)
2370 {
2371   rtx to_addr = XEXP (data->to, 0);
2372   unsigned int max_size = STORE_MAX_PIECES + 1;
2373   enum machine_mode mode = VOIDmode, tmode;
2374   enum insn_code icode;
2375 
2376   data->offset = 0;
2377   data->to_addr = to_addr;
2378   data->autinc_to
2379     = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2380        || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2381 
2382   data->explicit_inc_to = 0;
2383   data->reverse
2384     = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2385   if (data->reverse)
2386     data->offset = data->len;
2387 
2388   /* If storing requires more than two move insns,
2389      copy addresses to registers (to make displacements shorter)
2390      and use post-increment if available.  */
2391   if (!data->autinc_to
2392       && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2393     {
2394       /* Determine the main mode we'll be using.  */
2395       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2396 	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2397 	if (GET_MODE_SIZE (tmode) < max_size)
2398 	  mode = tmode;
2399 
2400       if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2401 	{
2402 	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2403 	  data->autinc_to = 1;
2404 	  data->explicit_inc_to = -1;
2405 	}
2406 
2407       if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2408 	  && ! data->autinc_to)
2409 	{
2410 	  data->to_addr = copy_addr_to_reg (to_addr);
2411 	  data->autinc_to = 1;
2412 	  data->explicit_inc_to = 1;
2413 	}
2414 
2415       if ( !data->autinc_to && CONSTANT_P (to_addr))
2416 	data->to_addr = copy_addr_to_reg (to_addr);
2417     }
2418 
2419   tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2420   if (align >= GET_MODE_ALIGNMENT (tmode))
2421     align = GET_MODE_ALIGNMENT (tmode);
2422   else
2423     {
2424       enum machine_mode xmode;
2425 
2426       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2427 	   tmode != VOIDmode;
2428 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2429 	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2430 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
2431 	  break;
2432 
2433       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2434     }
2435 
2436   /* First store what we can in the largest integer mode, then go to
2437      successively smaller modes.  */
2438 
2439   while (max_size > 1)
2440     {
2441       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2442 	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2443 	if (GET_MODE_SIZE (tmode) < max_size)
2444 	  mode = tmode;
2445 
2446       if (mode == VOIDmode)
2447 	break;
2448 
2449       icode = mov_optab->handlers[(int) mode].insn_code;
2450       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2451 	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2452 
2453       max_size = GET_MODE_SIZE (mode);
2454     }
2455 
2456   /* The code above should have handled everything.  */
2457   gcc_assert (!data->len);
2458 }
2459 
2460 /* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2461    with move instructions for mode MODE.  GENFUN is the gen_... function
2462    to make a move insn for that mode.  DATA has all the other info.  */
2463 
2464 static void
2465 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2466 		   struct store_by_pieces *data)
2467 {
2468   unsigned int size = GET_MODE_SIZE (mode);
2469   rtx to1, cst;
2470 
2471   while (data->len >= size)
2472     {
2473       if (data->reverse)
2474 	data->offset -= size;
2475 
2476       if (data->autinc_to)
2477 	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2478 					 data->offset);
2479       else
2480 	to1 = adjust_address (data->to, mode, data->offset);
2481 
2482       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2483 	emit_insn (gen_add2_insn (data->to_addr,
2484 				  GEN_INT (-(HOST_WIDE_INT) size)));
2485 
2486       cst = (*data->constfun) (data->constfundata, data->offset, mode);
2487       emit_insn ((*genfun) (to1, cst));
2488 
2489       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2490 	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2491 
2492       if (! data->reverse)
2493 	data->offset += size;
2494 
2495       data->len -= size;
2496     }
2497 }
2498 
2499 /* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2500    its length in bytes.  */
2501 
2502 rtx
2503 clear_storage (rtx object, rtx size, enum block_op_methods method)
2504 {
2505   enum machine_mode mode = GET_MODE (object);
2506   unsigned int align;
2507 
2508   gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2509 
2510   /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2511      just move a zero.  Otherwise, do this a piece at a time.  */
2512   if (mode != BLKmode
2513       && GET_CODE (size) == CONST_INT
2514       && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2515     {
2516       rtx zero = CONST0_RTX (mode);
2517       if (zero != NULL)
2518 	{
2519 	  emit_move_insn (object, zero);
2520 	  return NULL;
2521 	}
2522 
2523       if (COMPLEX_MODE_P (mode))
2524 	{
2525 	  zero = CONST0_RTX (GET_MODE_INNER (mode));
2526 	  if (zero != NULL)
2527 	    {
2528 	      write_complex_part (object, zero, 0);
2529 	      write_complex_part (object, zero, 1);
2530 	      return NULL;
2531 	    }
2532 	}
2533     }
2534 
2535   if (size == const0_rtx)
2536     return NULL;
2537 
2538   align = MEM_ALIGN (object);
2539 
2540   if (GET_CODE (size) == CONST_INT
2541       && CLEAR_BY_PIECES_P (INTVAL (size), align))
2542     clear_by_pieces (object, INTVAL (size), align);
2543   else if (set_storage_via_setmem (object, size, const0_rtx, align))
2544     ;
2545   else
2546     return clear_storage_via_libcall (object, size,
2547 				      method == BLOCK_OP_TAILCALL);
2548 
2549   return NULL;
2550 }
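
/* Usage sketch (hypothetical caller, for illustration only): zero the
   storage of the variable DECL, assuming it has already been given
   RTL, lives in memory and has a constant size, roughly what the
   expansion of an "= {0}" style initializer amounts to.  */

static void ATTRIBUTE_UNUSED
example_clear_decl_storage (tree decl)
{
  rtx mem = DECL_RTL (decl);
  rtx size = GEN_INT (int_size_in_bytes (TREE_TYPE (decl)));

  /* BLOCK_OP_NORMAL lets clear_storage pick by-pieces code, a setmem
     pattern, or a memset libcall, whichever applies.  */
  clear_storage (mem, size, BLOCK_OP_NORMAL);
}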
2551 
2552 /* A subroutine of clear_storage.  Expand a call to memset.
2553    Return the return value of memset, 0 otherwise.  */
2554 
2555 static rtx
2556 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2557 {
2558   tree call_expr, arg_list, fn, object_tree, size_tree;
2559   enum machine_mode size_mode;
2560   rtx retval;
2561 
2562   /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2563      place those new pseudos into a VAR_DECL and use them later.  */
2564 
2565   object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2566 
2567   size_mode = TYPE_MODE (sizetype);
2568   size = convert_to_mode (size_mode, size, 1);
2569   size = copy_to_mode_reg (size_mode, size);
2570 
2571   /* It is incorrect to use the libcall calling conventions to call
2572      memset in this context.  This could be a user call to memset and
2573      the user may wish to examine the return value from memset.  For
2574      targets where libcalls and normal calls have different conventions
2575      for returning pointers, we could end up generating incorrect code.  */
2576 
2577   object_tree = make_tree (ptr_type_node, object);
2578   size_tree = make_tree (sizetype, size);
2579 
2580   fn = clear_storage_libcall_fn (true);
2581   arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2582   arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2583   arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2584 
2585   /* Now we have to build up the CALL_EXPR itself.  */
2586   call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2587   call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2588 		      call_expr, arg_list, NULL_TREE);
2589   CALL_EXPR_TAILCALL (call_expr) = tailcall;
2590 
2591   retval = expand_normal (call_expr);
2592 
2593   return retval;
2594 }
2595 
2596 /* A subroutine of clear_storage_via_libcall.  Create the tree node
2597    for the function we use for block clears.  The first time FOR_CALL
2598    is true, we call assemble_external.  */
2599 
2600 static GTY(()) tree block_clear_fn;
2601 
2602 void
2603 init_block_clear_fn (tree decl, const char *asmspec)
2604 {
2605   if (!block_clear_fn)
2606     {
2607       tree fn, args;
2608 
2609       fn = get_identifier ("memset");
2610       args = build_function_type_list (ptr_type_node, ptr_type_node,
2611 				       integer_type_node, sizetype,
2612 				       NULL_TREE);
2613 
2614       fn = build_decl (FUNCTION_DECL, fn, args);
2615       DECL_EXTERNAL (fn) = 1;
2616       TREE_PUBLIC (fn) = 1;
2617       DECL_ARTIFICIAL (fn) = 1;
2618       TREE_NOTHROW (fn) = 1;
2619       if (decl != NULL_TREE && DECL_VISIBILITY_SPECIFIED (decl))
2620 	DECL_VISIBILITY (fn) = DECL_VISIBILITY (decl);
2621       else
2622 	DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2623       DECL_VISIBILITY_SPECIFIED (fn) = 1;
2624 
2625       block_clear_fn = fn;
2626     }
2627 
2628   if (asmspec)
2629     set_user_assembler_name (block_clear_fn, asmspec);
2630 }
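
/* Usage sketch (hypothetical, for illustration only): a target or
   language hook that wants block clears emitted under a different
   assembler name, here the assumed name "__custom_memset", could
   register it like this before any block clears are expanded.  */

static void ATTRIBUTE_UNUSED
example_rename_block_clear (void)
{
  init_block_clear_fn (NULL_TREE, "__custom_memset");
}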
2631 
2632 static tree
2633 clear_storage_libcall_fn (int for_call)
2634 {
2635   static bool emitted_extern;
2636 
2637   if (!block_clear_fn)
2638     init_block_clear_fn (NULL_TREE, NULL);
2639 
2640   if (for_call && !emitted_extern)
2641     {
2642       emitted_extern = true;
2643       make_decl_rtl (block_clear_fn);
2644       assemble_external (block_clear_fn);
2645     }
2646 
2647   return block_clear_fn;
2648 }
2649 
2650 /* Expand a setmem pattern; return true if successful.  */
2651 
2652 bool
2653 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2654 {
2655   /* Try the most limited insn first, because there's no point
2656      including more than one in the machine description unless
2657      the more limited one has some advantage.  */
2658 
2659   rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2660   enum machine_mode mode;
2661 
2662   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2663        mode = GET_MODE_WIDER_MODE (mode))
2664     {
2665       enum insn_code code = setmem_optab[(int) mode];
2666       insn_operand_predicate_fn pred;
2667 
2668       if (code != CODE_FOR_nothing
2669 	  /* We don't need MODE to be narrower than
2670 	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2671 	     the mode mask, as it is returned by the macro, it will
2672 	     definitely be less than the actual mode mask.  */
2673 	  && ((GET_CODE (size) == CONST_INT
2674 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2675 		   <= (GET_MODE_MASK (mode) >> 1)))
2676 	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2677 	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2678 	      || (*pred) (object, BLKmode))
2679 	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2680 	      || (*pred) (opalign, VOIDmode)))
2681 	{
2682 	  rtx opsize, opchar;
2683 	  enum machine_mode char_mode;
2684 	  rtx last = get_last_insn ();
2685 	  rtx pat;
2686 
2687 	  opsize = convert_to_mode (mode, size, 1);
2688 	  pred = insn_data[(int) code].operand[1].predicate;
2689 	  if (pred != 0 && ! (*pred) (opsize, mode))
2690 	    opsize = copy_to_mode_reg (mode, opsize);
2691 
2692 	  opchar = val;
2693 	  char_mode = insn_data[(int) code].operand[2].mode;
2694 	  if (char_mode != VOIDmode)
2695 	    {
2696 	      opchar = convert_to_mode (char_mode, opchar, 1);
2697 	      pred = insn_data[(int) code].operand[2].predicate;
2698 	      if (pred != 0 && ! (*pred) (opchar, char_mode))
2699 		opchar = copy_to_mode_reg (char_mode, opchar);
2700 	    }
2701 
2702 	  pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2703 	  if (pat)
2704 	    {
2705 	      emit_insn (pat);
2706 	      return true;
2707 	    }
2708 	  else
2709 	    delete_insns_since (last);
2710 	}
2711     }
2712 
2713   return false;
2714 }
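
/* Usage sketch (hypothetical caller, for illustration only): try to
   expand a memset-like store of the byte value VAL over LEN bytes of
   MEM through the target's setmem pattern; nothing is emitted and
   false is returned when the pattern is absent or rejects the
   operands.  */

static bool ATTRIBUTE_UNUSED
example_try_setmem (rtx mem, rtx len, rtx val)
{
  return set_storage_via_setmem (mem, len, val, MEM_ALIGN (mem));
}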
2715 
2716 
2717 /* Write to one of the components of the complex value CPLX.  Write VAL to
2718    the real part if IMAG_P is false, and the imaginary part if it's true.  */
2719 
2720 static void
2721 write_complex_part (rtx cplx, rtx val, bool imag_p)
2722 {
2723   enum machine_mode cmode;
2724   enum machine_mode imode;
2725   unsigned ibitsize;
2726 
2727   if (GET_CODE (cplx) == CONCAT)
2728     {
2729       emit_move_insn (XEXP (cplx, imag_p), val);
2730       return;
2731     }
2732 
2733   cmode = GET_MODE (cplx);
2734   imode = GET_MODE_INNER (cmode);
2735   ibitsize = GET_MODE_BITSIZE (imode);
2736 
2737   /* For MEMs simplify_gen_subreg may generate an invalid new address
2738      because, e.g., the original address is considered mode-dependent
2739      by the target, which restricts simplify_subreg from invoking
2740      adjust_address_nv.  Instead of preparing fallback support for an
2741      invalid address, we call adjust_address_nv directly.  */
2742   if (MEM_P (cplx))
2743     {
2744       emit_move_insn (adjust_address_nv (cplx, imode,
2745 					 imag_p ? GET_MODE_SIZE (imode) : 0),
2746 		      val);
2747       return;
2748     }
2749 
2750   /* If the sub-object is at least word sized, then we know that subregging
2751      will work.  This special case is important, since store_bit_field
2752      wants to operate on integer modes, and there's rarely an OImode to
2753      correspond to TCmode.  */
2754   if (ibitsize >= BITS_PER_WORD
2755       /* For hard regs we have exact predicates.  Assume we can split
2756 	 the original object if it spans an even number of hard regs.
2757 	 This special case is important for SCmode on 64-bit platforms
2758 	 where the natural size of floating-point regs is 32-bit.  */
2759       || (REG_P (cplx)
2760 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2761 	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2762     {
2763       rtx part = simplify_gen_subreg (imode, cplx, cmode,
2764 				      imag_p ? GET_MODE_SIZE (imode) : 0);
2765       if (part)
2766         {
2767 	  emit_move_insn (part, val);
2768 	  return;
2769 	}
2770       else
2771 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
2772 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2773     }
2774 
2775   store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2776 }
2777 
2778 /* Extract one of the components of the complex value CPLX.  Extract the
2779    real part if IMAG_P is false, and the imaginary part if it's true.  */
2780 
2781 static rtx
2782 read_complex_part (rtx cplx, bool imag_p)
2783 {
2784   enum machine_mode cmode, imode;
2785   unsigned ibitsize;
2786 
2787   if (GET_CODE (cplx) == CONCAT)
2788     return XEXP (cplx, imag_p);
2789 
2790   cmode = GET_MODE (cplx);
2791   imode = GET_MODE_INNER (cmode);
2792   ibitsize = GET_MODE_BITSIZE (imode);
2793 
2794   /* Special case reads from complex constants that got spilled to memory.  */
2795   if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2796     {
2797       tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2798       if (decl && TREE_CODE (decl) == COMPLEX_CST)
2799 	{
2800 	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2801 	  if (CONSTANT_CLASS_P (part))
2802 	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2803 	}
2804     }
2805 
2806   /* For MEMs simplify_gen_subreg may generate an invalid new address
2807      because, e.g., the original address is considered mode-dependent
2808      by the target, which restricts simplify_subreg from invoking
2809      adjust_address_nv.  Instead of preparing fallback support for an
2810      invalid address, we call adjust_address_nv directly.  */
2811   if (MEM_P (cplx))
2812     return adjust_address_nv (cplx, imode,
2813 			      imag_p ? GET_MODE_SIZE (imode) : 0);
2814 
2815   /* If the sub-object is at least word sized, then we know that subregging
2816      will work.  This special case is important, since extract_bit_field
2817      wants to operate on integer modes, and there's rarely an OImode to
2818      correspond to TCmode.  */
2819   if (ibitsize >= BITS_PER_WORD
2820       /* For hard regs we have exact predicates.  Assume we can split
2821 	 the original object if it spans an even number of hard regs.
2822 	 This special case is important for SCmode on 64-bit platforms
2823 	 where the natural size of floating-point regs is 32-bit.  */
2824       || (REG_P (cplx)
2825 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2826 	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2827     {
2828       rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2829 				     imag_p ? GET_MODE_SIZE (imode) : 0);
2830       if (ret)
2831         return ret;
2832       else
2833 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
2834 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2835     }
2836 
2837   return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2838 			    true, NULL_RTX, imode, imode);
2839 }
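
/* Usage sketch (hypothetical helper, for illustration only): negate
   the imaginary part of the complex value CPLX in place, assuming
   CPLX is a writable register or memory operand, using the two part
   accessors above.  */

static void ATTRIBUTE_UNUSED
example_conjugate_in_place (rtx cplx)
{
  enum machine_mode imode = GET_MODE_INNER (GET_MODE (cplx));
  rtx imag = read_complex_part (cplx, true);
  rtx neg = expand_unop (imode, neg_optab, imag, NULL_RTX, 0);

  write_complex_part (cplx, neg, true);
}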
2840 
2841 /* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
2842    NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
2843    represented in NEW_MODE.  If FORCE is true, this will never happen, as
2844    we'll force-create a SUBREG if needed.  */
2845 
2846 static rtx
2847 emit_move_change_mode (enum machine_mode new_mode,
2848 		       enum machine_mode old_mode, rtx x, bool force)
2849 {
2850   rtx ret;
2851 
2852   if (MEM_P (x))
2853     {
2854       /* We don't have to worry about changing the address since the
2855 	 size in bytes is supposed to be the same.  */
2856       if (reload_in_progress)
2857 	{
2858 	  /* Copy the MEM to change the mode and move any
2859 	     substitutions from the old MEM to the new one.  */
2860 	  ret = adjust_address_nv (x, new_mode, 0);
2861 	  copy_replacements (x, ret);
2862 	}
2863       else
2864 	ret = adjust_address (x, new_mode, 0);
2865     }
2866   else
2867     {
2868       /* Note that we do want simplify_subreg's behavior of validating
2869 	 that the new mode is ok for a hard register.  If we were to use
2870 	 simplify_gen_subreg, we would create the subreg, but would
2871 	 probably run into the target not being able to implement it.  */
2872       /* Except, of course, when FORCE is true, when this is exactly what
2873 	 we want.  Which is needed for CCmodes on some targets.  */
2874       if (force)
2875 	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2876       else
2877 	ret = simplify_subreg (new_mode, x, old_mode, 0);
2878     }
2879 
2880   return ret;
2881 }
2882 
2883 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
2884    an integer mode of the same size as MODE.  Returns the instruction
2885    emitted, or NULL if such a move could not be generated.  */
2886 
2887 static rtx
2888 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2889 {
2890   enum machine_mode imode;
2891   enum insn_code code;
2892 
2893   /* There must exist a mode of the exact size we require.  */
2894   imode = int_mode_for_mode (mode);
2895   if (imode == BLKmode)
2896     return NULL_RTX;
2897 
2898   /* The target must support moves in this mode.  */
2899   code = mov_optab->handlers[imode].insn_code;
2900   if (code == CODE_FOR_nothing)
2901     return NULL_RTX;
2902 
2903   x = emit_move_change_mode (imode, mode, x, force);
2904   if (x == NULL_RTX)
2905     return NULL_RTX;
2906   y = emit_move_change_mode (imode, mode, y, force);
2907   if (y == NULL_RTX)
2908     return NULL_RTX;
2909   return emit_insn (GEN_FCN (code) (x, y));
2910 }
2911 
2912 /* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
2913    Return an equivalent MEM that does not use an auto-increment.  */
2914 
2915 static rtx
2916 emit_move_resolve_push (enum machine_mode mode, rtx x)
2917 {
2918   enum rtx_code code = GET_CODE (XEXP (x, 0));
2919   HOST_WIDE_INT adjust;
2920   rtx temp;
2921 
2922   adjust = GET_MODE_SIZE (mode);
2923 #ifdef PUSH_ROUNDING
2924   adjust = PUSH_ROUNDING (adjust);
2925 #endif
2926   if (code == PRE_DEC || code == POST_DEC)
2927     adjust = -adjust;
2928   else if (code == PRE_MODIFY || code == POST_MODIFY)
2929     {
2930       rtx expr = XEXP (XEXP (x, 0), 1);
2931       HOST_WIDE_INT val;
2932 
2933       gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2934       gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2935       val = INTVAL (XEXP (expr, 1));
2936       if (GET_CODE (expr) == MINUS)
2937 	val = -val;
2938       gcc_assert (adjust == val || adjust == -val);
2939       adjust = val;
2940     }
2941 
2942   /* Do not use anti_adjust_stack, since we don't want to update
2943      stack_pointer_delta.  */
2944   temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2945 			      GEN_INT (adjust), stack_pointer_rtx,
2946 			      0, OPTAB_LIB_WIDEN);
2947   if (temp != stack_pointer_rtx)
2948     emit_move_insn (stack_pointer_rtx, temp);
2949 
2950   switch (code)
2951     {
2952     case PRE_INC:
2953     case PRE_DEC:
2954     case PRE_MODIFY:
2955       temp = stack_pointer_rtx;
2956       break;
2957     case POST_INC:
2958     case POST_DEC:
2959     case POST_MODIFY:
2960       temp = plus_constant (stack_pointer_rtx, -adjust);
2961       break;
2962     default:
2963       gcc_unreachable ();
2964     }
2965 
2966   return replace_equiv_address (x, temp);
2967 }
2968 
2969 /* A subroutine of emit_move_complex.  Generate a move from Y into X.
2970    X is known to satisfy push_operand, and MODE is known to be complex.
2971    Returns the last instruction emitted.  */
2972 
2973 static rtx
2974 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2975 {
2976   enum machine_mode submode = GET_MODE_INNER (mode);
2977   bool imag_first;
2978 
2979 #ifdef PUSH_ROUNDING
2980   unsigned int submodesize = GET_MODE_SIZE (submode);
2981 
2982   /* In case we output to the stack, but the size is smaller than the
2983      machine can push exactly, we need to use move instructions.  */
2984   if (PUSH_ROUNDING (submodesize) != submodesize)
2985     {
2986       x = emit_move_resolve_push (mode, x);
2987       return emit_move_insn (x, y);
2988     }
2989 #endif
2990 
2991   /* Note that the real part always precedes the imag part in memory
2992      regardless of machine's endianness.  */
2993   switch (GET_CODE (XEXP (x, 0)))
2994     {
2995     case PRE_DEC:
2996     case POST_DEC:
2997       imag_first = true;
2998       break;
2999     case PRE_INC:
3000     case POST_INC:
3001       imag_first = false;
3002       break;
3003     default:
3004       gcc_unreachable ();
3005     }
3006 
3007   emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3008 		  read_complex_part (y, imag_first));
3009   return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3010 			 read_complex_part (y, !imag_first));
3011 }
3012 
3013 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3014    MODE is known to be complex.  Returns the last instruction emitted.  */
3015 
3016 static rtx
3017 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3018 {
3019   bool try_int;
3020 
3021   /* Need to take special care for pushes, to maintain proper ordering
3022      of the data, and possibly extra padding.  */
3023   if (push_operand (x, mode))
3024     return emit_move_complex_push (mode, x, y);
3025 
3026   /* See if we can coerce the target into moving both values at once.  */
3027 
3028   /* Move floating point as parts.  */
3029   if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3030       && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3031     try_int = false;
3032   /* Not possible if the values are inherently not adjacent.  */
3033   else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3034     try_int = false;
3035   /* Is possible if both are registers (or subregs of registers).  */
3036   else if (register_operand (x, mode) && register_operand (y, mode))
3037     try_int = true;
3038   /* If one of the operands is a memory, and alignment constraints
3039      are friendly enough, we may be able to do combined memory operations.
3040      We do not attempt this if Y is a constant because that combination is
3041      usually better with the by-parts thing below.  */
3042   else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3043 	   && (!STRICT_ALIGNMENT
3044 	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3045     try_int = true;
3046   else
3047     try_int = false;
3048 
3049   if (try_int)
3050     {
3051       rtx ret;
3052 
3053       /* For memory to memory moves, optimal behavior can be had with the
3054 	 existing block move logic.  */
3055       if (MEM_P (x) && MEM_P (y))
3056 	{
3057 	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3058 			   BLOCK_OP_NO_LIBCALL);
3059 	  return get_last_insn ();
3060 	}
3061 
3062       ret = emit_move_via_integer (mode, x, y, true);
3063       if (ret)
3064 	return ret;
3065     }
3066 
3067   /* Show the output dies here.  This is necessary for SUBREGs
3068      of pseudos since we cannot track their lifetimes correctly;
3069      hard regs shouldn't appear here except as return values.  */
3070   if (!reload_completed && !reload_in_progress
3071       && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3072     emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3073 
3074   write_complex_part (x, read_complex_part (y, false), false);
3075   write_complex_part (x, read_complex_part (y, true), true);
3076   return get_last_insn ();
3077 }
3078 
3079 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3080    MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3081 
3082 static rtx
3083 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3084 {
3085   rtx ret;
3086 
3087   /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3088   if (mode != CCmode)
3089     {
3090       enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3091       if (code != CODE_FOR_nothing)
3092 	{
3093 	  x = emit_move_change_mode (CCmode, mode, x, true);
3094 	  y = emit_move_change_mode (CCmode, mode, y, true);
3095 	  return emit_insn (GEN_FCN (code) (x, y));
3096 	}
3097     }
3098 
3099   /* Otherwise, find the MODE_INT mode of the same width.  */
3100   ret = emit_move_via_integer (mode, x, y, false);
3101   gcc_assert (ret != NULL);
3102   return ret;
3103 }
3104 
3105 /* Return true if word I of OP lies entirely in the
3106    undefined bits of a paradoxical subreg.  */
3107 
3108 static bool
3109 undefined_operand_subword_p (rtx op, int i)
3110 {
3111   enum machine_mode innermode, innermostmode;
3112   int offset;
3113   if (GET_CODE (op) != SUBREG)
3114     return false;
3115   innermode = GET_MODE (op);
3116   innermostmode = GET_MODE (SUBREG_REG (op));
3117   offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3118   /* The SUBREG_BYTE represents the offset, as if the value were stored in
3119      memory, except for a paradoxical subreg where we define
3120      SUBREG_BYTE to be 0; undo this exception as in
3121      simplify_subreg.  */
3122   if (SUBREG_BYTE (op) == 0
3123       && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3124     {
3125       int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3126       if (WORDS_BIG_ENDIAN)
3127 	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3128       if (BYTES_BIG_ENDIAN)
3129 	offset += difference % UNITS_PER_WORD;
3130     }
3131   if (offset >= GET_MODE_SIZE (innermostmode)
3132       || offset <= -GET_MODE_SIZE (word_mode))
3133     return true;
3134   return false;
3135 }
3136 
3137 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3138    MODE is any multi-word or full-word mode that lacks a move_insn
3139    pattern.  Note that you will get better code if you define such
3140    patterns, even if they must turn into multiple assembler instructions.  */
3141 
3142 static rtx
3143 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3144 {
3145   rtx last_insn = 0;
3146   rtx seq, inner;
3147   bool need_clobber;
3148   int i;
3149 
3150   gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3151 
3152   /* If X is a push on the stack, do the push now and replace
3153      X with a reference to the stack pointer.  */
3154   if (push_operand (x, mode))
3155     x = emit_move_resolve_push (mode, x);
3156 
3157   /* If we are in reload, see if either operand is a MEM whose address
3158      is scheduled for replacement.  */
3159   if (reload_in_progress && MEM_P (x)
3160       && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3161     x = replace_equiv_address_nv (x, inner);
3162   if (reload_in_progress && MEM_P (y)
3163       && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3164     y = replace_equiv_address_nv (y, inner);
3165 
3166   start_sequence ();
3167 
3168   need_clobber = false;
3169   for (i = 0;
3170        i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3171        i++)
3172     {
3173       rtx xpart = operand_subword (x, i, 1, mode);
3174       rtx ypart;
3175 
3176       /* Do not generate code for a move if it would come entirely
3177 	 from the undefined bits of a paradoxical subreg.  */
3178       if (undefined_operand_subword_p (y, i))
3179 	continue;
3180 
3181       ypart = operand_subword (y, i, 1, mode);
3182 
3183       /* If we can't get a part of Y, put Y into memory if it is a
3184 	 constant.  Otherwise, force it into a register.  Then we must
3185 	 be able to get a part of Y.  */
3186       if (ypart == 0 && CONSTANT_P (y))
3187 	{
3188 	  y = use_anchored_address (force_const_mem (mode, y));
3189 	  ypart = operand_subword (y, i, 1, mode);
3190 	}
3191       else if (ypart == 0)
3192 	ypart = operand_subword_force (y, i, mode);
3193 
3194       gcc_assert (xpart && ypart);
3195 
3196       need_clobber |= (GET_CODE (xpart) == SUBREG);
3197 
3198       last_insn = emit_move_insn (xpart, ypart);
3199     }
3200 
3201   seq = get_insns ();
3202   end_sequence ();
3203 
3204   /* Show the output dies here.  This is necessary for SUBREGs
3205      of pseudos since we cannot track their lifetimes correctly;
3206      hard regs shouldn't appear here except as return values.
3207      We never want to emit such a clobber after reload.  */
3208   if (x != y
3209       && ! (reload_in_progress || reload_completed)
3210       && need_clobber != 0)
3211     emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3212 
3213   emit_insn (seq);
3214 
3215   return last_insn;
3216 }
3217 
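/* Illustrative sketch, not in the original source: on a 32-bit target
   with no movdi pattern, a DImode pseudo-to-pseudo move is split by
   the loop above into word moves, roughly

       (clobber (reg:DI 100))
       (set (subreg:SI (reg:DI 100) 0) (subreg:SI (reg:DI 101) 0))
       (set (subreg:SI (reg:DI 100) 4) (subreg:SI (reg:DI 101) 4))

   where the CLOBBER is emitted only before reload and only when a
   SUBREG destination makes it necessary.  */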
3218 /* Low level part of emit_move_insn.
3219    Called just like emit_move_insn, but assumes X and Y
3220    are basically valid.  */
3221 
3222 rtx
3223 emit_move_insn_1 (rtx x, rtx y)
3224 {
3225   enum machine_mode mode = GET_MODE (x);
3226   enum insn_code code;
3227 
3228   gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3229 
3230   code = mov_optab->handlers[mode].insn_code;
3231   if (code != CODE_FOR_nothing)
3232     return emit_insn (GEN_FCN (code) (x, y));
3233 
3234   /* Expand complex moves by moving real part and imag part.  */
3235   if (COMPLEX_MODE_P (mode))
3236     return emit_move_complex (mode, x, y);
3237 
3238   if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3239     {
3240       rtx result = emit_move_via_integer (mode, x, y, true);
3241 
3242       /* If we can't find an integer mode, use multi words.  */
3243       if (result)
3244 	return result;
3245       else
3246 	return emit_move_multi_word (mode, x, y);
3247     }
3248 
3249   if (GET_MODE_CLASS (mode) == MODE_CC)
3250     return emit_move_ccmode (mode, x, y);
3251 
3252   /* Try using a move pattern for the corresponding integer mode.  This is
3253      only safe when simplify_subreg can convert MODE constants into integer
3254      constants.  At present, it can only do this reliably if the value
3255      fits within a HOST_WIDE_INT.  */
3256   if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3257     {
3258       rtx ret = emit_move_via_integer (mode, x, y, false);
3259       if (ret)
3260 	return ret;
3261     }
3262 
3263   return emit_move_multi_word (mode, x, y);
3264 }
3265 
3266 /* Generate code to copy Y into X.
3267    Both Y and X must have the same mode, except that
3268    Y can be a constant with VOIDmode.
3269    This mode cannot be BLKmode; use emit_block_move for that.
3270 
3271    Return the last instruction emitted.  */
3272 
3273 rtx
3274 emit_move_insn (rtx x, rtx y)
3275 {
3276   enum machine_mode mode = GET_MODE (x);
3277   rtx y_cst = NULL_RTX;
3278   rtx last_insn, set;
3279 
3280   gcc_assert (mode != BLKmode
3281 	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3282 
3283   if (CONSTANT_P (y))
3284     {
3285       if (optimize
3286 	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3287 	  && (last_insn = compress_float_constant (x, y)))
3288 	return last_insn;
3289 
3290       y_cst = y;
3291 
3292       if (!LEGITIMATE_CONSTANT_P (y))
3293 	{
3294 	  y = force_const_mem (mode, y);
3295 
3296 	  /* If the target's cannot_force_const_mem prevented the spill,
3297 	     assume that the target's move expanders will also take care
3298 	     of the non-legitimate constant.  */
3299 	  if (!y)
3300 	    y = y_cst;
3301 	  else
3302 	    y = use_anchored_address (y);
3303 	}
3304     }
3305 
3306   /* If X or Y are memory references, verify that their addresses are valid
3307      for the machine.  */
3308   if (MEM_P (x)
3309       && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3310 	   && ! push_operand (x, GET_MODE (x)))
3311 	  || (flag_force_addr
3312 	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3313     x = validize_mem (x);
3314 
3315   if (MEM_P (y)
3316       && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3317 	  || (flag_force_addr
3318 	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3319     y = validize_mem (y);
3320 
3321   gcc_assert (mode != BLKmode);
3322 
3323   last_insn = emit_move_insn_1 (x, y);
3324 
3325   if (y_cst && REG_P (x)
3326       && (set = single_set (last_insn)) != NULL_RTX
3327       && SET_DEST (set) == x
3328       && ! rtx_equal_p (y_cst, SET_SRC (set)))
3329     set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3330 
3331   return last_insn;
3332 }
3333 
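/* Usage sketch (editor's illustration, not in the original source):

       rtx tmp = gen_reg_rtx (SImode);
       emit_move_insn (tmp, GEN_INT (42));

   If a constant source is not LEGITIMATE_CONSTANT_P for the target it
   is forced into the constant pool above, and if the final single_set
   still targets the register but its source is no longer the original
   constant, a REG_EQUAL note recording that constant is attached for
   later passes.  */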
3334 /* If Y is representable exactly in a narrower mode, and the target can
3335    perform the extension directly from constant or memory, then emit the
3336    move as an extension.  */
3337 
3338 static rtx
3339 compress_float_constant (rtx x, rtx y)
3340 {
3341   enum machine_mode dstmode = GET_MODE (x);
3342   enum machine_mode orig_srcmode = GET_MODE (y);
3343   enum machine_mode srcmode;
3344   REAL_VALUE_TYPE r;
3345   int oldcost, newcost;
3346 
3347   REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3348 
3349   if (LEGITIMATE_CONSTANT_P (y))
3350     oldcost = rtx_cost (y, SET);
3351   else
3352     oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3353 
3354   for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3355        srcmode != orig_srcmode;
3356        srcmode = GET_MODE_WIDER_MODE (srcmode))
3357     {
3358       enum insn_code ic;
3359       rtx trunc_y, last_insn;
3360 
3361       /* Skip if the target can't extend this way.  */
3362       ic = can_extend_p (dstmode, srcmode, 0);
3363       if (ic == CODE_FOR_nothing)
3364 	continue;
3365 
3366       /* Skip if the narrowed value isn't exact.  */
3367       if (! exact_real_truncate (srcmode, &r))
3368 	continue;
3369 
3370       trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3371 
3372       if (LEGITIMATE_CONSTANT_P (trunc_y))
3373 	{
3374 	  /* Skip if the target needs extra instructions to perform
3375 	     the extension.  */
3376 	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3377 	    continue;
3378 	  /* This is valid, but may not be cheaper than the original. */
3379 	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3380 	  if (oldcost < newcost)
3381 	    continue;
3382 	}
3383       else if (float_extend_from_mem[dstmode][srcmode])
3384 	{
3385 	  trunc_y = force_const_mem (srcmode, trunc_y);
3386 	  /* This is valid, but may not be cheaper than the original. */
3387 	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3388 	  if (oldcost < newcost)
3389 	    continue;
3390 	  trunc_y = validize_mem (trunc_y);
3391 	}
3392       else
3393 	continue;
3394 
3395       /* For CSE's benefit, force the compressed constant pool entry
3396 	 into a new pseudo.  This constant may be used in different modes,
3397 	 and if not, combine will put things back together for us.  */
3398       trunc_y = force_reg (srcmode, trunc_y);
3399       emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3400       last_insn = get_last_insn ();
3401 
3402       if (REG_P (x))
3403 	set_unique_reg_note (last_insn, REG_EQUAL, y);
3404 
3405       return last_insn;
3406     }
3407 
3408   return NULL_RTX;
3409 }
3410 
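/* Illustration (assumes a target with a float_extend pattern from
   SFmode to DFmode and cheap SFmode constants; not in the original
   source): loading the DFmode constant 1.0 can be rewritten as

       (set (reg:SF 101) (const_double:SF 1.0))
       (set (reg:DF 100) (float_extend:DF (reg:SF 101)))

   because 1.0 truncates to SFmode exactly; the transformation is made
   only when rtx_cost says it is no more expensive than the original
   constant load.  */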
3411 /* Pushing data onto the stack.  */
3412 
3413 /* Push a block of length SIZE (perhaps variable)
3414    and return an rtx to address the beginning of the block.
3415    The value may be virtual_outgoing_args_rtx.
3416 
3417    EXTRA is the number of bytes of padding to push in addition to SIZE.
3418    BELOW nonzero means this padding comes at low addresses;
3419    otherwise, the padding comes at high addresses.  */
3420 
3421 rtx
3422 push_block (rtx size, int extra, int below)
3423 {
3424   rtx temp;
3425 
3426   size = convert_modes (Pmode, ptr_mode, size, 1);
3427   if (CONSTANT_P (size))
3428     anti_adjust_stack (plus_constant (size, extra));
3429   else if (REG_P (size) && extra == 0)
3430     anti_adjust_stack (size);
3431   else
3432     {
3433       temp = copy_to_mode_reg (Pmode, size);
3434       if (extra != 0)
3435 	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3436 			     temp, 0, OPTAB_LIB_WIDEN);
3437       anti_adjust_stack (temp);
3438     }
3439 
3440 #ifndef STACK_GROWS_DOWNWARD
3441   if (0)
3442 #else
3443   if (1)
3444 #endif
3445     {
3446       temp = virtual_outgoing_args_rtx;
3447       if (extra != 0 && below)
3448 	temp = plus_constant (temp, extra);
3449     }
3450   else
3451     {
3452       if (GET_CODE (size) == CONST_INT)
3453 	temp = plus_constant (virtual_outgoing_args_rtx,
3454 			      -INTVAL (size) - (below ? 0 : extra));
3455       else if (extra != 0 && !below)
3456 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3457 			     negate_rtx (Pmode, plus_constant (size, extra)));
3458       else
3459 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3460 			     negate_rtx (Pmode, size));
3461     }
3462 
3463   return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3464 }
3465 
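/* Editor's note (illustration, not in the original source): on a
   target where the stack grows downward, push_block (GEN_INT (32), 0, 0)
   anti-adjusts the stack pointer by 32 bytes and returns
   virtual_outgoing_args_rtx as the address of the newly allocated
   block.  */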
3466 #ifdef PUSH_ROUNDING
3467 
3468 /* Emit single push insn.  */
3469 
3470 static void
3471 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3472 {
3473   rtx dest_addr;
3474   unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3475   rtx dest;
3476   enum insn_code icode;
3477   insn_operand_predicate_fn pred;
3478 
3479   stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3480   /* If there is a push pattern, use it.  Otherwise fall back to the old
3481      way of handing a MEM that represents the push operation to the move expander.  */
3482   icode = push_optab->handlers[(int) mode].insn_code;
3483   if (icode != CODE_FOR_nothing)
3484     {
3485       if (((pred = insn_data[(int) icode].operand[0].predicate)
3486 	   && !((*pred) (x, mode))))
3487 	x = force_reg (mode, x);
3488       emit_insn (GEN_FCN (icode) (x));
3489       return;
3490     }
3491   if (GET_MODE_SIZE (mode) == rounded_size)
3492     dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3493   /* If we are to pad downward, adjust the stack pointer first and
3494      then store X into the stack location using an offset.  This is
3495      because emit_move_insn does not know how to pad; it does not have
3496      access to type.  */
3497   else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3498     {
3499       unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3500       HOST_WIDE_INT offset;
3501 
3502       emit_move_insn (stack_pointer_rtx,
3503 		      expand_binop (Pmode,
3504 #ifdef STACK_GROWS_DOWNWARD
3505 				    sub_optab,
3506 #else
3507 				    add_optab,
3508 #endif
3509 				    stack_pointer_rtx,
3510 				    GEN_INT (rounded_size),
3511 				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
3512 
3513       offset = (HOST_WIDE_INT) padding_size;
3514 #ifdef STACK_GROWS_DOWNWARD
3515       if (STACK_PUSH_CODE == POST_DEC)
3516 	/* We have already decremented the stack pointer, so get the
3517 	   previous value.  */
3518 	offset += (HOST_WIDE_INT) rounded_size;
3519 #else
3520       if (STACK_PUSH_CODE == POST_INC)
3521 	/* We have already incremented the stack pointer, so get the
3522 	   previous value.  */
3523 	offset -= (HOST_WIDE_INT) rounded_size;
3524 #endif
3525       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3526     }
3527   else
3528     {
3529 #ifdef STACK_GROWS_DOWNWARD
3530       /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
3531       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3532 				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3533 #else
3534       /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
3535       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3536 				GEN_INT (rounded_size));
3537 #endif
3538       dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3539     }
3540 
3541   dest = gen_rtx_MEM (mode, dest_addr);
3542 
3543   if (type != 0)
3544     {
3545       set_mem_attributes (dest, type, 1);
3546 
3547       if (flag_optimize_sibling_calls)
3548 	/* Function incoming arguments may overlap with sibling call
3549 	   outgoing arguments and we cannot allow reordering of reads
3550 	   from function arguments with stores to outgoing arguments
3551 	   of sibling calls.  */
3552 	set_mem_alias_set (dest, 0);
3553     }
3554   emit_move_insn (dest, x);
3555 }
3556 #endif
3557 
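/* Illustrative sketch, not in the original source: on a target where
   the stack grows downward, PUSH_ROUNDING is the identity and there is
   no push_optab pattern for SImode, pushing an SImode value X comes
   out as the plain move

       (set (mem:SI (pre_dec (reg sp))) X)

   i.e. a store through a PRE_DEC stack-pointer address, which the
   target's movsi pattern can then match as a push instruction.  */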
3558 /* Generate code to push X onto the stack, assuming it has mode MODE and
3559    type TYPE.
3560    MODE is redundant except when X is a CONST_INT (since they don't
3561    carry mode info).
3562    SIZE is an rtx for the size of data to be copied (in bytes),
3563    needed only if X is BLKmode.
3564 
3565    ALIGN (in bits) is maximum alignment we can assume.
3566 
3567    If PARTIAL and REG are both nonzero, then copy that many of the first
3568    bytes of X into registers starting with REG, and push the rest of X.
3569    The amount of space pushed is decreased by PARTIAL bytes.
3570    REG must be a hard register in this case.
3571    If REG is zero but PARTIAL is not, take all other actions for an
3572    argument partially in registers, but do not actually load any
3573    registers.
3574 
3575    EXTRA is the amount in bytes of extra space to leave next to this arg.
3576    This is ignored if an argument block has already been allocated.
3577 
3578    On a machine that lacks real push insns, ARGS_ADDR is the address of
3579    the bottom of the argument block for this call.  We use indexing off there
3580    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3581    argument block has not been preallocated.
3582 
3583    ARGS_SO_FAR is the size of args previously pushed for this call.
3584 
3585    REG_PARM_STACK_SPACE is nonzero if functions require stack space
3586    for arguments passed in registers.  If nonzero, it will be the number
3587    of bytes required.  */
3588 
3589 void
3590 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3591 		unsigned int align, int partial, rtx reg, int extra,
3592 		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3593 		rtx alignment_pad)
3594 {
3595   rtx xinner;
3596   enum direction stack_direction
3597 #ifdef STACK_GROWS_DOWNWARD
3598     = downward;
3599 #else
3600     = upward;
3601 #endif
3602 
3603   /* Decide where to pad the argument: `downward' for below,
3604      `upward' for above, or `none' for don't pad it.
3605      Default is below for small data on big-endian machines; else above.  */
3606   enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3607 
3608   /* Invert direction if stack is post-decrement.
3609      FIXME: why?  */
3610   if (STACK_PUSH_CODE == POST_DEC)
3611     if (where_pad != none)
3612       where_pad = (where_pad == downward ? upward : downward);
3613 
3614   xinner = x;
3615 
3616   if (mode == BLKmode)
3617     {
3618       /* Copy a block into the stack, entirely or partially.  */
3619 
3620       rtx temp;
3621       int used;
3622       int offset;
3623       int skip;
3624 
3625       offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3626       used = partial - offset;
3627 
3628       gcc_assert (size);
3629 
3630       /* USED is now the # of bytes we need not copy to the stack
3631 	 because registers will take care of them.  */
3632 
3633       if (partial != 0)
3634 	xinner = adjust_address (xinner, BLKmode, used);
3635 
3636       /* If the partial register-part of the arg counts in its stack size,
3637 	 skip the part of stack space corresponding to the registers.
3638 	 Otherwise, start copying to the beginning of the stack space,
3639 	 by setting SKIP to 0.  */
3640       skip = (reg_parm_stack_space == 0) ? 0 : used;
3641 
3642 #ifdef PUSH_ROUNDING
3643       /* Do it with several push insns if that doesn't take lots of insns
3644 	 and if there is no difficulty with push insns that skip bytes
3645 	 on the stack for alignment purposes.  */
3646       if (args_addr == 0
3647 	  && PUSH_ARGS
3648 	  && GET_CODE (size) == CONST_INT
3649 	  && skip == 0
3650 	  && MEM_ALIGN (xinner) >= align
3651 	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3652 	  /* Here we avoid the case of a structure whose weak alignment
3653 	     forces many pushes of a small amount of data,
3654 	     and such small pushes do rounding that causes trouble.  */
3655 	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3656 	      || align >= BIGGEST_ALIGNMENT
3657 	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3658 		  == (align / BITS_PER_UNIT)))
3659 	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3660 	{
3661 	  /* Push padding now if padding above and stack grows down,
3662 	     or if padding below and stack grows up.
3663 	     But if space already allocated, this has already been done.  */
3664 	  if (extra && args_addr == 0
3665 	      && where_pad != none && where_pad != stack_direction)
3666 	    anti_adjust_stack (GEN_INT (extra));
3667 
3668 	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3669 	}
3670       else
3671 #endif /* PUSH_ROUNDING  */
3672 	{
3673 	  rtx target;
3674 
3675 	  /* Otherwise make space on the stack and copy the data
3676 	     to the address of that space.  */
3677 
3678 	  /* Deduct words put into registers from the size we must copy.  */
3679 	  if (partial != 0)
3680 	    {
3681 	      if (GET_CODE (size) == CONST_INT)
3682 		size = GEN_INT (INTVAL (size) - used);
3683 	      else
3684 		size = expand_binop (GET_MODE (size), sub_optab, size,
3685 				     GEN_INT (used), NULL_RTX, 0,
3686 				     OPTAB_LIB_WIDEN);
3687 	    }
3688 
3689 	  /* Get the address of the stack space.
3690 	     In this case, we do not deal with EXTRA separately.
3691 	     A single stack adjust will do.  */
3692 	  if (! args_addr)
3693 	    {
3694 	      temp = push_block (size, extra, where_pad == downward);
3695 	      extra = 0;
3696 	    }
3697 	  else if (GET_CODE (args_so_far) == CONST_INT)
3698 	    temp = memory_address (BLKmode,
3699 				   plus_constant (args_addr,
3700 						  skip + INTVAL (args_so_far)));
3701 	  else
3702 	    temp = memory_address (BLKmode,
3703 				   plus_constant (gen_rtx_PLUS (Pmode,
3704 								args_addr,
3705 								args_so_far),
3706 						  skip));
3707 
3708 	  if (!ACCUMULATE_OUTGOING_ARGS)
3709 	    {
3710 	      /* If the source is referenced relative to the stack pointer,
3711 		 copy it to another register to stabilize it.  We do not need
3712 		 to do this if we know that we won't be changing sp.  */
3713 
3714 	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3715 		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3716 		temp = copy_to_reg (temp);
3717 	    }
3718 
3719 	  target = gen_rtx_MEM (BLKmode, temp);
3720 
3721 	  /* We do *not* set_mem_attributes here, because incoming arguments
3722 	     may overlap with sibling call outgoing arguments and we cannot
3723 	     allow reordering of reads from function arguments with stores
3724 	     to outgoing arguments of sibling calls.  We do, however, want
3725 	     to record the alignment of the stack slot.  */
3726 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
3727 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
3728 	  set_mem_align (target, align);
3729 
3730 	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3731 	}
3732     }
3733   else if (partial > 0)
3734     {
3735       /* Scalar partly in registers.  */
3736 
3737       int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3738       int i;
3739       int not_stack;
3740       /* # bytes of start of argument
3741 	 that we must make space for but need not store.  */
3742       int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3743       int args_offset = INTVAL (args_so_far);
3744       int skip;
3745 
3746       /* Push padding now if padding above and stack grows down,
3747 	 or if padding below and stack grows up.
3748 	 But if space already allocated, this has already been done.  */
3749       if (extra && args_addr == 0
3750 	  && where_pad != none && where_pad != stack_direction)
3751 	anti_adjust_stack (GEN_INT (extra));
3752 
3753       /* If we make space by pushing it, we might as well push
3754 	 the real data.  Otherwise, we can leave OFFSET nonzero
3755 	 and leave the space uninitialized.  */
3756       if (args_addr == 0)
3757 	offset = 0;
3758 
3759       /* Now NOT_STACK gets the number of words that we don't need to
3760 	 allocate on the stack.  Convert OFFSET to words too.  */
3761       not_stack = (partial - offset) / UNITS_PER_WORD;
3762       offset /= UNITS_PER_WORD;
3763 
3764       /* If the partial register-part of the arg counts in its stack size,
3765 	 skip the part of stack space corresponding to the registers.
3766 	 Otherwise, start copying to the beginning of the stack space,
3767 	 by setting SKIP to 0.  */
3768       skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3769 
3770       if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3771 	x = validize_mem (force_const_mem (mode, x));
3772 
3773       /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3774 	 SUBREGs of such registers are not allowed.  */
3775       if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3776 	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3777 	x = copy_to_reg (x);
3778 
3779       /* Loop over all the words allocated on the stack for this arg.  */
3780       /* We can do it by words, because any scalar bigger than a word
3781 	 has a size a multiple of a word.  */
3782 #ifndef PUSH_ARGS_REVERSED
3783       for (i = not_stack; i < size; i++)
3784 #else
3785       for (i = size - 1; i >= not_stack; i--)
3786 #endif
3787 	if (i >= not_stack + offset)
3788 	  emit_push_insn (operand_subword_force (x, i, mode),
3789 			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3790 			  0, args_addr,
3791 			  GEN_INT (args_offset + ((i - not_stack + skip)
3792 						  * UNITS_PER_WORD)),
3793 			  reg_parm_stack_space, alignment_pad);
3794     }
3795   else
3796     {
3797       rtx addr;
3798       rtx dest;
3799 
3800       /* Push padding now if padding above and stack grows down,
3801 	 or if padding below and stack grows up.
3802 	 But if space already allocated, this has already been done.  */
3803       if (extra && args_addr == 0
3804 	  && where_pad != none && where_pad != stack_direction)
3805 	anti_adjust_stack (GEN_INT (extra));
3806 
3807 #ifdef PUSH_ROUNDING
3808       if (args_addr == 0 && PUSH_ARGS)
3809 	emit_single_push_insn (mode, x, type);
3810       else
3811 #endif
3812 	{
3813 	  if (GET_CODE (args_so_far) == CONST_INT)
3814 	    addr
3815 	      = memory_address (mode,
3816 				plus_constant (args_addr,
3817 					       INTVAL (args_so_far)));
3818 	  else
3819 	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3820 						       args_so_far));
3821 	  dest = gen_rtx_MEM (mode, addr);
3822 
3823 	  /* We do *not* set_mem_attributes here, because incoming arguments
3824 	     may overlap with sibling call outgoing arguments and we cannot
3825 	     allow reordering of reads from function arguments with stores
3826 	     to outgoing arguments of sibling calls.  We do, however, want
3827 	     to record the alignment of the stack slot.  */
3828 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
3829 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
3830 	  set_mem_align (dest, align);
3831 
3832 	  emit_move_insn (dest, x);
3833 	}
3834     }
3835 
3836   /* If part should go in registers, copy that part
3837      into the appropriate registers.  Do this now, at the end,
3838      since mem-to-mem copies above may do function calls.  */
3839   if (partial > 0 && reg != 0)
3840     {
3841       /* Handle calls that pass values in multiple non-contiguous locations.
3842 	 The Irix 6 ABI has examples of this.  */
3843       if (GET_CODE (reg) == PARALLEL)
3844 	emit_group_load (reg, x, type, -1);
3845       else
3846 	{
3847 	  gcc_assert (partial % UNITS_PER_WORD == 0);
3848 	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3849 	}
3850     }
3851 
3852   if (extra && args_addr == 0 && where_pad == stack_direction)
3853     anti_adjust_stack (GEN_INT (extra));
3854 
3855   if (alignment_pad && args_addr == 0)
3856     anti_adjust_stack (alignment_pad);
3857 }
3858 
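/* Editor's summary (illustration, not in the original source): for a
   small, well-aligned BLKmode argument of constant size on a
   PUSH_ROUNDING target, the code above pushes the data directly with
   move_by_pieces; otherwise it allocates the stack space (push_block,
   or an offset from ARGS_ADDR) and fills it with emit_block_move,
   loading any register part with move_block_to_reg or emit_group_load
   at the end.  */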
3859 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3860    operations.  */
3861 
3862 static rtx
3863 get_subtarget (rtx x)
3864 {
3865   return (optimize
3866           || x == 0
3867 	   /* Only registers can be subtargets.  */
3868 	   || !REG_P (x)
3869 	   /* Don't use hard regs to avoid extending their life.  */
3870 	   || REGNO (x) < FIRST_PSEUDO_REGISTER
3871 	  ? 0 : x);
3872 }
3873 
3874 /* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
3875    FIELD is a bitfield.  Returns true if the optimization was successful,
3876    and there's nothing else to do.  */
3877 
3878 static bool
3879 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3880 				 unsigned HOST_WIDE_INT bitpos,
3881 				 enum machine_mode mode1, rtx str_rtx,
3882 				 tree to, tree src)
3883 {
3884   enum machine_mode str_mode = GET_MODE (str_rtx);
3885   unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3886   tree op0, op1;
3887   rtx value, result;
3888   optab binop;
3889 
3890   if (mode1 != VOIDmode
3891       || bitsize >= BITS_PER_WORD
3892       || str_bitsize > BITS_PER_WORD
3893       || TREE_SIDE_EFFECTS (to)
3894       || TREE_THIS_VOLATILE (to))
3895     return false;
3896 
3897   STRIP_NOPS (src);
3898   if (!BINARY_CLASS_P (src)
3899       || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3900     return false;
3901 
3902   op0 = TREE_OPERAND (src, 0);
3903   op1 = TREE_OPERAND (src, 1);
3904   STRIP_NOPS (op0);
3905 
3906   if (!operand_equal_p (to, op0, 0))
3907     return false;
3908 
3909   if (MEM_P (str_rtx))
3910     {
3911       unsigned HOST_WIDE_INT offset1;
3912 
3913       if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3914 	str_mode = word_mode;
3915       str_mode = get_best_mode (bitsize, bitpos,
3916 				MEM_ALIGN (str_rtx), str_mode, 0);
3917       if (str_mode == VOIDmode)
3918 	return false;
3919       str_bitsize = GET_MODE_BITSIZE (str_mode);
3920 
3921       offset1 = bitpos;
3922       bitpos %= str_bitsize;
3923       offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3924       str_rtx = adjust_address (str_rtx, str_mode, offset1);
3925     }
3926   else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3927     return false;
3928 
3929   /* If the bit field covers the whole REG/MEM, store_field
3930      will likely generate better code.  */
3931   if (bitsize >= str_bitsize)
3932     return false;
3933 
3934   /* We can't handle fields split across multiple entities.  */
3935   if (bitpos + bitsize > str_bitsize)
3936     return false;
3937 
3938   if (BYTES_BIG_ENDIAN)
3939     bitpos = str_bitsize - bitpos - bitsize;
3940 
3941   switch (TREE_CODE (src))
3942     {
3943     case PLUS_EXPR:
3944     case MINUS_EXPR:
3945       /* For now, just optimize the case of the topmost bitfield
3946 	 where we don't need to do any masking and also
3947 	 1 bit bitfields where xor can be used.
3948 	 We might win by one instruction for the other bitfields
3949 	 too if insv/extv instructions aren't used, so that
3950 	 can be added later.  */
3951       if (bitpos + bitsize != str_bitsize
3952 	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3953 	break;
3954 
3955       value = expand_expr (op1, NULL_RTX, str_mode, 0);
3956       value = convert_modes (str_mode,
3957 			     TYPE_MODE (TREE_TYPE (op1)), value,
3958 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
3959 
3960       /* We may be accessing data outside the field, which means
3961 	 we can alias adjacent data.  */
3962       if (MEM_P (str_rtx))
3963 	{
3964 	  str_rtx = shallow_copy_rtx (str_rtx);
3965 	  set_mem_alias_set (str_rtx, 0);
3966 	  set_mem_expr (str_rtx, 0);
3967 	}
3968 
3969       binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3970       if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3971 	{
3972 	  value = expand_and (str_mode, value, const1_rtx, NULL);
3973 	  binop = xor_optab;
3974 	}
3975       value = expand_shift (LSHIFT_EXPR, str_mode, value,
3976 			    build_int_cst (NULL_TREE, bitpos),
3977 			    NULL_RTX, 1);
3978       result = expand_binop (str_mode, binop, str_rtx,
3979 			     value, str_rtx, 1, OPTAB_WIDEN);
3980       if (result != str_rtx)
3981 	emit_move_insn (str_rtx, result);
3982       return true;
3983 
3984     case BIT_IOR_EXPR:
3985     case BIT_XOR_EXPR:
3986       if (TREE_CODE (op1) != INTEGER_CST)
3987 	break;
3988       value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3989       value = convert_modes (GET_MODE (str_rtx),
3990 			     TYPE_MODE (TREE_TYPE (op1)), value,
3991 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
3992 
3993       /* We may be accessing data outside the field, which means
3994 	 we can alias adjacent data.  */
3995       if (MEM_P (str_rtx))
3996 	{
3997 	  str_rtx = shallow_copy_rtx (str_rtx);
3998 	  set_mem_alias_set (str_rtx, 0);
3999 	  set_mem_expr (str_rtx, 0);
4000 	}
4001 
4002       binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4003       if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4004 	{
4005 	  rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4006 			      - 1);
4007 	  value = expand_and (GET_MODE (str_rtx), value, mask,
4008 			      NULL_RTX);
4009 	}
4010       value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4011 			    build_int_cst (NULL_TREE, bitpos),
4012 			    NULL_RTX, 1);
4013       result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4014 			     value, str_rtx, 1, OPTAB_WIDEN);
4015       if (result != str_rtx)
4016 	emit_move_insn (str_rtx, result);
4017       return true;
4018 
4019     default:
4020       break;
4021     }
4022 
4023   return false;
4024 }
4025 
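/* Editor's illustration, not in the original source: for a bitfield
   update such as S.F += 1 where the target lays F out in the most
   significant bits of its storage word, the PLUS_EXPR case above
   shifts the addend left by BITPOS and applies a plain full-word add;
   carries fall off the top of the word, so no mask-and-insert sequence
   is needed.  A 1-bit field elsewhere in the word is updated with XOR
   instead.  */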
4026 
4027 /* Expand an assignment that stores the value of FROM into TO.  */
4028 
4029 void
4030 expand_assignment (tree to, tree from)
4031 {
4032   rtx to_rtx = 0;
4033   rtx result;
4034 
4035   /* Don't crash if the lhs of the assignment was erroneous.  */
4036   if (TREE_CODE (to) == ERROR_MARK)
4037     {
4038       result = expand_normal (from);
4039       return;
4040     }
4041 
4042   /* Optimize away no-op moves without side-effects.  */
4043   if (operand_equal_p (to, from, 0))
4044     return;
4045 
4046   /* Assignment of a structure component needs special treatment
4047      if the structure component's rtx is not simply a MEM.
4048      Assignment of an array element at a constant index, and assignment of
4049      an array element in an unaligned packed structure field, has the same
4050      problem.  */
4051   if (handled_component_p (to)
4052       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4053     {
4054       enum machine_mode mode1;
4055       HOST_WIDE_INT bitsize, bitpos;
4056       tree offset;
4057       int unsignedp;
4058       int volatilep = 0;
4059       tree tem;
4060 
4061       push_temp_slots ();
4062       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4063 				 &unsignedp, &volatilep, true);
4064 
4065       /* If we are going to use store_bit_field and extract_bit_field,
4066 	 make sure to_rtx will be safe for multiple use.  */
4067 
4068       to_rtx = expand_normal (tem);
4069 
4070       if (offset != 0)
4071 	{
4072 	  rtx offset_rtx;
4073 
4074 	  if (!MEM_P (to_rtx))
4075 	    {
4076 	      /* We can get constant negative offsets into arrays with broken
4077 		 user code.  Translate this to a trap instead of ICEing.  */
4078 	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4079 	      expand_builtin_trap ();
4080 	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4081 	    }
4082 
4083 	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4084 #ifdef POINTERS_EXTEND_UNSIGNED
4085 	  if (GET_MODE (offset_rtx) != Pmode)
4086 	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4087 #else
4088 	  if (GET_MODE (offset_rtx) != ptr_mode)
4089 	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4090 #endif
4091 
4092 	  /* A constant address in TO_RTX can have VOIDmode; we must not
4093 	     call force_reg in that case, so avoid it.  */
4094 	  if (MEM_P (to_rtx)
4095 	      && GET_MODE (to_rtx) == BLKmode
4096 	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4097 	      && bitsize > 0
4098 	      && (bitpos % bitsize) == 0
4099 	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4100 	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4101 	    {
4102 	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4103 	      bitpos = 0;
4104 	    }
4105 
4106 	  to_rtx = offset_address (to_rtx, offset_rtx,
4107 				   highest_pow2_factor_for_target (to,
4108 				   				   offset));
4109 	}
4110 
4111       /* Handle expand_expr of a complex value returning a CONCAT.  */
4112       if (GET_CODE (to_rtx) == CONCAT)
4113 	{
4114 	  if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4115 	    {
4116 	      gcc_assert (bitpos == 0);
4117 	      result = store_expr (from, to_rtx, false);
4118 	    }
4119 	  else
4120 	    {
4121 	      gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4122 	      result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4123 	    }
4124 	}
4125       else
4126 	{
4127 	  if (MEM_P (to_rtx))
4128 	    {
4129 	      /* If the field is at offset zero, we could have been given the
4130 		 DECL_RTX of the parent struct.  Don't munge it.  */
4131 	      to_rtx = shallow_copy_rtx (to_rtx);
4132 
4133 	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4134 
4135 	      /* Deal with volatile and readonly fields.  The former is only
4136 		 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
4137 	      if (volatilep)
4138 		MEM_VOLATILE_P (to_rtx) = 1;
4139 	      if (component_uses_parent_alias_set (to))
4140 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4141 	    }
4142 
4143 	  if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4144 					       to_rtx, to, from))
4145 	    result = NULL;
4146 	  else
4147 	    result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4148 				  TREE_TYPE (tem), get_alias_set (to));
4149 	}
4150 
4151       if (result)
4152 	preserve_temp_slots (result);
4153       free_temp_slots ();
4154       pop_temp_slots ();
4155       return;
4156     }
4157 
4158   /* If the rhs is a function call and its value is not an aggregate,
4159      call the function before we start to compute the lhs.
4160      This is needed for correct code for cases such as
4161      val = setjmp (buf) on machines where reference to val
4162      requires loading up part of an address in a separate insn.
4163 
4164      Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4165      since it might be a promoted variable where the zero- or sign- extension
4166      needs to be done.  Handling this in the normal way is safe because no
4167      computation is done before the call.  */
4168   if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4169       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4170       && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4171 	    && REG_P (DECL_RTL (to))))
4172     {
4173       rtx value;
4174 
4175       push_temp_slots ();
4176       value = expand_normal (from);
4177       if (to_rtx == 0)
4178 	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4179 
4180       /* Handle calls that return values in multiple non-contiguous locations.
4181 	 The Irix 6 ABI has examples of this.  */
4182       if (GET_CODE (to_rtx) == PARALLEL)
4183 	emit_group_load (to_rtx, value, TREE_TYPE (from),
4184 			 int_size_in_bytes (TREE_TYPE (from)));
4185       else if (GET_MODE (to_rtx) == BLKmode)
4186 	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4187       else
4188 	{
4189 	  if (POINTER_TYPE_P (TREE_TYPE (to)))
4190 	    value = convert_memory_address (GET_MODE (to_rtx), value);
4191 	  emit_move_insn (to_rtx, value);
4192 	}
4193       preserve_temp_slots (to_rtx);
4194       free_temp_slots ();
4195       pop_temp_slots ();
4196       return;
4197     }
4198 
4199   /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
4200      Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
4201 
4202   if (to_rtx == 0)
4203     to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4204 
4205   /* Don't move directly into a return register.  */
4206   if (TREE_CODE (to) == RESULT_DECL
4207       && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4208     {
4209       rtx temp;
4210 
4211       push_temp_slots ();
4212       temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4213 
4214       if (GET_CODE (to_rtx) == PARALLEL)
4215 	emit_group_load (to_rtx, temp, TREE_TYPE (from),
4216 			 int_size_in_bytes (TREE_TYPE (from)));
4217       else
4218 	emit_move_insn (to_rtx, temp);
4219 
4220       preserve_temp_slots (to_rtx);
4221       free_temp_slots ();
4222       pop_temp_slots ();
4223       return;
4224     }
4225 
4226   /* In case we are returning the contents of an object which overlaps
4227      the place the value is being stored, use a safe function when copying
4228      a value through a pointer into a structure value return block.  */
4229   if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4230       && current_function_returns_struct
4231       && !current_function_returns_pcc_struct)
4232     {
4233       rtx from_rtx, size;
4234 
4235       push_temp_slots ();
4236       size = expr_size (from);
4237       from_rtx = expand_normal (from);
4238 
4239       emit_library_call (memmove_libfunc, LCT_NORMAL,
4240 			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4241 			 XEXP (from_rtx, 0), Pmode,
4242 			 convert_to_mode (TYPE_MODE (sizetype),
4243 					  size, TYPE_UNSIGNED (sizetype)),
4244 			 TYPE_MODE (sizetype));
4245 
4246       preserve_temp_slots (to_rtx);
4247       free_temp_slots ();
4248       pop_temp_slots ();
4249       return;
4250     }
4251 
4252   /* Compute FROM and store the value in the rtx we got.  */
4253 
4254   push_temp_slots ();
4255   result = store_expr (from, to_rtx, 0);
4256   preserve_temp_slots (result);
4257   free_temp_slots ();
4258   pop_temp_slots ();
4259   return;
4260 }
4261 
4262 /* Generate code for computing expression EXP,
4263    and storing the value into TARGET.
4264 
4265    If the mode is BLKmode then we may return TARGET itself.
4266    It turns out that in BLKmode it doesn't cause a problem,
4267    because C has no operators that could combine two different
4268    assignments into the same BLKmode object with different values
4269    with no sequence point.  Will other languages need this to
4270    be more thorough?
4271 
4272    If CALL_PARAM_P is nonzero, this is a store into a call param on the
4273    stack, and block moves may need to be treated specially.  */
4274 
4275 rtx
4276 store_expr (tree exp, rtx target, int call_param_p)
4277 {
4278   rtx temp;
4279   rtx alt_rtl = NULL_RTX;
4280   int dont_return_target = 0;
4281 
4282   if (VOID_TYPE_P (TREE_TYPE (exp)))
4283     {
4284       /* C++ can generate ?: expressions with a throw expression in one
4285 	 branch and an rvalue in the other. Here, we resolve attempts to
4286 	 store the throw expression's nonexistent result.  */
4287       gcc_assert (!call_param_p);
4288       expand_expr (exp, const0_rtx, VOIDmode, 0);
4289       return NULL_RTX;
4290     }
4291   if (TREE_CODE (exp) == COMPOUND_EXPR)
4292     {
4293       /* Perform first part of compound expression, then assign from second
4294 	 part.  */
4295       expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4296 		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4297       return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4298     }
4299   else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4300     {
4301       /* For conditional expression, get safe form of the target.  Then
4302 	 test the condition, doing the appropriate assignment on either
4303 	 side.  This avoids the creation of unnecessary temporaries.
4304 	 For non-BLKmode, it is more efficient not to do this.  */
4305 
4306       rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4307 
4308       do_pending_stack_adjust ();
4309       NO_DEFER_POP;
4310       jumpifnot (TREE_OPERAND (exp, 0), lab1);
4311       store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4312       emit_jump_insn (gen_jump (lab2));
4313       emit_barrier ();
4314       emit_label (lab1);
4315       store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4316       emit_label (lab2);
4317       OK_DEFER_POP;
4318 
4319       return NULL_RTX;
4320     }
4321   else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4322     /* If this is a scalar in a register that is stored in a wider mode
4323        than the declared mode, compute the result into its declared mode
4324        and then convert to the wider mode.  Our value is the computed
4325        expression.  */
4326     {
4327       rtx inner_target = 0;
4328 
4329       /* We can do the conversion inside EXP, which will often result
4330 	 in some optimizations.  Do the conversion in two steps: first
4331 	 change the signedness, if needed, then the extend.  But don't
4332 	 do this if the type of EXP is a subtype of something else
4333 	 since then the conversion might involve more than just
4334 	 converting modes.  */
4335       if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4336 	  && TREE_TYPE (TREE_TYPE (exp)) == 0
4337 	  && (!lang_hooks.reduce_bit_field_operations
4338 	      || (GET_MODE_PRECISION (GET_MODE (target))
4339 		  == TYPE_PRECISION (TREE_TYPE (exp)))))
4340 	{
4341 	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
4342 	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4343 	    exp = fold_convert
4344 	      (lang_hooks.types.signed_or_unsigned_type
4345 	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4346 
4347 	  exp = fold_convert (lang_hooks.types.type_for_mode
4348 				(GET_MODE (SUBREG_REG (target)),
4349 				 SUBREG_PROMOTED_UNSIGNED_P (target)),
4350 			      exp);
4351 
4352 	  inner_target = SUBREG_REG (target);
4353 	}
4354 
4355       temp = expand_expr (exp, inner_target, VOIDmode,
4356 			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4357 
4358       /* If TEMP is a VOIDmode constant, use convert_modes to make
4359 	 sure that we properly convert it.  */
4360       if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4361 	{
4362 	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4363 				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4364 	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4365 			        GET_MODE (target), temp,
4366 			        SUBREG_PROMOTED_UNSIGNED_P (target));
4367 	}
4368 
4369       convert_move (SUBREG_REG (target), temp,
4370 		    SUBREG_PROMOTED_UNSIGNED_P (target));
4371 
4372       return NULL_RTX;
4373     }
4374   else
4375     {
4376       temp = expand_expr_real (exp, target, GET_MODE (target),
4377 			       (call_param_p
4378 				? EXPAND_STACK_PARM : EXPAND_NORMAL),
4379 			       &alt_rtl);
4380       /* Return TARGET if it's a specified hardware register.
4381 	 If TARGET is a volatile mem ref, either return TARGET
4382 	 or return a reg copied *from* TARGET; ANSI requires this.
4383 
4384 	 Otherwise, if TEMP is not TARGET, return TEMP
4385 	 if it is constant (for efficiency),
4386 	 or if we really want the correct value.  */
4387       if (!(target && REG_P (target)
4388 	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
4389 	  && !(MEM_P (target) && MEM_VOLATILE_P (target))
4390 	  && ! rtx_equal_p (temp, target)
4391 	  && CONSTANT_P (temp))
4392 	dont_return_target = 1;
4393     }
4394 
4395   /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4396      the same as that of TARGET, adjust the constant.  This is needed, for
4397      example, in case it is a CONST_DOUBLE and we want only a word-sized
4398      value.  */
4399   if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4400       && TREE_CODE (exp) != ERROR_MARK
4401       && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4402     temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4403 			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4404 
4405   /* If value was not generated in the target, store it there.
4406      Convert the value to TARGET's type first if necessary and emit the
4407      pending incrementations that have been queued when expanding EXP.
4408      Note that we cannot emit the whole queue blindly because this will
4409      effectively disable the POST_INC optimization later.
4410 
4411      If TEMP and TARGET compare equal according to rtx_equal_p, but
4412      one or both of them are volatile memory refs, we have to distinguish
4413      two cases:
4414      - expand_expr has used TARGET.  In this case, we must not generate
4415        another copy.  This can be detected by TEMP and TARGET comparing
4416        equal with ==.
4417      - expand_expr has not used TARGET - that means that the source just
4418        happens to have the same RTX form.  Since temp will have been created
4419        by expand_expr, it will compare unequal according to == .
4420        We must generate a copy in this case, to reach the correct number
4421        of volatile memory references.  */
4422 
4423   if ((! rtx_equal_p (temp, target)
4424        || (temp != target && (side_effects_p (temp)
4425 			      || side_effects_p (target))))
4426       && TREE_CODE (exp) != ERROR_MARK
4427       /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4428 	 but TARGET is not valid memory reference, TEMP will differ
4429 	 from TARGET although it is really the same location.  */
4430       && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4431       /* If there's nothing to copy, don't bother.  Don't call
4432 	 expr_size unless necessary, because some front-ends' (e.g. C++)
4433 	 expr_size hook must not be given objects that are not
4434 	 supposed to be bit-copied or bit-initialized.  */
4435       && expr_size (exp) != const0_rtx)
4436     {
4437       if (GET_MODE (temp) != GET_MODE (target)
4438 	  && GET_MODE (temp) != VOIDmode)
4439 	{
4440 	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4441 	  if (dont_return_target)
4442 	    {
4443 	      /* In this case, we will return TEMP,
4444 		 so make sure it has the proper mode.
4445 		 But don't forget to store the value into TARGET.  */
4446 	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4447 	      emit_move_insn (target, temp);
4448 	    }
4449 	  else
4450 	    convert_move (target, temp, unsignedp);
4451 	}
4452 
4453       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4454 	{
4455 	  /* Handle copying a string constant into an array.  The string
4456 	     constant may be shorter than the array.  So copy just the string's
4457 	     actual length, and clear the rest.  First get the size of the data
4458 	     type of the string, which is actually the size of the target.  */
4459 	  rtx size = expr_size (exp);
4460 
4461 	  if (GET_CODE (size) == CONST_INT
4462 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4463 	    emit_block_move (target, temp, size,
4464 			     (call_param_p
4465 			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4466 	  else
4467 	    {
4468 	      /* Compute the size of the data to copy from the string.  */
4469 	      tree copy_size
4470 		= size_binop (MIN_EXPR,
4471 			      make_tree (sizetype, size),
4472 			      size_int (TREE_STRING_LENGTH (exp)));
4473 	      rtx copy_size_rtx
4474 		= expand_expr (copy_size, NULL_RTX, VOIDmode,
4475 			       (call_param_p
4476 				? EXPAND_STACK_PARM : EXPAND_NORMAL));
4477 	      rtx label = 0;
4478 
4479 	      /* Copy that much.  */
4480 	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4481 					       TYPE_UNSIGNED (sizetype));
4482 	      emit_block_move (target, temp, copy_size_rtx,
4483 			       (call_param_p
4484 				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4485 
4486 	      /* Figure out how much is left in TARGET that we have to clear.
4487 		 Do all calculations in ptr_mode.  */
4488 	      if (GET_CODE (copy_size_rtx) == CONST_INT)
4489 		{
4490 		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4491 		  target = adjust_address (target, BLKmode,
4492 					   INTVAL (copy_size_rtx));
4493 		}
4494 	      else
4495 		{
4496 		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4497 				       copy_size_rtx, NULL_RTX, 0,
4498 				       OPTAB_LIB_WIDEN);
4499 
4500 #ifdef POINTERS_EXTEND_UNSIGNED
4501 		  if (GET_MODE (copy_size_rtx) != Pmode)
4502 		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4503 						     TYPE_UNSIGNED (sizetype));
4504 #endif
4505 
4506 		  target = offset_address (target, copy_size_rtx,
4507 					   highest_pow2_factor (copy_size));
4508 		  label = gen_label_rtx ();
4509 		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4510 					   GET_MODE (size), 0, label);
4511 		}
4512 
4513 	      if (size != const0_rtx)
4514 		clear_storage (target, size, BLOCK_OP_NORMAL);
4515 
4516 	      if (label)
4517 		emit_label (label);
4518 	    }
4519 	}
4520       /* Handle calls that return values in multiple non-contiguous locations.
4521 	 The Irix 6 ABI has examples of this.  */
4522       else if (GET_CODE (target) == PARALLEL)
4523 	emit_group_load (target, temp, TREE_TYPE (exp),
4524 			 int_size_in_bytes (TREE_TYPE (exp)));
4525       else if (GET_MODE (temp) == BLKmode)
4526 	emit_block_move (target, temp, expr_size (exp),
4527 			 (call_param_p
4528 			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4529       else
4530 	{
4531 	  temp = force_operand (temp, target);
4532 	  if (temp != target)
4533 	    emit_move_insn (target, temp);
4534 	}
4535     }
4536 
4537   return NULL_RTX;
4538 }
4539 
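/* Editor's illustration, not in the original source: for

       char buf[16] = "hi";

   the STRING_CST branch above copies only TREE_STRING_LENGTH bytes
   (the string including its terminating NUL) with emit_block_move and
   then clears the remaining bytes of the target with clear_storage,
   instead of copying the full 16 bytes from the shorter constant.  */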
4540 /* Helper for categorize_ctor_elements.  Identical interface.  */
4541 
4542 static bool
4543 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4544 			    HOST_WIDE_INT *p_elt_count,
4545 			    bool *p_must_clear)
4546 {
4547   unsigned HOST_WIDE_INT idx;
4548   HOST_WIDE_INT nz_elts, elt_count;
4549   tree value, purpose;
4550 
4551   /* Whether CTOR is a valid constant initializer, in accordance with what
4552      initializer_constant_valid_p does.  If inferred from the constructor
4553      elements, true until proven otherwise.  */
4554   bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4555   bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4556 
4557   nz_elts = 0;
4558   elt_count = 0;
4559 
4560   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4561     {
4562       HOST_WIDE_INT mult;
4563 
4564       mult = 1;
4565       if (TREE_CODE (purpose) == RANGE_EXPR)
4566 	{
4567 	  tree lo_index = TREE_OPERAND (purpose, 0);
4568 	  tree hi_index = TREE_OPERAND (purpose, 1);
4569 
4570 	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4571 	    mult = (tree_low_cst (hi_index, 1)
4572 		    - tree_low_cst (lo_index, 1) + 1);
4573 	}
4574 
4575       switch (TREE_CODE (value))
4576 	{
4577 	case CONSTRUCTOR:
4578 	  {
4579 	    HOST_WIDE_INT nz = 0, ic = 0;
4580 
4581 	    bool const_elt_p
4582 	      = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4583 
4584 	    nz_elts += mult * nz;
4585  	    elt_count += mult * ic;
4586 
4587 	    if (const_from_elts_p && const_p)
4588 	      const_p = const_elt_p;
4589 	  }
4590 	  break;
4591 
4592 	case INTEGER_CST:
4593 	case REAL_CST:
4594 	  if (!initializer_zerop (value))
4595 	    nz_elts += mult;
4596 	  elt_count += mult;
4597 	  break;
4598 
4599 	case STRING_CST:
4600 	  nz_elts += mult * TREE_STRING_LENGTH (value);
4601 	  elt_count += mult * TREE_STRING_LENGTH (value);
4602 	  break;
4603 
4604 	case COMPLEX_CST:
4605 	  if (!initializer_zerop (TREE_REALPART (value)))
4606 	    nz_elts += mult;
4607 	  if (!initializer_zerop (TREE_IMAGPART (value)))
4608 	    nz_elts += mult;
4609 	  elt_count += mult;
4610 	  break;
4611 
4612 	case VECTOR_CST:
4613 	  {
4614 	    tree v;
4615 	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4616 	      {
4617 		if (!initializer_zerop (TREE_VALUE (v)))
4618 		  nz_elts += mult;
4619 		elt_count += mult;
4620 	      }
4621 	  }
4622 	  break;
4623 
4624 	default:
4625 	  nz_elts += mult;
4626 	  elt_count += mult;
4627 
4628 	  if (const_from_elts_p && const_p)
4629 	    const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4630 		      != NULL_TREE;
4631 	  break;
4632 	}
4633     }
4634 
4635   if (!*p_must_clear
4636       && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4637 	  || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4638     {
4639       tree init_sub_type;
4640       bool clear_this = true;
4641 
4642       if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4643 	{
4644 	  /* We don't expect more than one element of the union to be
4645 	     initialized.  Not sure what we should do otherwise... */
4646           gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4647 		      == 1);
4648 
4649           init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4650 						CONSTRUCTOR_ELTS (ctor),
4651 						0)->value);
4652 
4653 	  /* ??? We could look at each element of the union, and find the
4654 	     largest element.  Which would avoid comparing the size of the
4655 	     initialized element against any tail padding in the union.
4656 	     Doesn't seem worth the effort...  */
4657 	  if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4658 				TYPE_SIZE (init_sub_type)) == 1)
4659 	    {
4660 	      /* And now we have to find out if the element itself is fully
4661 		 constructed.  E.g. for union { struct { int a, b; } s; } u
4662 		 = { .s = { .a = 1 } }.  */
4663 	      if (elt_count == count_type_elements (init_sub_type, false))
4664 		clear_this = false;
4665 	    }
4666 	}
4667 
4668       *p_must_clear = clear_this;
4669     }
4670 
4671   *p_nz_elts += nz_elts;
4672   *p_elt_count += elt_count;
4673 
4674   return const_p;
4675 }
4676 
4677 /* Examine CTOR to discover:
4678    * how many scalar fields are set to nonzero values,
4679      and place it in *P_NZ_ELTS;
4680    * how many scalar fields in total are in CTOR,
4681      and place it in *P_ELT_COUNT.
4682    * if a type is a union, and the initializer from the constructor
4683      is not the largest element in the union, then set *p_must_clear.
4684 
4685    Return whether or not CTOR is a valid static constant initializer, the same
4686    as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
4687 
4688 bool
4689 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4690 			  HOST_WIDE_INT *p_elt_count,
4691 			  bool *p_must_clear)
4692 {
4693   *p_nz_elts = 0;
4694   *p_elt_count = 0;
4695   *p_must_clear = false;
4696 
4697   return
4698     categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4699 }
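
/* Illustrative example: for an initializer such as

       static int a[3] = { 1, 0, 2 };

   the CONSTRUCTOR should come back with *P_NZ_ELTS == 2 (two nonzero
   scalars), *P_ELT_COUNT == 3 and *P_MUST_CLEAR == false, and the
   return value should be true, since every element is itself a valid
   constant initializer.  */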
4700 
4701 /* Count the number of scalars in TYPE.  Return -1 on overflow or
4702    variable-sized.  If ALLOW_FLEXARR is true, don't count flexible
4703    array member at the end of the structure.  */
4704 
4705 HOST_WIDE_INT
4706 count_type_elements (tree type, bool allow_flexarr)
4707 {
4708   const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4709   switch (TREE_CODE (type))
4710     {
4711     case ARRAY_TYPE:
4712       {
4713 	tree telts = array_type_nelts (type);
4714 	if (telts && host_integerp (telts, 1))
4715 	  {
4716 	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4717 	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4718 	    if (n == 0)
4719 	      return 0;
4720 	    else if (max / n > m)
4721 	      return n * m;
4722 	  }
4723 	return -1;
4724       }
4725 
4726     case RECORD_TYPE:
4727       {
4728 	HOST_WIDE_INT n = 0, t;
4729 	tree f;
4730 
4731 	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4732 	  if (TREE_CODE (f) == FIELD_DECL)
4733 	    {
4734 	      t = count_type_elements (TREE_TYPE (f), false);
4735 	      if (t < 0)
4736 		{
4737 		  /* Check for structures with flexible array member.  */
4738 		  tree tf = TREE_TYPE (f);
4739 		  if (allow_flexarr
4740 		      && TREE_CHAIN (f) == NULL
4741 		      && TREE_CODE (tf) == ARRAY_TYPE
4742 		      && TYPE_DOMAIN (tf)
4743 		      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4744 		      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4745 		      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4746 		      && int_size_in_bytes (type) >= 0)
4747 		    break;
4748 
4749 		  return -1;
4750 		}
4751 	      n += t;
4752 	    }
4753 
4754 	return n;
4755       }
4756 
4757     case UNION_TYPE:
4758     case QUAL_UNION_TYPE:
4759       {
4760 	/* Ho hum.  How in the world do we guess here?  Clearly it isn't
4761 	   right to count the fields.  Guess based on the number of words.  */
4762         HOST_WIDE_INT n = int_size_in_bytes (type);
4763 	if (n < 0)
4764 	  return -1;
4765 	return n / UNITS_PER_WORD;
4766       }
4767 
4768     case COMPLEX_TYPE:
4769       return 2;
4770 
4771     case VECTOR_TYPE:
4772       return TYPE_VECTOR_SUBPARTS (type);
4773 
4774     case INTEGER_TYPE:
4775     case REAL_TYPE:
4776     case ENUMERAL_TYPE:
4777     case BOOLEAN_TYPE:
4778     case POINTER_TYPE:
4779     case OFFSET_TYPE:
4780     case REFERENCE_TYPE:
4781       return 1;
4782 
4783     case VOID_TYPE:
4784     case METHOD_TYPE:
4785     case FUNCTION_TYPE:
4786     case LANG_TYPE:
4787     default:
4788       gcc_unreachable ();
4789     }
4790 }
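
/* Illustration of the counting above: for

       struct s { int a; double b[4]; _Complex float c; };

   the expected result is 1 + 4 + 2 == 7 scalars, while a union is only
   estimated from its size in words and a variable-sized array makes the
   whole count come back as -1.  */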
4791 
4792 /* Return 1 if EXP contains mostly (i.e., at least 3/4) zeros.  */
4793 
4794 static int
4795 mostly_zeros_p (tree exp)
4796 {
4797   if (TREE_CODE (exp) == CONSTRUCTOR)
4798 
4799     {
4800       HOST_WIDE_INT nz_elts, count, elts;
4801       bool must_clear;
4802 
4803       categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4804       if (must_clear)
4805 	return 1;
4806 
4807       elts = count_type_elements (TREE_TYPE (exp), false);
4808 
4809       return nz_elts < elts / 4;
4810     }
4811 
4812   return initializer_zerop (exp);
4813 }
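
/* For instance, a sparse initializer such as

       int a[16] = { [2] = 7 };

   has one nonzero scalar against 16 counted for the type, so this
   should return 1, whereas { 1, 2, 3, 4 } over the same type should
   not.  */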
4814 
4815 /* Return 1 if EXP contains all zeros.  */
4816 
4817 static int
4818 all_zeros_p (tree exp)
4819 {
4820   if (TREE_CODE (exp) == CONSTRUCTOR)
4821 
4822     {
4823       HOST_WIDE_INT nz_elts, count;
4824       bool must_clear;
4825 
4826       categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4827       return nz_elts == 0;
4828     }
4829 
4830   return initializer_zerop (exp);
4831 }
4832 
4833 /* Helper function for store_constructor.
4834    TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4835    TYPE is the type of the CONSTRUCTOR, not the element type.
4836    CLEARED is as for store_constructor.
4837    ALIAS_SET is the alias set to use for any stores.
4838 
4839    This provides a recursive shortcut back to store_constructor when it isn't
4840    necessary to go through store_field.  This is so that we can pass through
4841    the cleared field to let store_constructor know that we may not have to
4842    clear a substructure if the outer structure has already been cleared.  */
4843 
4844 static void
4845 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4846 			 HOST_WIDE_INT bitpos, enum machine_mode mode,
4847 			 tree exp, tree type, int cleared, int alias_set)
4848 {
4849   if (TREE_CODE (exp) == CONSTRUCTOR
4850       /* We can only call store_constructor recursively if the size and
4851 	 bit position are on a byte boundary.  */
4852       && bitpos % BITS_PER_UNIT == 0
4853       && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4854       /* If we have a nonzero bitpos for a register target, then we just
4855 	 let store_field do the bitfield handling.  This is unlikely to
4856 	 generate unnecessary clear instructions anyways.  */
4857       && (bitpos == 0 || MEM_P (target)))
4858     {
4859       if (MEM_P (target))
4860 	target
4861 	  = adjust_address (target,
4862 			    GET_MODE (target) == BLKmode
4863 			    || 0 != (bitpos
4864 				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4865 			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4866 
4867 
4868       /* Update the alias set, if required.  */
4869       if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4870 	  && MEM_ALIAS_SET (target) != 0)
4871 	{
4872 	  target = copy_rtx (target);
4873 	  set_mem_alias_set (target, alias_set);
4874 	}
4875 
4876       store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4877     }
4878   else
4879     store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4880 }
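
/* Rough illustration of the shortcut: a byte-aligned aggregate member
   initialized from a nested CONSTRUCTOR, e.g.

       struct outer { struct inner { int x, y; } in; } o = { { 1, 2 } };

   is expected to recurse into store_constructor directly, while a value
   whose bit position or size is not a whole number of bytes falls back
   to store_field and its bit-field handling.  */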
4881 
4882 /* Store the value of constructor EXP into the rtx TARGET.
4883    TARGET is either a REG or a MEM; we know it cannot conflict, since
4884    safe_from_p has been called.
4885    CLEARED is true if TARGET is known to have been zero'd.
4886    SIZE is the number of bytes of TARGET we are allowed to modify: this
4887    may not be the same as the size of EXP if we are assigning to a field
4888    which has been packed to exclude padding bits.  */
4889 
4890 static void
4891 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4892 {
4893   tree type = TREE_TYPE (exp);
4894 #ifdef WORD_REGISTER_OPERATIONS
4895   HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4896 #endif
4897 
4898   switch (TREE_CODE (type))
4899     {
4900     case RECORD_TYPE:
4901     case UNION_TYPE:
4902     case QUAL_UNION_TYPE:
4903       {
4904 	unsigned HOST_WIDE_INT idx;
4905 	tree field, value;
4906 
4907 	/* If size is zero or the target is already cleared, do nothing.  */
4908 	if (size == 0 || cleared)
4909 	  cleared = 1;
4910 	/* We either clear the aggregate or indicate the value is dead.  */
4911 	else if ((TREE_CODE (type) == UNION_TYPE
4912 		  || TREE_CODE (type) == QUAL_UNION_TYPE)
4913 		 && ! CONSTRUCTOR_ELTS (exp))
4914 	  /* If the constructor is empty, clear the union.  */
4915 	  {
4916 	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4917 	    cleared = 1;
4918 	  }
4919 
4920 	/* If we are building a static constructor into a register,
4921 	   set the initial value as zero so we can fold the value into
4922 	   a constant.  But if more than one register is involved,
4923 	   this probably loses.  */
4924 	else if (REG_P (target) && TREE_STATIC (exp)
4925 		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4926 	  {
4927 	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4928 	    cleared = 1;
4929 	  }
4930 
4931         /* If the constructor has fewer fields than the structure or
4932 	   if we are initializing the structure to mostly zeros, clear
4933 	   the whole structure first.  Don't do this if TARGET is a
4934 	   register whose mode size isn't equal to SIZE since
4935 	   clear_storage can't handle this case.  */
4936 	else if (size > 0
4937 		 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4938 		      != fields_length (type))
4939 		     || mostly_zeros_p (exp))
4940 		 && (!REG_P (target)
4941 		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4942 			 == size)))
4943 	  {
4944 	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4945 	    cleared = 1;
4946 	  }
4947 
4948 	if (! cleared)
4949 	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4950 
4951 	/* Store each element of the constructor into the
4952 	   corresponding field of TARGET.  */
4953 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4954 	  {
4955 	    enum machine_mode mode;
4956 	    HOST_WIDE_INT bitsize;
4957 	    HOST_WIDE_INT bitpos = 0;
4958 	    tree offset;
4959 	    rtx to_rtx = target;
4960 
4961 	    /* Just ignore missing fields.  We cleared the whole
4962 	       structure, above, if any fields are missing.  */
4963 	    if (field == 0)
4964 	      continue;
4965 
4966 	    if (cleared && initializer_zerop (value))
4967 	      continue;
4968 
4969 	    if (host_integerp (DECL_SIZE (field), 1))
4970 	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
4971 	    else
4972 	      bitsize = -1;
4973 
4974 	    mode = DECL_MODE (field);
4975 	    if (DECL_BIT_FIELD (field))
4976 	      mode = VOIDmode;
4977 
4978 	    offset = DECL_FIELD_OFFSET (field);
4979 	    if (host_integerp (offset, 0)
4980 		&& host_integerp (bit_position (field), 0))
4981 	      {
4982 		bitpos = int_bit_position (field);
4983 		offset = 0;
4984 	      }
4985 	    else
4986 	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4987 
4988 	    if (offset)
4989 	      {
4990 		rtx offset_rtx;
4991 
4992 		offset
4993 		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4994 						    make_tree (TREE_TYPE (exp),
4995 							       target));
4996 
4997 		offset_rtx = expand_normal (offset);
4998 		gcc_assert (MEM_P (to_rtx));
4999 
5000 #ifdef POINTERS_EXTEND_UNSIGNED
5001 		if (GET_MODE (offset_rtx) != Pmode)
5002 		  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5003 #else
5004 		if (GET_MODE (offset_rtx) != ptr_mode)
5005 		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5006 #endif
5007 
5008 		to_rtx = offset_address (to_rtx, offset_rtx,
5009 					 highest_pow2_factor (offset));
5010 	      }
5011 
5012 #ifdef WORD_REGISTER_OPERATIONS
5013 	    /* If this initializes a field that is smaller than a
5014 	       word, at the start of a word, try to widen it to a full
5015 	       word.  This special case allows us to output C++ member
5016 	       function initializations in a form that the optimizers
5017 	       can understand.  */
5018 	    if (REG_P (target)
5019 		&& bitsize < BITS_PER_WORD
5020 		&& bitpos % BITS_PER_WORD == 0
5021 		&& GET_MODE_CLASS (mode) == MODE_INT
5022 		&& TREE_CODE (value) == INTEGER_CST
5023 		&& exp_size >= 0
5024 		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5025 	      {
5026 		tree type = TREE_TYPE (value);
5027 
5028 		if (TYPE_PRECISION (type) < BITS_PER_WORD)
5029 		  {
5030 		    type = lang_hooks.types.type_for_size
5031 		      (BITS_PER_WORD, TYPE_UNSIGNED (type));
5032 		    value = fold_convert (type, value);
5033 		  }
5034 
5035 		if (BYTES_BIG_ENDIAN)
5036 		  value
5037 		   = fold_build2 (LSHIFT_EXPR, type, value,
5038 				   build_int_cst (type,
5039 						  BITS_PER_WORD - bitsize));
5040 		bitsize = BITS_PER_WORD;
5041 		mode = word_mode;
5042 	      }
5043 #endif
5044 
5045 	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5046 		&& DECL_NONADDRESSABLE_P (field))
5047 	      {
5048 		to_rtx = copy_rtx (to_rtx);
5049 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5050 	      }
5051 
5052 	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
5053 				     value, type, cleared,
5054 				     get_alias_set (TREE_TYPE (field)));
5055 	  }
5056 	break;
5057       }
5058     case ARRAY_TYPE:
5059       {
5060 	tree value, index;
5061 	unsigned HOST_WIDE_INT i;
5062 	int need_to_clear;
5063 	tree domain;
5064 	tree elttype = TREE_TYPE (type);
5065 	int const_bounds_p;
5066 	HOST_WIDE_INT minelt = 0;
5067 	HOST_WIDE_INT maxelt = 0;
5068 
5069 	domain = TYPE_DOMAIN (type);
5070 	const_bounds_p = (TYPE_MIN_VALUE (domain)
5071 			  && TYPE_MAX_VALUE (domain)
5072 			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
5073 			  && host_integerp (TYPE_MAX_VALUE (domain), 0));
5074 
5075 	/* If we have constant bounds for the range of the type, get them.  */
5076 	if (const_bounds_p)
5077 	  {
5078 	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5079 	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5080 	  }
5081 
5082 	/* If the constructor has fewer elements than the array, clear
5083            the whole array first.  Similarly if this is a static
5084            constructor of a non-BLKmode object.  */
5085 	if (cleared)
5086 	  need_to_clear = 0;
5087 	else if (REG_P (target) && TREE_STATIC (exp))
5088 	  need_to_clear = 1;
5089 	else
5090 	  {
5091 	    unsigned HOST_WIDE_INT idx;
5092 	    tree index, value;
5093 	    HOST_WIDE_INT count = 0, zero_count = 0;
5094 	    need_to_clear = ! const_bounds_p;
5095 
5096 	    /* This loop is a more accurate version of the loop in
5097 	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
5098 	       is also needed to check for missing elements.  */
5099 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5100 	      {
5101 		HOST_WIDE_INT this_node_count;
5102 
5103 		if (need_to_clear)
5104 		  break;
5105 
5106 		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5107 		  {
5108 		    tree lo_index = TREE_OPERAND (index, 0);
5109 		    tree hi_index = TREE_OPERAND (index, 1);
5110 
5111 		    if (! host_integerp (lo_index, 1)
5112 			|| ! host_integerp (hi_index, 1))
5113 		      {
5114 			need_to_clear = 1;
5115 			break;
5116 		      }
5117 
5118 		    this_node_count = (tree_low_cst (hi_index, 1)
5119 				       - tree_low_cst (lo_index, 1) + 1);
5120 		  }
5121 		else
5122 		  this_node_count = 1;
5123 
5124 		count += this_node_count;
5125 		if (mostly_zeros_p (value))
5126 		  zero_count += this_node_count;
5127 	      }
5128 
5129 	    /* Clear the entire array first if there are any missing
5130 	       elements, or if the incidence of zero elements is >=
5131 	       75%.  */
5132 	    if (! need_to_clear
5133 		&& (count < maxelt - minelt + 1
5134 		    || 4 * zero_count >= 3 * count))
5135 	      need_to_clear = 1;
5136 	  }
5137 
5138 	if (need_to_clear && size > 0)
5139 	  {
5140 	    if (REG_P (target))
5141 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5142 	    else
5143 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5144 	    cleared = 1;
5145 	  }
5146 
5147 	if (!cleared && REG_P (target))
5148 	  /* Inform later passes that the old value is dead.  */
5149 	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5150 
5151 	/* Store each element of the constructor into the
5152 	   corresponding element of TARGET, determined by counting the
5153 	   elements.  */
5154 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5155 	  {
5156 	    enum machine_mode mode;
5157 	    HOST_WIDE_INT bitsize;
5158 	    HOST_WIDE_INT bitpos;
5159 	    int unsignedp;
5160 	    rtx xtarget = target;
5161 
5162 	    if (cleared && initializer_zerop (value))
5163 	      continue;
5164 
5165 	    unsignedp = TYPE_UNSIGNED (elttype);
5166 	    mode = TYPE_MODE (elttype);
5167 	    if (mode == BLKmode)
5168 	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5169 			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5170 			 : -1);
5171 	    else
5172 	      bitsize = GET_MODE_BITSIZE (mode);
5173 
5174 	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5175 	      {
5176 		tree lo_index = TREE_OPERAND (index, 0);
5177 		tree hi_index = TREE_OPERAND (index, 1);
5178 		rtx index_r, pos_rtx;
5179 		HOST_WIDE_INT lo, hi, count;
5180 		tree position;
5181 
5182 		/* If the range is constant and "small", unroll the loop.  */
5183 		if (const_bounds_p
5184 		    && host_integerp (lo_index, 0)
5185 		    && host_integerp (hi_index, 0)
5186 		    && (lo = tree_low_cst (lo_index, 0),
5187 			hi = tree_low_cst (hi_index, 0),
5188 			count = hi - lo + 1,
5189 			(!MEM_P (target)
5190 			 || count <= 2
5191 			 || (host_integerp (TYPE_SIZE (elttype), 1)
5192 			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5193 				 <= 40 * 8)))))
5194 		  {
5195 		    lo -= minelt;  hi -= minelt;
5196 		    for (; lo <= hi; lo++)
5197 		      {
5198 			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5199 
5200 			if (MEM_P (target)
5201 			    && !MEM_KEEP_ALIAS_SET_P (target)
5202 			    && TREE_CODE (type) == ARRAY_TYPE
5203 			    && TYPE_NONALIASED_COMPONENT (type))
5204 			  {
5205 			    target = copy_rtx (target);
5206 			    MEM_KEEP_ALIAS_SET_P (target) = 1;
5207 			  }
5208 
5209 			store_constructor_field
5210 			  (target, bitsize, bitpos, mode, value, type, cleared,
5211 			   get_alias_set (elttype));
5212 		      }
5213 		  }
5214 		else
5215 		  {
5216 		    rtx loop_start = gen_label_rtx ();
5217 		    rtx loop_end = gen_label_rtx ();
5218 		    tree exit_cond;
5219 
5220 		    expand_normal (hi_index);
5221 		    unsignedp = TYPE_UNSIGNED (domain);
5222 
5223 		    index = build_decl (VAR_DECL, NULL_TREE, domain);
5224 
5225 		    index_r
5226 		      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5227 						   &unsignedp, 0));
5228 		    SET_DECL_RTL (index, index_r);
5229 		    store_expr (lo_index, index_r, 0);
5230 
5231 		    /* Build the head of the loop.  */
5232 		    do_pending_stack_adjust ();
5233 		    emit_label (loop_start);
5234 
5235 		    /* Assign value to element index.  */
5236 		    position =
5237 		      fold_convert (ssizetype,
5238 				    fold_build2 (MINUS_EXPR,
5239 						 TREE_TYPE (index),
5240 						 index,
5241 						 TYPE_MIN_VALUE (domain)));
5242 
5243 		    position =
5244 			size_binop (MULT_EXPR, position,
5245 				    fold_convert (ssizetype,
5246 						  TYPE_SIZE_UNIT (elttype)));
5247 
5248 		    pos_rtx = expand_normal (position);
5249 		    xtarget = offset_address (target, pos_rtx,
5250 					      highest_pow2_factor (position));
5251 		    xtarget = adjust_address (xtarget, mode, 0);
5252 		    if (TREE_CODE (value) == CONSTRUCTOR)
5253 		      store_constructor (value, xtarget, cleared,
5254 					 bitsize / BITS_PER_UNIT);
5255 		    else
5256 		      store_expr (value, xtarget, 0);
5257 
5258 		    /* Generate a conditional jump to exit the loop.  */
5259 		    exit_cond = build2 (LT_EXPR, integer_type_node,
5260 					index, hi_index);
5261 		    jumpif (exit_cond, loop_end);
5262 
5263 		    /* Update the loop counter, and jump to the head of
5264 		       the loop.  */
5265 		    expand_assignment (index,
5266 				       build2 (PLUS_EXPR, TREE_TYPE (index),
5267 					       index, integer_one_node));
5268 
5269 		    emit_jump (loop_start);
5270 
5271 		    /* Build the end of the loop.  */
5272 		    emit_label (loop_end);
5273 		  }
5274 	      }
5275 	    else if ((index != 0 && ! host_integerp (index, 0))
5276 		     || ! host_integerp (TYPE_SIZE (elttype), 1))
5277 	      {
5278 		tree position;
5279 
5280 		if (index == 0)
5281 		  index = ssize_int (1);
5282 
5283 		if (minelt)
5284 		  index = fold_convert (ssizetype,
5285 					fold_build2 (MINUS_EXPR,
5286 						     TREE_TYPE (index),
5287 						     index,
5288 						     TYPE_MIN_VALUE (domain)));
5289 
5290 		position =
5291 		  size_binop (MULT_EXPR, index,
5292 			      fold_convert (ssizetype,
5293 					    TYPE_SIZE_UNIT (elttype)));
5294 		xtarget = offset_address (target,
5295 					  expand_normal (position),
5296 					  highest_pow2_factor (position));
5297 		xtarget = adjust_address (xtarget, mode, 0);
5298 		store_expr (value, xtarget, 0);
5299 	      }
5300 	    else
5301 	      {
5302 		if (index != 0)
5303 		  bitpos = ((tree_low_cst (index, 0) - minelt)
5304 			    * tree_low_cst (TYPE_SIZE (elttype), 1));
5305 		else
5306 		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5307 
5308 		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5309 		    && TREE_CODE (type) == ARRAY_TYPE
5310 		    && TYPE_NONALIASED_COMPONENT (type))
5311 		  {
5312 		    target = copy_rtx (target);
5313 		    MEM_KEEP_ALIAS_SET_P (target) = 1;
5314 		  }
5315 		store_constructor_field (target, bitsize, bitpos, mode, value,
5316 					 type, cleared, get_alias_set (elttype));
5317 	      }
5318 	  }
5319 	break;
5320       }
5321 
5322     case VECTOR_TYPE:
5323       {
5324 	unsigned HOST_WIDE_INT idx;
5325 	constructor_elt *ce;
5326 	int i;
5327 	int need_to_clear;
5328 	int icode = 0;
5329 	tree elttype = TREE_TYPE (type);
5330 	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5331 	enum machine_mode eltmode = TYPE_MODE (elttype);
5332 	HOST_WIDE_INT bitsize;
5333 	HOST_WIDE_INT bitpos;
5334 	rtvec vector = NULL;
5335 	unsigned n_elts;
5336 
5337 	gcc_assert (eltmode != BLKmode);
5338 
5339 	n_elts = TYPE_VECTOR_SUBPARTS (type);
5340 	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5341 	  {
5342 	    enum machine_mode mode = GET_MODE (target);
5343 
5344 	    icode = (int) vec_init_optab->handlers[mode].insn_code;
5345 	    if (icode != CODE_FOR_nothing)
5346 	      {
5347 		unsigned int i;
5348 
5349 		vector = rtvec_alloc (n_elts);
5350 		for (i = 0; i < n_elts; i++)
5351 		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5352 	      }
5353 	  }
5354 
5355 	/* If the constructor has fewer elements than the vector,
5356 	   clear the whole vector first.  Similarly if this is a static
5357 	   constructor of a non-BLKmode object.  */
5358 	if (cleared)
5359 	  need_to_clear = 0;
5360 	else if (REG_P (target) && TREE_STATIC (exp))
5361 	  need_to_clear = 1;
5362 	else
5363 	  {
5364 	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5365 	    tree value;
5366 
5367 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5368 	      {
5369 		int n_elts_here = tree_low_cst
5370 		  (int_const_binop (TRUNC_DIV_EXPR,
5371 				    TYPE_SIZE (TREE_TYPE (value)),
5372 				    TYPE_SIZE (elttype), 0), 1);
5373 
5374 		count += n_elts_here;
5375 		if (mostly_zeros_p (value))
5376 		  zero_count += n_elts_here;
5377 	      }
5378 
5379 	    /* Clear the entire vector first if there are any missing elements,
5380 	       or if the incidence of zero elements is >= 75%.  */
5381 	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5382 	  }
5383 
5384 	if (need_to_clear && size > 0 && !vector)
5385 	  {
5386 	    if (REG_P (target))
5387 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5388 	    else
5389 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5390 	    cleared = 1;
5391 	  }
5392 
5393 	/* Inform later passes that the old value is dead.  */
5394 	if (!cleared && !vector && REG_P (target))
5395 	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5396 
5397         /* Store each element of the constructor into the corresponding
5398 	   element of TARGET, determined by counting the elements.  */
5399 	for (idx = 0, i = 0;
5400 	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5401 	     idx++, i += bitsize / elt_size)
5402 	  {
5403 	    HOST_WIDE_INT eltpos;
5404 	    tree value = ce->value;
5405 
5406 	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5407 	    if (cleared && initializer_zerop (value))
5408 	      continue;
5409 
5410 	    if (ce->index)
5411 	      eltpos = tree_low_cst (ce->index, 1);
5412 	    else
5413 	      eltpos = i;
5414 
5415 	    if (vector)
5416 	      {
5417 	        /* Vector CONSTRUCTORs should only be built from smaller
5418 		   vectors in the case of BLKmode vectors.  */
5419 		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5420 		RTVEC_ELT (vector, eltpos)
5421 		  = expand_normal (value);
5422 	      }
5423 	    else
5424 	      {
5425 		enum machine_mode value_mode =
5426 		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5427 		  ? TYPE_MODE (TREE_TYPE (value))
5428 		  : eltmode;
5429 		bitpos = eltpos * elt_size;
5430 		store_constructor_field (target, bitsize, bitpos,
5431 					 value_mode, value, type,
5432 					 cleared, get_alias_set (elttype));
5433 	      }
5434 	  }
5435 
5436 	if (vector)
5437 	  emit_insn (GEN_FCN (icode)
5438 		     (target,
5439 		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
5440 	break;
5441       }
5442 
5443     default:
5444       gcc_unreachable ();
5445     }
5446 }
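
/* Sketch of the clearing heuristic above: for

       struct { int a, b, c, d; } s = { .a = 1 };

   only one of the four fields appears in the CONSTRUCTOR, so the whole
   of TARGET should be cleared first (clear_storage, or a CONST0_RTX
   move for a register) and the single nonzero field stored afterwards;
   a fully specified, mostly nonzero initializer is instead expected to
   be stored field by field without the initial clear.  */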
5447 
5448 /* Store the value of EXP (an expression tree)
5449    into a subfield of TARGET which has mode MODE and occupies
5450    BITSIZE bits, starting BITPOS bits from the start of TARGET.
5451    If MODE is VOIDmode, it means that we are storing into a bit-field.
5452 
5453    Always return const0_rtx unless we have something particular to
5454    return.
5455 
5456    TYPE is the type of the underlying object,
5457 
5458    ALIAS_SET is the alias set for the destination.  This value will
5459    (in general) be different from that for TARGET, since TARGET is a
5460    reference to the containing structure.  */
5461 
5462 static rtx
5463 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5464 	     enum machine_mode mode, tree exp, tree type, int alias_set)
5465 {
5466   HOST_WIDE_INT width_mask = 0;
5467 
5468   if (TREE_CODE (exp) == ERROR_MARK)
5469     return const0_rtx;
5470 
5471   /* If we have nothing to store, do nothing unless the expression has
5472      side-effects.  */
5473   if (bitsize == 0)
5474     return expand_expr (exp, const0_rtx, VOIDmode, 0);
5475   else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5476     width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5477 
5478   /* If we are storing into an unaligned field of an aligned union that is
5479      in a register, we may have the mode of TARGET being an integer mode but
5480      MODE == BLKmode.  In that case, get an aligned object whose size and
5481      alignment are the same as TARGET and store TARGET into it (we can avoid
5482      the store if the field being stored is the entire width of TARGET).  Then
5483      call ourselves recursively to store the field into a BLKmode version of
5484      that object.  Finally, load from the object into TARGET.  This is not
5485      very efficient in general, but should only be slightly more expensive
5486      than the otherwise-required unaligned accesses.  Perhaps this can be
5487      cleaned up later.  It's tempting to make OBJECT readonly, but it's set
5488      twice, once with emit_move_insn and once via store_field.  */
5489 
5490   if (mode == BLKmode
5491       && (REG_P (target) || GET_CODE (target) == SUBREG))
5492     {
5493       rtx object = assign_temp (type, 0, 1, 1);
5494       rtx blk_object = adjust_address (object, BLKmode, 0);
5495 
5496       if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5497 	emit_move_insn (object, target);
5498 
5499       store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5500 
5501       emit_move_insn (target, object);
5502 
5503       /* We want to return the BLKmode version of the data.  */
5504       return blk_object;
5505     }
5506 
5507   if (GET_CODE (target) == CONCAT)
5508     {
5509       /* We're storing into a struct containing a single __complex.  */
5510 
5511       gcc_assert (!bitpos);
5512       return store_expr (exp, target, 0);
5513     }
5514 
5515   /* If the structure is in a register or if the component
5516      is a bit field, we cannot use addressing to access it.
5517      Use bit-field techniques or SUBREG to store in it.  */
5518 
5519   if (mode == VOIDmode
5520       || (mode != BLKmode && ! direct_store[(int) mode]
5521 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5522 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5523       || REG_P (target)
5524       || GET_CODE (target) == SUBREG
5525       /* If the field isn't aligned enough to store as an ordinary memref,
5526 	 store it as a bit field.  */
5527       || (mode != BLKmode
5528 	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5529 		|| bitpos % GET_MODE_ALIGNMENT (mode))
5530 	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5531 	      || (bitpos % BITS_PER_UNIT != 0)))
5532       /* If the RHS and field are a constant size and the size of the
5533 	 RHS isn't the same size as the bitfield, we must use bitfield
5534 	 operations.  */
5535       || (bitsize >= 0
5536 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5537 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5538     {
5539       rtx temp;
5540 
5541       /* If EXP is a NOP_EXPR of precision less than its mode, then that
5542 	 implies a mask operation.  If the precision is the same size as
5543 	 the field we're storing into, that mask is redundant.  This is
5544 	 particularly common with bit field assignments generated by the
5545 	 C front end.  */
5546       if (TREE_CODE (exp) == NOP_EXPR)
5547 	{
5548 	  tree type = TREE_TYPE (exp);
5549 	  if (INTEGRAL_TYPE_P (type)
5550 	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5551 	      && bitsize == TYPE_PRECISION (type))
5552 	    {
5553 	      type = TREE_TYPE (TREE_OPERAND (exp, 0));
5554 	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5555 		exp = TREE_OPERAND (exp, 0);
5556 	    }
5557 	}
5558 
5559       temp = expand_normal (exp);
5560 
5561       /* If BITSIZE is narrower than the size of the type of EXP
5562 	 we will be narrowing TEMP.  Normally, what's wanted are the
5563 	 low-order bits.  However, if EXP's type is a record and this is
5564 	 big-endian machine, we want the upper BITSIZE bits.  */
5565       if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5566 	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5567 	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5568 	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5569 			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5570 				       - bitsize),
5571 			     NULL_RTX, 1);
5572 
5573       /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5574 	 MODE.  */
5575       if (mode != VOIDmode && mode != BLKmode
5576 	  && mode != TYPE_MODE (TREE_TYPE (exp)))
5577 	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5578 
5579       /* If the modes of TARGET and TEMP are both BLKmode, both
5580 	 must be in memory and BITPOS must be aligned on a byte
5581 	 boundary.  If so, we simply do a block copy.  */
5582       if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5583 	{
5584 	  gcc_assert (MEM_P (target) && MEM_P (temp)
5585 		      && !(bitpos % BITS_PER_UNIT));
5586 
5587 	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5588 	  emit_block_move (target, temp,
5589 			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5590 				    / BITS_PER_UNIT),
5591 			   BLOCK_OP_NORMAL);
5592 
5593 	  return const0_rtx;
5594 	}
5595 
5596       /* Store the value in the bitfield.  */
5597       store_bit_field (target, bitsize, bitpos, mode, temp);
5598 
5599       return const0_rtx;
5600     }
5601   else
5602     {
5603       /* Now build a reference to just the desired component.  */
5604       rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5605 
5606       if (to_rtx == target)
5607 	to_rtx = copy_rtx (to_rtx);
5608 
5609       MEM_SET_IN_STRUCT_P (to_rtx, 1);
5610       if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5611 	set_mem_alias_set (to_rtx, alias_set);
5612 
5613       return store_expr (exp, to_rtx, 0);
5614     }
5615 }
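
/* For illustration, a bit-field assignment such as

       struct { unsigned f : 5; } *p;
       p->f = val;

   should arrive here with MODE == VOIDmode and BITSIZE == 5 and take
   the store_bit_field path above, while a plain aligned int member is
   stored through the adjusted memory reference at the end of the
   function.  */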
5616 
5617 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5618    an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5619    codes and find the ultimate containing object, which we return.
5620 
5621    We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5622    bit position, and *PUNSIGNEDP to the signedness of the field.
5623    If the position of the field is variable, we store a tree
5624    giving the variable offset (in units) in *POFFSET.
5625    This offset is in addition to the bit position.
5626    If the position is not variable, we store 0 in *POFFSET.
5627 
5628    If any of the extraction expressions is volatile,
5629    we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5630 
5631    If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
5632    is a mode that can be used to access the field.  In that case, *PBITSIZE
5633    is redundant.
5634 
5635    If the field describes a variable-sized object, *PMODE is set to
5636    VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5637    this case, but the address of the object can be found.
5638 
5639    If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5640    look through nodes that serve as markers of a greater alignment than
5641    the one that can be deduced from the expression.  These nodes make it
5642    possible for front-ends to prevent temporaries from being created by
5643    the middle-end on alignment considerations.  For that purpose, the
5644    normal operating mode at high-level is to always pass FALSE so that
5645    the ultimate containing object is really returned; moreover, the
5646    associated predicate handled_component_p will always return TRUE
5647    on these nodes, thus indicating that they are essentially handled
5648    by get_inner_reference.  TRUE should only be passed when the caller
5649    is scanning the expression in order to build another representation
5650    and specifically knows how to handle these nodes; as such, this is
5651    the normal operating mode in the RTL expanders.  */
5652 
5653 tree
5654 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5655 		     HOST_WIDE_INT *pbitpos, tree *poffset,
5656 		     enum machine_mode *pmode, int *punsignedp,
5657 		     int *pvolatilep, bool keep_aligning)
5658 {
5659   tree size_tree = 0;
5660   enum machine_mode mode = VOIDmode;
5661   tree offset = size_zero_node;
5662   tree bit_offset = bitsize_zero_node;
5663   tree tem;
5664 
5665   /* First get the mode, signedness, and size.  We do this from just the
5666      outermost expression.  */
5667   if (TREE_CODE (exp) == COMPONENT_REF)
5668     {
5669       size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5670       if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5671 	mode = DECL_MODE (TREE_OPERAND (exp, 1));
5672 
5673       *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5674     }
5675   else if (TREE_CODE (exp) == BIT_FIELD_REF)
5676     {
5677       size_tree = TREE_OPERAND (exp, 1);
5678       *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5679     }
5680   else
5681     {
5682       mode = TYPE_MODE (TREE_TYPE (exp));
5683       *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5684 
5685       if (mode == BLKmode)
5686 	size_tree = TYPE_SIZE (TREE_TYPE (exp));
5687       else
5688 	*pbitsize = GET_MODE_BITSIZE (mode);
5689     }
5690 
5691   if (size_tree != 0)
5692     {
5693       if (! host_integerp (size_tree, 1))
5694 	mode = BLKmode, *pbitsize = -1;
5695       else
5696 	*pbitsize = tree_low_cst (size_tree, 1);
5697     }
5698 
5699   /* Compute cumulative bit-offset for nested component-refs and array-refs,
5700      and find the ultimate containing object.  */
5701   while (1)
5702     {
5703       switch (TREE_CODE (exp))
5704 	{
5705 	case BIT_FIELD_REF:
5706 	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5707 				   TREE_OPERAND (exp, 2));
5708 	  break;
5709 
5710 	case COMPONENT_REF:
5711 	  {
5712 	    tree field = TREE_OPERAND (exp, 1);
5713 	    tree this_offset = component_ref_field_offset (exp);
5714 
5715 	    /* If this field hasn't been filled in yet, don't go past it.
5716 	       This should only happen when folding expressions made during
5717 	       type construction.  */
5718 	    if (this_offset == 0)
5719 	      break;
5720 
5721 	    offset = size_binop (PLUS_EXPR, offset, this_offset);
5722 	    bit_offset = size_binop (PLUS_EXPR, bit_offset,
5723 				     DECL_FIELD_BIT_OFFSET (field));
5724 
5725 	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
5726 	  }
5727 	  break;
5728 
5729 	case ARRAY_REF:
5730 	case ARRAY_RANGE_REF:
5731 	  {
5732 	    tree index = TREE_OPERAND (exp, 1);
5733 	    tree low_bound = array_ref_low_bound (exp);
5734 	    tree unit_size = array_ref_element_size (exp);
5735 
5736 	    /* We assume all arrays have sizes that are a multiple of a byte.
5737 	       First subtract the lower bound, if any, in the type of the
5738 	       index, then convert to sizetype and multiply by the size of
5739 	       the array element.  */
5740 	    if (! integer_zerop (low_bound))
5741 	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5742 				   index, low_bound);
5743 
5744 	    offset = size_binop (PLUS_EXPR, offset,
5745 			         size_binop (MULT_EXPR,
5746 					     fold_convert (sizetype, index),
5747 					     unit_size));
5748 	  }
5749 	  break;
5750 
5751 	case REALPART_EXPR:
5752 	  break;
5753 
5754 	case IMAGPART_EXPR:
5755 	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5756 				   bitsize_int (*pbitsize));
5757 	  break;
5758 
5759 	case VIEW_CONVERT_EXPR:
5760 	  if (keep_aligning && STRICT_ALIGNMENT
5761 	      && (TYPE_ALIGN (TREE_TYPE (exp))
5762 	       > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5763 	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5764 		  < BIGGEST_ALIGNMENT)
5765 	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5766 		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5767 	    goto done;
5768 	  break;
5769 
5770 	default:
5771 	  goto done;
5772 	}
5773 
5774       /* If any reference in the chain is volatile, the effect is volatile.  */
5775       if (TREE_THIS_VOLATILE (exp))
5776 	*pvolatilep = 1;
5777 
5778       exp = TREE_OPERAND (exp, 0);
5779     }
5780  done:
5781 
5782   /* If OFFSET is constant, see if we can return the whole thing as a
5783      constant bit position.  Otherwise, split it up.  */
5784   if (host_integerp (offset, 0)
5785       && 0 != (tem = size_binop (MULT_EXPR,
5786 				 fold_convert (bitsizetype, offset),
5787 				 bitsize_unit_node))
5788       && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5789       && host_integerp (tem, 0))
5790     *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5791   else
5792     *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5793 
5794   *pmode = mode;
5795   return exp;
5796 }
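
/* A worked example: for a reference S.F where F is a bit-field of 5
   bits placed 3 bits into S, this should return the tree for S with
   *PBITSIZE == 5, *PBITPOS == 3, *POFFSET == 0 and *PMODE == VOIDmode;
   an element reference A[I] with variable I instead comes back with a
   nonzero *POFFSET tree of I scaled by the element size.  */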
5797 
5798 /* Return a tree of sizetype representing the size, in bytes, of the element
5799    of EXP, an ARRAY_REF.  */
5800 
5801 tree
5802 array_ref_element_size (tree exp)
5803 {
5804   tree aligned_size = TREE_OPERAND (exp, 3);
5805   tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5806 
5807   /* If a size was specified in the ARRAY_REF, it's the size measured
5808      in alignment units of the element type.  So multiply by that value.  */
5809   if (aligned_size)
5810     {
5811       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5812 	 sizetype from another type of the same width and signedness.  */
5813       if (TREE_TYPE (aligned_size) != sizetype)
5814 	aligned_size = fold_convert (sizetype, aligned_size);
5815       return size_binop (MULT_EXPR, aligned_size,
5816 		         size_int (TYPE_ALIGN_UNIT (elmt_type)));
5817     }
5818 
5819   /* Otherwise, take the size from that of the element type.  Substitute
5820      any PLACEHOLDER_EXPR that we have.  */
5821   else
5822     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5823 }
5824 
5825 /* Return a tree representing the lower bound of the array mentioned in
5826    EXP, an ARRAY_REF.  */
5827 
5828 tree
5829 array_ref_low_bound (tree exp)
5830 {
5831   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5832 
5833   /* If a lower bound is specified in EXP, use it.  */
5834   if (TREE_OPERAND (exp, 2))
5835     return TREE_OPERAND (exp, 2);
5836 
5837   /* Otherwise, if there is a domain type and it has a lower bound, use it,
5838      substituting for a PLACEHOLDER_EXPR as needed.  */
5839   if (domain_type && TYPE_MIN_VALUE (domain_type))
5840     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5841 
5842   /* Otherwise, return a zero of the appropriate type.  */
5843   return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5844 }
5845 
5846 /* Return a tree representing the upper bound of the array mentioned in
5847    EXP, an ARRAY_REF.  */
5848 
5849 tree
5850 array_ref_up_bound (tree exp)
5851 {
5852   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5853 
5854   /* If there is a domain type and it has an upper bound, use it, substituting
5855      for a PLACEHOLDER_EXPR as needed.  */
5856   if (domain_type && TYPE_MAX_VALUE (domain_type))
5857     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5858 
5859   /* Otherwise fail.  */
5860   return NULL_TREE;
5861 }
5862 
5863 /* Return a tree representing the offset, in bytes, of the field referenced
5864    by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
5865 
5866 tree
5867 component_ref_field_offset (tree exp)
5868 {
5869   tree aligned_offset = TREE_OPERAND (exp, 2);
5870   tree field = TREE_OPERAND (exp, 1);
5871 
5872   /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5873      in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
5874      value.  */
5875   if (aligned_offset)
5876     {
5877       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5878 	 sizetype from another type of the same width and signedness.  */
5879       if (TREE_TYPE (aligned_offset) != sizetype)
5880 	aligned_offset = fold_convert (sizetype, aligned_offset);
5881       return size_binop (MULT_EXPR, aligned_offset,
5882 		         size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5883     }
5884 
5885   /* Otherwise, take the offset from that of the field.  Substitute
5886      any PLACEHOLDER_EXPR that we have.  */
5887   else
5888     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5889 }
5890 
5891 /* Return 1 if T is an expression that get_inner_reference handles.  */
5892 
5893 int
5894 handled_component_p (tree t)
5895 {
5896   switch (TREE_CODE (t))
5897     {
5898     case BIT_FIELD_REF:
5899     case COMPONENT_REF:
5900     case ARRAY_REF:
5901     case ARRAY_RANGE_REF:
5902     case VIEW_CONVERT_EXPR:
5903     case REALPART_EXPR:
5904     case IMAGPART_EXPR:
5905       return 1;
5906 
5907     default:
5908       return 0;
5909     }
5910 }
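
/* E.g. the trees built for a.b, a[i], __real__ c and a BIT_FIELD_REF
   are all handled components, whereas a bare VAR_DECL or an
   INDIRECT_REF falls through to the default case and yields 0.  */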
5911 
5912 /* Given an rtx VALUE that may contain additions and multiplications, return
5913    an equivalent value that just refers to a register, memory, or constant.
5914    This is done by generating instructions to perform the arithmetic and
5915    returning a pseudo-register containing the value.
5916 
5917    The returned value may be a REG, SUBREG, MEM or constant.  */
5918 
5919 rtx
5920 force_operand (rtx value, rtx target)
5921 {
5922   rtx op1, op2;
5923   /* Use subtarget as the target for operand 0 of a binary operation.  */
5924   rtx subtarget = get_subtarget (target);
5925   enum rtx_code code = GET_CODE (value);
5926 
5927   /* Check for subreg applied to an expression produced by loop optimizer.  */
5928   if (code == SUBREG
5929       && !REG_P (SUBREG_REG (value))
5930       && !MEM_P (SUBREG_REG (value)))
5931     {
5932       value = simplify_gen_subreg (GET_MODE (value),
5933 				   force_reg (GET_MODE (SUBREG_REG (value)),
5934 					      force_operand (SUBREG_REG (value),
5935 							     NULL_RTX)),
5936 				   GET_MODE (SUBREG_REG (value)),
5937 				   SUBREG_BYTE (value));
5938       code = GET_CODE (value);
5939     }
5940 
5941   /* Check for a PIC address load.  */
5942   if ((code == PLUS || code == MINUS)
5943       && XEXP (value, 0) == pic_offset_table_rtx
5944       && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5945 	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5946 	  || GET_CODE (XEXP (value, 1)) == CONST))
5947     {
5948       if (!subtarget)
5949 	subtarget = gen_reg_rtx (GET_MODE (value));
5950       emit_move_insn (subtarget, value);
5951       return subtarget;
5952     }
5953 
5954   if (ARITHMETIC_P (value))
5955     {
5956       op2 = XEXP (value, 1);
5957       if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5958 	subtarget = 0;
5959       if (code == MINUS && GET_CODE (op2) == CONST_INT)
5960 	{
5961 	  code = PLUS;
5962 	  op2 = negate_rtx (GET_MODE (value), op2);
5963 	}
5964 
5965       /* Check for an addition with OP2 a constant integer and our first
5966          operand a PLUS of a virtual register and something else.  In that
5967          case, we want to emit the sum of the virtual register and the
5968          constant first and then add the other value.  This allows virtual
5969          register instantiation to simply modify the constant rather than
5970          creating another one around this addition.  */
5971       if (code == PLUS && GET_CODE (op2) == CONST_INT
5972 	  && GET_CODE (XEXP (value, 0)) == PLUS
5973 	  && REG_P (XEXP (XEXP (value, 0), 0))
5974 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5975 	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5976 	{
5977 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
5978 					  XEXP (XEXP (value, 0), 0), op2,
5979 					  subtarget, 0, OPTAB_LIB_WIDEN);
5980 	  return expand_simple_binop (GET_MODE (value), code, temp,
5981 				      force_operand (XEXP (XEXP (value,
5982 								 0), 1), 0),
5983 				      target, 0, OPTAB_LIB_WIDEN);
5984 	}
5985 
5986       op1 = force_operand (XEXP (value, 0), subtarget);
5987       op2 = force_operand (op2, NULL_RTX);
5988       switch (code)
5989 	{
5990 	case MULT:
5991 	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
5992 	case DIV:
5993 	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
5994 	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
5995 					target, 1, OPTAB_LIB_WIDEN);
5996 	  else
5997 	    return expand_divmod (0,
5998 				  FLOAT_MODE_P (GET_MODE (value))
5999 				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
6000 				  GET_MODE (value), op1, op2, target, 0);
6001 	  break;
6002 	case MOD:
6003 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6004 				target, 0);
6005 	  break;
6006 	case UDIV:
6007 	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6008 				target, 1);
6009 	  break;
6010 	case UMOD:
6011 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6012 				target, 1);
6013 	  break;
6014 	case ASHIFTRT:
6015 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
6016 				      target, 0, OPTAB_LIB_WIDEN);
6017 	  break;
6018 	default:
6019 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
6020 				      target, 1, OPTAB_LIB_WIDEN);
6021 	}
6022     }
6023   if (UNARY_P (value))
6024     {
6025       if (!target)
6026 	target = gen_reg_rtx (GET_MODE (value));
6027       op1 = force_operand (XEXP (value, 0), NULL_RTX);
6028       switch (code)
6029 	{
6030 	case ZERO_EXTEND:
6031 	case SIGN_EXTEND:
6032 	case TRUNCATE:
6033 	case FLOAT_EXTEND:
6034 	case FLOAT_TRUNCATE:
6035 	  convert_move (target, op1, code == ZERO_EXTEND);
6036 	  return target;
6037 
6038 	case FIX:
6039 	case UNSIGNED_FIX:
6040 	  expand_fix (target, op1, code == UNSIGNED_FIX);
6041 	  return target;
6042 
6043 	case FLOAT:
6044 	case UNSIGNED_FLOAT:
6045 	  expand_float (target, op1, code == UNSIGNED_FLOAT);
6046 	  return target;
6047 
6048 	default:
6049 	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6050 	}
6051     }
6052 
6053 #ifdef INSN_SCHEDULING
6054   /* On machines that have insn scheduling, we want all memory references to
6055      be explicit, so we need to deal with paradoxical SUBREGs of MEMs here.  */
6056   if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6057       && (GET_MODE_SIZE (GET_MODE (value))
6058 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6059     value
6060       = simplify_gen_subreg (GET_MODE (value),
6061 			     force_reg (GET_MODE (SUBREG_REG (value)),
6062 					force_operand (SUBREG_REG (value),
6063 						       NULL_RTX)),
6064 			     GET_MODE (SUBREG_REG (value)),
6065 			     SUBREG_BYTE (value));
6066 #endif
6067 
6068   return value;
6069 }
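
/* Illustrative use (BASE_REG stands in for some existing REG rtx):
   passing

       gen_rtx_PLUS (Pmode, base_reg, GEN_INT (4))

   through force_operand should emit the addition and hand back a
   pseudo-register (or TARGET) holding the sum, which the caller can
   then use anywhere a general operand is required.  */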
6070 
6071 /* Subroutine of expand_expr: return nonzero iff there is no way that
6072    EXP can reference X, which is being modified.  TOP_P is nonzero if this
6073    call is going to be used to determine whether we need a temporary
6074    for EXP, as opposed to a recursive call to this function.
6075 
6076    It is always safe for this routine to return zero since it merely
6077    searches for optimization opportunities.  */
6078 
6079 int
6080 safe_from_p (rtx x, tree exp, int top_p)
6081 {
6082   rtx exp_rtl = 0;
6083   int i, nops;
6084 
6085   if (x == 0
6086       /* If EXP has varying size, we MUST use a target since we currently
6087 	 have no way of allocating temporaries of variable size
6088 	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6089 	 So we assume here that something at a higher level has prevented a
6090 	 clash.  This is somewhat bogus, but the best we can do.  Only
6091 	 do this when X is BLKmode and when we are at the top level.  */
6092       || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6093 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6094 	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6095 	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6096 	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6097 	      != INTEGER_CST)
6098 	  && GET_MODE (x) == BLKmode)
6099       /* If X is in the outgoing argument area, it is always safe.  */
6100       || (MEM_P (x)
6101 	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
6102 	      || (GET_CODE (XEXP (x, 0)) == PLUS
6103 		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6104     return 1;
6105 
6106   /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6107      find the underlying pseudo.  */
6108   if (GET_CODE (x) == SUBREG)
6109     {
6110       x = SUBREG_REG (x);
6111       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6112 	return 0;
6113     }
6114 
6115   /* Now look at our tree code and possibly recurse.  */
6116   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6117     {
6118     case tcc_declaration:
6119       exp_rtl = DECL_RTL_IF_SET (exp);
6120       break;
6121 
6122     case tcc_constant:
6123       return 1;
6124 
6125     case tcc_exceptional:
6126       if (TREE_CODE (exp) == TREE_LIST)
6127 	{
6128 	  while (1)
6129 	    {
6130 	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6131 		return 0;
6132 	      exp = TREE_CHAIN (exp);
6133 	      if (!exp)
6134 		return 1;
6135 	      if (TREE_CODE (exp) != TREE_LIST)
6136 		return safe_from_p (x, exp, 0);
6137 	    }
6138 	}
6139       else if (TREE_CODE (exp) == CONSTRUCTOR)
6140 	{
6141 	  constructor_elt *ce;
6142 	  unsigned HOST_WIDE_INT idx;
6143 
6144 	  for (idx = 0;
6145 	       VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6146 	       idx++)
6147 	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6148 		|| !safe_from_p (x, ce->value, 0))
6149 	      return 0;
6150 	  return 1;
6151 	}
6152       else if (TREE_CODE (exp) == ERROR_MARK)
6153 	return 1;	/* An already-visited SAVE_EXPR? */
6154       else
6155 	return 0;
6156 
6157     case tcc_statement:
6158       /* The only case we look at here is the DECL_INITIAL inside a
6159 	 DECL_EXPR.  */
6160       return (TREE_CODE (exp) != DECL_EXPR
6161 	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6162 	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6163 	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6164 
6165     case tcc_binary:
6166     case tcc_comparison:
6167       if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6168 	return 0;
6169       /* Fall through.  */
6170 
6171     case tcc_unary:
6172       return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6173 
6174     case tcc_expression:
6175     case tcc_reference:
6176       /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
6177 	 the expression.  If it is set, we conflict iff we are that rtx or
6178 	 both are in memory.  Otherwise, we check all operands of the
6179 	 expression recursively.  */
6180 
6181       switch (TREE_CODE (exp))
6182 	{
6183 	case ADDR_EXPR:
6184 	  /* If the operand is static or we are static, we can't conflict.
6185 	     Likewise if we don't conflict with the operand at all.  */
6186 	  if (staticp (TREE_OPERAND (exp, 0))
6187 	      || TREE_STATIC (exp)
6188 	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6189 	    return 1;
6190 
6191 	  /* Otherwise, the only way this can conflict is if we are taking
6192 	     the address of a DECL whose address is part of X, which is
6193 	     very rare.  */
6194 	  exp = TREE_OPERAND (exp, 0);
6195 	  if (DECL_P (exp))
6196 	    {
6197 	      if (!DECL_RTL_SET_P (exp)
6198 		  || !MEM_P (DECL_RTL (exp)))
6199 		return 0;
6200 	      else
6201 		exp_rtl = XEXP (DECL_RTL (exp), 0);
6202 	    }
6203 	  break;
6204 
6205 	case MISALIGNED_INDIRECT_REF:
6206 	case ALIGN_INDIRECT_REF:
6207 	case INDIRECT_REF:
6208 	  if (MEM_P (x)
6209 	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6210 					get_alias_set (exp)))
6211 	    return 0;
6212 	  break;
6213 
6214 	case CALL_EXPR:
6215 	  /* Assume that the call will clobber all hard registers and
6216 	     all of memory.  */
6217 	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6218 	      || MEM_P (x))
6219 	    return 0;
6220 	  break;
6221 
6222 	case WITH_CLEANUP_EXPR:
6223 	case CLEANUP_POINT_EXPR:
6224 	  /* Lowered by gimplify.c.  */
6225 	  gcc_unreachable ();
6226 
6227 	case SAVE_EXPR:
6228 	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6229 
6230 	default:
6231 	  break;
6232 	}
6233 
6234       /* If we have an rtx, we do not need to scan our operands.  */
6235       if (exp_rtl)
6236 	break;
6237 
6238       nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6239       for (i = 0; i < nops; i++)
6240 	if (TREE_OPERAND (exp, i) != 0
6241 	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6242 	  return 0;
6243 
6244       /* If this is a language-specific tree code, it may require
6245 	 special handling.  */
6246       if ((unsigned int) TREE_CODE (exp)
6247 	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6248 	  && !lang_hooks.safe_from_p (x, exp))
6249 	return 0;
6250       break;
6251 
6252     case tcc_type:
6253       /* Should never get a type here.  */
6254       gcc_unreachable ();
6255     }
6256 
6257   /* If we have an rtl, find any enclosed object.  Then see if we conflict
6258      with it.  */
6259   if (exp_rtl)
6260     {
6261       if (GET_CODE (exp_rtl) == SUBREG)
6262 	{
6263 	  exp_rtl = SUBREG_REG (exp_rtl);
6264 	  if (REG_P (exp_rtl)
6265 	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6266 	    return 0;
6267 	}
6268 
6269       /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
6270 	 are memory and they conflict.  */
6271       return ! (rtx_equal_p (x, exp_rtl)
6272 		|| (MEM_P (x) && MEM_P (exp_rtl)
6273 		    && true_dependence (exp_rtl, VOIDmode, x,
6274 					rtx_addr_varies_p)));
6275     }
6276 
6277   /* If we reach here, it is safe.  */
6278   return 1;
6279 }
6280 
6281 
6282 /* Return the highest power of two that EXP is known to be a multiple of.
6283    This is used in updating alignment of MEMs in array references.  */
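/* For instance, for an index expression of the form I * 4 + 12 this
   returns MIN (1 * 4, 12 & -12) = 4, while for I * 8 + 6 it returns
   MIN (8, 6 & -6) = 2; any subexpression we know nothing about
   contributes a factor of 1.  */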
6284 
6285 unsigned HOST_WIDE_INT
6286 highest_pow2_factor (tree exp)
6287 {
6288   unsigned HOST_WIDE_INT c0, c1;
6289 
6290   switch (TREE_CODE (exp))
6291     {
6292     case INTEGER_CST:
6293       /* We can find the lowest bit that's a one.  If the low
6294 	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6295 	 We need to handle this case since we can find it in a COND_EXPR,
6296 	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
6297 	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6298 	 later ICE.  */
6299       if (TREE_CONSTANT_OVERFLOW (exp))
6300 	return BIGGEST_ALIGNMENT;
6301       else
6302 	{
6303 	  /* Note: tree_low_cst is intentionally not used here,
6304 	     we don't care about the upper bits.  */
6305 	  c0 = TREE_INT_CST_LOW (exp);
6306 	  c0 &= -c0;
6307 	  return c0 ? c0 : BIGGEST_ALIGNMENT;
6308 	}
6309       break;
6310 
6311     case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
6312       c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6313       c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6314       return MIN (c0, c1);
6315 
6316     case MULT_EXPR:
6317       c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6318       c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6319       return c0 * c1;
6320 
6321     case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
6322     case CEIL_DIV_EXPR:
6323       if (integer_pow2p (TREE_OPERAND (exp, 1))
6324 	  && host_integerp (TREE_OPERAND (exp, 1), 1))
6325 	{
6326 	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6327 	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6328 	  return MAX (1, c0 / c1);
6329 	}
6330       break;
6331 
6332     case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
6333     case SAVE_EXPR:
6334       return highest_pow2_factor (TREE_OPERAND (exp, 0));
6335 
6336     case COMPOUND_EXPR:
6337       return highest_pow2_factor (TREE_OPERAND (exp, 1));
6338 
6339     case COND_EXPR:
6340       c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6341       c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6342       return MIN (c0, c1);
6343 
6344     default:
6345       break;
6346     }
6347 
6348   return 1;
6349 }
6350 
6351 /* Similar, except that the alignment requirements of TARGET are
6352    taken into account.  Assume it is at least as aligned as its
6353    type, unless it is a COMPONENT_REF in which case the layout of
6354    the structure gives the alignment.  */
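/* E.g. if EXP itself only guarantees a factor of 2 but TARGET is a
   COMPONENT_REF whose FIELD_DECL has a DECL_ALIGN_UNIT of 8, the
   result is MAX (2, 8) = 8.  */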
6355 
6356 static unsigned HOST_WIDE_INT
6357 highest_pow2_factor_for_target (tree target, tree exp)
6358 {
6359   unsigned HOST_WIDE_INT target_align, factor;
6360 
6361   factor = highest_pow2_factor (exp);
6362   if (TREE_CODE (target) == COMPONENT_REF)
6363     target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6364   else
6365     target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6366   return MAX (factor, target_align);
6367 }
6368 
6369 /* Expands variable VAR.  */
6370 
6371 void
6372 expand_var (tree var)
6373 {
6374   if (DECL_EXTERNAL (var))
6375     return;
6376 
6377   if (TREE_STATIC (var))
6378     /* If this is an inlined copy of a static local variable,
6379        look up the original decl.  */
6380     var = DECL_ORIGIN (var);
6381 
6382   if (TREE_STATIC (var)
6383       ? !TREE_ASM_WRITTEN (var)
6384       : !DECL_RTL_SET_P (var))
6385     {
6386       if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6387 	/* Should be ignored.  */;
6388       else if (lang_hooks.expand_decl (var))
6389 	/* OK.  */;
6390       else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6391 	expand_decl (var);
6392       else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6393 	rest_of_decl_compilation (var, 0, 0);
6394       else
6395 	/* No expansion needed.  */
6396 	gcc_assert (TREE_CODE (var) == TYPE_DECL
6397 		    || TREE_CODE (var) == CONST_DECL
6398 		    || TREE_CODE (var) == FUNCTION_DECL
6399 		    || TREE_CODE (var) == LABEL_DECL);
6400     }
6401 }
6402 
6403 /* Subroutine of expand_expr.  Expand the two operands of a binary
6404    expression EXP0 and EXP1 placing the results in OP0 and OP1.
6405    The value may be stored in TARGET if TARGET is nonzero.  The
6406    MODIFIER argument is as documented by expand_expr.  */
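/* E.g. for an expression such as X + X the operands compare equal, so X
   is expanded only once and *OP1 is simply a copy of *OP0.  When
   flag_evaluation_order is set and EXP1 has side effects, EXP0 is first
   wrapped in a SAVE_EXPR so that expanding EXP1 cannot clobber it.  */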
6407 
6408 static void
6409 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6410 		 enum expand_modifier modifier)
6411 {
6412   if (! safe_from_p (target, exp1, 1))
6413     target = 0;
6414   if (operand_equal_p (exp0, exp1, 0))
6415     {
6416       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6417       *op1 = copy_rtx (*op0);
6418     }
6419   else
6420     {
6421       /* If we need to preserve evaluation order, copy exp0 into its own
6422 	 temporary variable so that it can't be clobbered by exp1.  */
6423       if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6424 	exp0 = save_expr (exp0);
6425       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6426       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6427     }
6428 }
6429 
6430 
6431 /* Return a MEM that contains constant EXP.  DEFER is as for
6432    output_constant_def and MODIFIER is as for expand_expr.  */
6433 
6434 static rtx
6435 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6436 {
6437   rtx mem;
6438 
6439   mem = output_constant_def (exp, defer);
6440   if (modifier != EXPAND_INITIALIZER)
6441     mem = use_anchored_address (mem);
6442   return mem;
6443 }
6444 
6445 /* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
6446    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
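/* Roughly, for &S.F where F sits 4 bytes into S we recursively compute
   the address of S and then add BITPOS / BITS_PER_UNIT == 4 with
   plus_constant; a variable byte offset, as in &A[I], is expanded
   separately and added in below.  */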
6447 
6448 static rtx
6449 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6450 		         enum expand_modifier modifier)
6451 {
6452   rtx result, subtarget;
6453   tree inner, offset;
6454   HOST_WIDE_INT bitsize, bitpos;
6455   int volatilep, unsignedp;
6456   enum machine_mode mode1;
6457 
6458   /* If we are taking the address of a constant and are at the top level,
6459      we have to use output_constant_def since we can't call force_const_mem
6460      at top level.  */
6461   /* ??? This should be considered a front-end bug.  We should not be
6462      generating ADDR_EXPR of something that isn't an LVALUE.  The only
6463      exception here is STRING_CST.  */
6464   if (TREE_CODE (exp) == CONSTRUCTOR
6465       || CONSTANT_CLASS_P (exp))
6466     return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6467 
6468   /* Everything must be something allowed by is_gimple_addressable.  */
6469   switch (TREE_CODE (exp))
6470     {
6471     case INDIRECT_REF:
6472       /* This case will happen via recursion for &a->b.  */
6473       return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6474 
6475     case CONST_DECL:
6476       /* Recurse and make the output_constant_def clause above handle this.  */
6477       return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6478 				      tmode, modifier);
6479 
6480     case REALPART_EXPR:
6481       /* The real part of the complex number is always first; therefore
6482 	 the address is the same as the address of the parent object.  */
6483       offset = 0;
6484       bitpos = 0;
6485       inner = TREE_OPERAND (exp, 0);
6486       break;
6487 
6488     case IMAGPART_EXPR:
6489       /* The imaginary part of the complex number is always second.
6490 	 The expression is therefore always offset by the size of the
6491 	 scalar type.  */
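      /* E.g. for a _Complex double C the imaginary part lives
	 GET_MODE_BITSIZE (DFmode) == 64 bits past the start of C,
	 i.e. at byte offset 8.  */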
6492       offset = 0;
6493       bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6494       inner = TREE_OPERAND (exp, 0);
6495       break;
6496 
6497     default:
6498       /* If the object is a DECL, then expand it for its rtl.  Don't bypass
6499 	 expand_expr, as that can have various side effects; LABEL_DECLs, for
6500 	 example, may not have their DECL_RTL set yet.  Assume language
6501 	 specific tree nodes can be expanded in some interesting way.  */
6502       if (DECL_P (exp)
6503 	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6504 	{
6505 	  result = expand_expr (exp, target, tmode,
6506 				modifier == EXPAND_INITIALIZER
6507 				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6508 
6509 	  /* If the DECL isn't in memory, then the DECL wasn't properly
6510 	     marked TREE_ADDRESSABLE, which will be either a front-end
6511 	     or a tree optimizer bug.  */
6512 	  gcc_assert (MEM_P (result));
6513 	  result = XEXP (result, 0);
6514 
6515 	  /* ??? Is this needed anymore?  */
6516 	  if (DECL_P (exp) && !TREE_USED (exp) == 0)
6517 	    {
6518 	      assemble_external (exp);
6519 	      TREE_USED (exp) = 1;
6520 	    }
6521 
6522 	  if (modifier != EXPAND_INITIALIZER
6523 	      && modifier != EXPAND_CONST_ADDRESS)
6524 	    result = force_operand (result, target);
6525 	  return result;
6526 	}
6527 
6528       /* Pass FALSE as the last argument to get_inner_reference although
6529 	 we are expanding to RTL.  The rationale is that we know how to
6530 	 handle "aligning nodes" here: we can just bypass them because
6531 	 they won't change the final object whose address will be returned
6532 	 (they actually exist only for that purpose).  */
6533       inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6534 				   &mode1, &unsignedp, &volatilep, false);
6535       break;
6536     }
6537 
6538   /* We must have made progress.  */
6539   gcc_assert (inner != exp);
6540 
6541   subtarget = offset || bitpos ? NULL_RTX : target;
6542   result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6543 
6544   if (offset)
6545     {
6546       rtx tmp;
6547 
6548       if (modifier != EXPAND_NORMAL)
6549 	result = force_operand (result, NULL);
6550       tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6551 
6552       result = convert_memory_address (tmode, result);
6553       tmp = convert_memory_address (tmode, tmp);
6554 
6555       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6556 	result = gen_rtx_PLUS (tmode, result, tmp);
6557       else
6558 	{
6559 	  subtarget = bitpos ? NULL_RTX : target;
6560 	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6561 					1, OPTAB_LIB_WIDEN);
6562 	}
6563     }
6564 
6565   if (bitpos)
6566     {
6567       /* Someone beforehand should have rejected taking the address
6568 	 of such an object.  */
6569       gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6570 
6571       result = plus_constant (result, bitpos / BITS_PER_UNIT);
6572       if (modifier < EXPAND_SUM)
6573 	result = force_operand (result, target);
6574     }
6575 
6576   return result;
6577 }
6578 
6579 /* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
6580    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
6581 
6582 static rtx
6583 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6584 		       enum expand_modifier modifier)
6585 {
6586   enum machine_mode rmode;
6587   rtx result;
6588 
6589   /* Target mode of VOIDmode says "whatever's natural".  */
6590   if (tmode == VOIDmode)
6591     tmode = TYPE_MODE (TREE_TYPE (exp));
6592 
6593   /* We can get called with some Weird Things if the user does silliness
6594      like "(short) &a".  In that case, convert_memory_address won't do
6595      the right thing, so ignore the given target mode.  */
6596   if (tmode != Pmode && tmode != ptr_mode)
6597     tmode = Pmode;
6598 
6599   result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6600 				    tmode, modifier);
6601 
6602   /* Despite expand_expr's claims about ignoring TMODE when not
6603      strictly convenient, things break if we don't honor it.  Note
6604      that combined with the above, we only do this for pointer modes.  */
6605   rmode = GET_MODE (result);
6606   if (rmode == VOIDmode)
6607     rmode = tmode;
6608   if (rmode != tmode)
6609     result = convert_memory_address (tmode, result);
6610 
6611   return result;
6612 }
6613 
6614 
6615 /* expand_expr: generate code for computing expression EXP.
6616    An rtx for the computed value is returned.  The value is never null.
6617    In the case of a void EXP, const0_rtx is returned.
6618 
6619    The value may be stored in TARGET if TARGET is nonzero.
6620    TARGET is just a suggestion; callers must assume that
6621    the rtx returned may not be the same as TARGET.
6622 
6623    If TARGET is CONST0_RTX, it means that the value will be ignored.
6624 
6625    If TMODE is not VOIDmode, it suggests generating the
6626    result in mode TMODE.  But this is done only when convenient.
6627    Otherwise, TMODE is ignored and the value is generated in its natural mode.
6628    TMODE is just a suggestion; callers must assume that
6629    the rtx returned may not have mode TMODE.
6630 
6631    Note that TARGET may have neither TMODE nor MODE.  In that case, it
6632    probably will not be used.
6633 
6634    If MODIFIER is EXPAND_SUM then when EXP is an addition
6635    we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6636    or a nest of (PLUS ...) and (MINUS ...) where the terms are
6637    products as above, or REG or MEM, or constant.
6638    Ordinarily in such cases we would output mul or add instructions
6639    and then return a pseudo reg containing the sum.
6640 
6641    EXPAND_INITIALIZER is much like EXPAND_SUM except that
6642    it also marks a label as absolutely required (it can't be dead).
6643    It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6644    This is used for outputting expressions used in initializers.
6645 
6646    EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6647    with a constant address even if that address is not normally legitimate.
6648    EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6649 
6650    EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6651    a call parameter.  Such targets require special care as we haven't yet
6652    marked TARGET so that it's safe from being trashed by libcalls.  We
6653    don't want to use TARGET for anything but the final result;
6654    Intermediate values must go elsewhere.   Additionally, calls to
6655    emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6656 
6657    If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6658    address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6659    DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
6660    COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6661    recursively.  */
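/* For instance, with MODIFIER == EXPAND_SUM, expanding P + 4 for a
   pointer P living in a pseudo may simply return
   (plus (reg) (const_int 4)) rather than emitting the addition, whereas
   EXPAND_NORMAL would return a pseudo holding the computed sum.  */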
6662 
6663 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6664 			       enum expand_modifier, rtx *);
6665 
6666 rtx
6667 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6668 		  enum expand_modifier modifier, rtx *alt_rtl)
6669 {
6670   int rn = -1;
6671   rtx ret, last = NULL;
6672 
6673   /* Handle ERROR_MARK before anybody tries to access its type.  */
6674   if (TREE_CODE (exp) == ERROR_MARK
6675       || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6676     {
6677       ret = CONST0_RTX (tmode);
6678       return ret ? ret : const0_rtx;
6679     }
6680 
6681   if (flag_non_call_exceptions)
6682     {
6683       rn = lookup_stmt_eh_region (exp);
6684       /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
6685       if (rn >= 0)
6686 	last = get_last_insn ();
6687     }
6688 
6689   /* If this is an expression of some kind and it has an associated line
6690      number, then emit the line number before expanding the expression.
6691 
6692      We need to save and restore the file and line information so that
6693      errors discovered during expansion are emitted with the right
6694      information.  It would be better of the diagnostic routines
6695      used the file/line information embedded in the tree nodes rather
6696      than globals.  */
6697   if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6698     {
6699       location_t saved_location = input_location;
6700       input_location = EXPR_LOCATION (exp);
6701       emit_line_note (input_location);
6702 
6703       /* Record where the insns produced belong.  */
6704       record_block_change (TREE_BLOCK (exp));
6705 
6706       ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6707 
6708       input_location = saved_location;
6709     }
6710   else
6711     {
6712       ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6713     }
6714 
6715   /* If using non-call exceptions, mark all insns that may trap.
6716      expand_call() will mark CALL_INSNs before we get to this code,
6717      but it doesn't handle libcalls, and these may trap.  */
6718   if (rn >= 0)
6719     {
6720       rtx insn;
6721       for (insn = next_real_insn (last); insn;
6722 	   insn = next_real_insn (insn))
6723 	{
6724 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6725 	      /* If we want exceptions for non-call insns, any
6726 		 may_trap_p instruction may throw.  */
6727 	      && GET_CODE (PATTERN (insn)) != CLOBBER
6728 	      && GET_CODE (PATTERN (insn)) != USE
6729 	      && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6730 	    {
6731 	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6732 						  REG_NOTES (insn));
6733 	    }
6734 	}
6735     }
6736 
6737   return ret;
6738 }
6739 
6740 static rtx
6741 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6742 		    enum expand_modifier modifier, rtx *alt_rtl)
6743 {
6744   rtx op0, op1, temp, decl_rtl;
6745   tree type = TREE_TYPE (exp);
6746   int unsignedp;
6747   enum machine_mode mode;
6748   enum tree_code code = TREE_CODE (exp);
6749   optab this_optab;
6750   rtx subtarget, original_target;
6751   int ignore;
6752   tree context, subexp0, subexp1;
6753   bool reduce_bit_field = false;
6754 #define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
6755 				 ? reduce_to_bit_field_precision ((expr), \
6756 								  target, \
6757 								  type)	  \
6758 				 : (expr))
6759 
6760   mode = TYPE_MODE (type);
6761   unsignedp = TYPE_UNSIGNED (type);
6762   if (lang_hooks.reduce_bit_field_operations
6763       && TREE_CODE (type) == INTEGER_TYPE
6764       && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6765     {
6766       /* An operation in what may be a bit-field type needs the
6767 	 result to be reduced to the precision of the bit-field type,
6768 	 which is narrower than that of the type's mode.  */
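      /* E.g. a 24-bit INTEGER_TYPE is typically carried in SImode; for a
	 front end that sets reduce_bit_field_operations, the result of an
	 arithmetic operation is narrowed back to 24 bits of precision by
	 REDUCE_BIT_FIELD before being returned.  */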
6769       reduce_bit_field = true;
6770       if (modifier == EXPAND_STACK_PARM)
6771 	target = 0;
6772     }
6773 
6774   /* Use subtarget as the target for operand 0 of a binary operation.  */
6775   subtarget = get_subtarget (target);
6776   original_target = target;
6777   ignore = (target == const0_rtx
6778 	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6779 		 || code == CONVERT_EXPR || code == COND_EXPR
6780 		 || code == VIEW_CONVERT_EXPR)
6781 		&& TREE_CODE (type) == VOID_TYPE));
6782 
6783   /* If we are going to ignore this result, we need only do something
6784      if there is a side-effect somewhere in the expression.  If there
6785      is, short-circuit the most common cases here.  Note that we must
6786      not call expand_expr with anything but const0_rtx in case this
6787      is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6788 
6789   if (ignore)
6790     {
6791       if (! TREE_SIDE_EFFECTS (exp))
6792 	return const0_rtx;
6793 
6794       /* Ensure we reference a volatile object even if value is ignored, but
6795 	 don't do this if all we are doing is taking its address.  */
6796       if (TREE_THIS_VOLATILE (exp)
6797 	  && TREE_CODE (exp) != FUNCTION_DECL
6798 	  && mode != VOIDmode && mode != BLKmode
6799 	  && modifier != EXPAND_CONST_ADDRESS)
6800 	{
6801 	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6802 	  if (MEM_P (temp))
6803 	    temp = copy_to_reg (temp);
6804 	  return const0_rtx;
6805 	}
6806 
6807       if (TREE_CODE_CLASS (code) == tcc_unary
6808 	  || code == COMPONENT_REF || code == INDIRECT_REF)
6809 	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6810 			    modifier);
6811 
6812       else if (TREE_CODE_CLASS (code) == tcc_binary
6813 	       || TREE_CODE_CLASS (code) == tcc_comparison
6814 	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6815 	{
6816 	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6817 	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6818 	  return const0_rtx;
6819 	}
6820       else if (code == BIT_FIELD_REF)
6821 	{
6822 	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6823 	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6824 	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6825 	  return const0_rtx;
6826 	}
6827 
6828       target = 0;
6829     }
6830 
6831 
6832   switch (code)
6833     {
6834     case LABEL_DECL:
6835       {
6836 	tree function = decl_function_context (exp);
6837 
6838 	temp = label_rtx (exp);
6839 	temp = gen_rtx_LABEL_REF (Pmode, temp);
6840 
6841 	if (function != current_function_decl
6842 	    && function != 0)
6843 	  LABEL_REF_NONLOCAL_P (temp) = 1;
6844 
6845 	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6846 	return temp;
6847       }
6848 
6849     case SSA_NAME:
6850       return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6851 				 NULL);
6852 
6853     case PARM_DECL:
6854     case VAR_DECL:
6855       /* If a static var's type was incomplete when the decl was written,
6856 	 but the type is complete now, lay out the decl now.  */
6857       if (DECL_SIZE (exp) == 0
6858 	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6859 	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6860 	layout_decl (exp, 0);
6861 
6862       /* ... fall through ...  */
6863 
6864     case FUNCTION_DECL:
6865     case RESULT_DECL:
6866       decl_rtl = DECL_RTL (exp);
6867       gcc_assert (decl_rtl);
6868 
6869       /* Ensure variable marked as used even if it doesn't go through
6870 	 a parser.  If it hasn't be used yet, write out an external
6871 	 definition.  */
6872       if (! TREE_USED (exp))
6873 	{
6874 	  assemble_external (exp);
6875 	  TREE_USED (exp) = 1;
6876 	}
6877 
6878       /* Show we haven't gotten RTL for this yet.  */
6879       temp = 0;
6880 
6881       /* Variables inherited from containing functions should have
6882 	 been lowered by this point.  */
6883       context = decl_function_context (exp);
6884       gcc_assert (!context
6885 		  || context == current_function_decl
6886 		  || TREE_STATIC (exp)
6887 		  /* ??? C++ creates functions that are not TREE_STATIC.  */
6888 		  || TREE_CODE (exp) == FUNCTION_DECL);
6889 
6890       /* This is the case of an array whose size is to be determined
6891 	 from its initializer, while the initializer is still being parsed.
6892 	 See expand_decl.  */
6893 
6894       if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6895 	temp = validize_mem (decl_rtl);
6896 
6897       /* If DECL_RTL is memory, we are in the normal case.  If either
6898 	 the address is not valid, or it is not a register and -fforce-addr
6899 	 is specified, get the address into a register.  */
6900 
6901       else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6902 	{
6903 	  if (alt_rtl)
6904 	    *alt_rtl = decl_rtl;
6905 	  decl_rtl = use_anchored_address (decl_rtl);
6906 	  if (modifier != EXPAND_CONST_ADDRESS
6907 	      && modifier != EXPAND_SUM
6908 	      && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6909 		  || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6910 	    temp = replace_equiv_address (decl_rtl,
6911 					  copy_rtx (XEXP (decl_rtl, 0)));
6912 	}
6913 
6914       /* If we got something, return it.  But first, set the alignment
6915 	 if the address is a register.  */
6916       if (temp != 0)
6917 	{
6918 	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6919 	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6920 
6921 	  return temp;
6922 	}
6923 
6924       /* If the mode of DECL_RTL does not match that of the decl, it
6925 	 must be a promoted value.  We return a SUBREG of the wanted mode,
6926 	 but mark it so that we know that it was already extended.  */
6927 
6928       if (REG_P (decl_rtl)
6929 	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
6930 	{
6931 	  enum machine_mode pmode;
6932 
6933 	  /* Get the signedness used for this variable.  Ensure we get the
6934 	     same mode we got when the variable was declared.  */
6935 	  pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6936 				(TREE_CODE (exp) == RESULT_DECL
6937 				 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6938 	  gcc_assert (GET_MODE (decl_rtl) == pmode);
6939 
6940 	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
6941 	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6942 	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6943 	  return temp;
6944 	}
6945 
6946       return decl_rtl;
6947 
6948     case INTEGER_CST:
6949       temp = immed_double_const (TREE_INT_CST_LOW (exp),
6950 				 TREE_INT_CST_HIGH (exp), mode);
6951 
6952       /* ??? If overflow is set, fold will have done an incomplete job,
6953 	 which can result in (plus xx (const_int 0)), which can get
6954 	 simplified by validate_replace_rtx during virtual register
6955 	 instantiation, which can result in unrecognizable insns.
6956 	 Avoid this by forcing all overflows into registers.  */
6957       if (TREE_CONSTANT_OVERFLOW (exp)
6958 	  && modifier != EXPAND_INITIALIZER)
6959 	temp = force_reg (mode, temp);
6960 
6961       return temp;
6962 
6963     case VECTOR_CST:
6964       {
6965 	tree tmp = NULL_TREE;
6966 	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
6967 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
6968 	  return const_vector_from_tree (exp);
6969 	if (GET_MODE_CLASS (mode) == MODE_INT)
6970 	  {
6971 	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
6972 	    if (type_for_mode)
6973 	      tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
6974 	  }
6975 	if (!tmp)
6976 	  tmp = build_constructor_from_list (type,
6977 					     TREE_VECTOR_CST_ELTS (exp));
6978 	return expand_expr (tmp, ignore ? const0_rtx : target,
6979 			    tmode, modifier);
6980       }
6981 
6982     case CONST_DECL:
6983       return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6984 
6985     case REAL_CST:
6986       /* If optimized, generate immediate CONST_DOUBLE
6987 	 which will be turned into memory by reload if necessary.
6988 
6989 	 We used to force a register so that loop.c could see it.  But
6990 	 this does not allow gen_* patterns to perform optimizations with
6991 	 the constants.  It also produces two insns in cases like "x = 1.0;".
6992 	 On most machines, floating-point constants are not permitted in
6993 	 many insns, so we'd end up copying it to a register in any case.
6994 
6995 	 Now, we do the copying in expand_binop, if appropriate.  */
6996       return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6997 					   TYPE_MODE (TREE_TYPE (exp)));
6998 
6999     case COMPLEX_CST:
7000       /* Handle evaluating a complex constant in a CONCAT target.  */
7001       if (original_target && GET_CODE (original_target) == CONCAT)
7002 	{
7003 	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7004 	  rtx rtarg, itarg;
7005 
7006 	  rtarg = XEXP (original_target, 0);
7007 	  itarg = XEXP (original_target, 1);
7008 
7009 	  /* Move the real and imaginary parts separately.  */
7010 	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7011 	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7012 
7013 	  if (op0 != rtarg)
7014 	    emit_move_insn (rtarg, op0);
7015 	  if (op1 != itarg)
7016 	    emit_move_insn (itarg, op1);
7017 
7018 	  return original_target;
7019 	}
7020 
7021       /* ... fall through ...  */
7022 
7023     case STRING_CST:
7024       temp = expand_expr_constant (exp, 1, modifier);
7025 
7026       /* temp contains a constant address.
7027 	 On RISC machines where a constant address isn't valid,
7028 	 make some insns to get that address into a register.  */
7029       if (modifier != EXPAND_CONST_ADDRESS
7030 	  && modifier != EXPAND_INITIALIZER
7031 	  && modifier != EXPAND_SUM
7032 	  && (! memory_address_p (mode, XEXP (temp, 0))
7033 	      || flag_force_addr))
7034 	return replace_equiv_address (temp,
7035 				      copy_rtx (XEXP (temp, 0)));
7036       return temp;
7037 
7038     case SAVE_EXPR:
7039       {
7040 	tree val = TREE_OPERAND (exp, 0);
7041 	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7042 
7043 	if (!SAVE_EXPR_RESOLVED_P (exp))
7044 	  {
7045 	    /* We can indeed still hit this case, typically via builtin
7046 	       expanders calling save_expr immediately before expanding
7047 	       something.  Assume this means that we only have to deal
7048 	       with non-BLKmode values.  */
7049 	    gcc_assert (GET_MODE (ret) != BLKmode);
7050 
7051 	    val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7052 	    DECL_ARTIFICIAL (val) = 1;
7053 	    DECL_IGNORED_P (val) = 1;
7054 	    TREE_OPERAND (exp, 0) = val;
7055 	    SAVE_EXPR_RESOLVED_P (exp) = 1;
7056 
7057 	    if (!CONSTANT_P (ret))
7058 	      ret = copy_to_reg (ret);
7059 	    SET_DECL_RTL (val, ret);
7060 	  }
7061 
7062         return ret;
7063       }
7064 
7065     case GOTO_EXPR:
7066       if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7067 	expand_goto (TREE_OPERAND (exp, 0));
7068       else
7069 	expand_computed_goto (TREE_OPERAND (exp, 0));
7070       return const0_rtx;
7071 
7072     case CONSTRUCTOR:
7073       /* If we don't need the result, just ensure we evaluate any
7074 	 subexpressions.  */
7075       if (ignore)
7076 	{
7077 	  unsigned HOST_WIDE_INT idx;
7078 	  tree value;
7079 
7080 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7081 	    expand_expr (value, const0_rtx, VOIDmode, 0);
7082 
7083 	  return const0_rtx;
7084 	}
7085 
7086       /* Try to avoid creating a temporary at all.  This is possible
7087 	 if all of the initializer is zero.
7088 	 FIXME: try to handle all [0..255] initializers we can handle
7089 	 with memset.  */
7090       else if (TREE_STATIC (exp)
7091 	       && !TREE_ADDRESSABLE (exp)
7092 	       && target != 0 && mode == BLKmode
7093 	       && all_zeros_p (exp))
7094 	{
7095 	  clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7096 	  return target;
7097 	}
7098 
7099       /* All elts simple constants => refer to a constant in memory.  But
7100 	 if this is a non-BLKmode mode, let it store a field at a time
7101 	 since that should make a CONST_INT or CONST_DOUBLE when we
7102 	 fold.  Likewise, if we have a target we can use, it is best to
7103 	 store directly into the target unless the type is large enough
7104 	 that memcpy will be used.  If we are making an initializer and
7105 	 all operands are constant, put it in memory as well.
7106 
7107 	FIXME: Avoid trying to fill vector constructors piece-meal.
7108 	Output them with output_constant_def below unless we're sure
7109 	they're zeros.  This should go away when vector initializers
7110 	are treated like VECTOR_CST instead of arrays.
7111       */
7112       else if ((TREE_STATIC (exp)
7113 		&& ((mode == BLKmode
7114 		     && ! (target != 0 && safe_from_p (target, exp, 1)))
7115 		    || TREE_ADDRESSABLE (exp)
7116 		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7117 			&& (! MOVE_BY_PIECES_P
7118 			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7119 			     TYPE_ALIGN (type)))
7120 			&& ! mostly_zeros_p (exp))))
7121 	       || ((modifier == EXPAND_INITIALIZER
7122 		    || modifier == EXPAND_CONST_ADDRESS)
7123 		   && TREE_CONSTANT (exp)))
7124 	{
7125 	  rtx constructor = expand_expr_constant (exp, 1, modifier);
7126 
7127 	  if (modifier != EXPAND_CONST_ADDRESS
7128 	      && modifier != EXPAND_INITIALIZER
7129 	      && modifier != EXPAND_SUM)
7130 	    constructor = validize_mem (constructor);
7131 
7132 	  return constructor;
7133 	}
7134       else
7135 	{
7136 	  /* Handle calls that pass values in multiple non-contiguous
7137 	     locations.  The Irix 6 ABI has examples of this.  */
7138 	  if (target == 0 || ! safe_from_p (target, exp, 1)
7139 	      || GET_CODE (target) == PARALLEL
7140 	      || modifier == EXPAND_STACK_PARM)
7141 	    target
7142 	      = assign_temp (build_qualified_type (type,
7143 						   (TYPE_QUALS (type)
7144 						    | (TREE_READONLY (exp)
7145 						       * TYPE_QUAL_CONST))),
7146 			     0, TREE_ADDRESSABLE (exp), 1);
7147 
7148 	  store_constructor (exp, target, 0, int_expr_size (exp));
7149 	  return target;
7150 	}
7151 
7152     case MISALIGNED_INDIRECT_REF:
7153     case ALIGN_INDIRECT_REF:
7154     case INDIRECT_REF:
7155       {
7156 	tree exp1 = TREE_OPERAND (exp, 0);
7157 
7158 	if (modifier != EXPAND_WRITE)
7159 	  {
7160 	    tree t;
7161 
7162 	    t = fold_read_from_constant_string (exp);
7163 	    if (t)
7164 	      return expand_expr (t, target, tmode, modifier);
7165 	  }
7166 
7167 	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7168 	op0 = memory_address (mode, op0);
7169 
7170 	if (code == ALIGN_INDIRECT_REF)
7171 	  {
7172 	    int align = TYPE_ALIGN_UNIT (type);
7173 	    op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7174 	    op0 = memory_address (mode, op0);
7175 	  }
7176 
7177 	temp = gen_rtx_MEM (mode, op0);
7178 
7179 	set_mem_attributes (temp, exp, 0);
7180 
7181 	/* Resolve the misalignment now, so that we don't have to remember
7182 	   to resolve it later.  Of course, this only works for reads.  */
7183 	/* ??? When we get around to supporting writes, we'll have to handle
7184 	   this in store_expr directly.  The vectorizer isn't generating
7185 	   those yet, however.  */
7186 	if (code == MISALIGNED_INDIRECT_REF)
7187 	  {
7188 	    int icode;
7189 	    rtx reg, insn;
7190 
7191 	    gcc_assert (modifier == EXPAND_NORMAL
7192 			|| modifier == EXPAND_STACK_PARM);
7193 
7194 	    /* The vectorizer should have already checked the mode.  */
7195 	    icode = movmisalign_optab->handlers[mode].insn_code;
7196 	    gcc_assert (icode != CODE_FOR_nothing);
7197 
7198 	    /* We've already validated the memory, and we're creating a
7199 	       new pseudo destination.  The predicates really can't fail.  */
7200 	    reg = gen_reg_rtx (mode);
7201 
7202 	    /* Nor can the insn generator.  */
7203 	    insn = GEN_FCN (icode) (reg, temp);
7204 	    emit_insn (insn);
7205 
7206 	    return reg;
7207 	  }
7208 
7209 	return temp;
7210       }
7211 
7212     case TARGET_MEM_REF:
7213       {
7214 	struct mem_address addr;
7215 
7216 	get_address_description (exp, &addr);
7217 	op0 = addr_for_mem_ref (&addr, true);
7218 	op0 = memory_address (mode, op0);
7219 	temp = gen_rtx_MEM (mode, op0);
7220 	set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7221       }
7222       return temp;
7223 
7224     case ARRAY_REF:
7225 
7226       {
7227 	tree array = TREE_OPERAND (exp, 0);
7228 	tree index = TREE_OPERAND (exp, 1);
7229 
7230 	/* Fold an expression like: "foo"[2].
7231 	   This is not done in fold so it won't happen inside &.
7232 	   Don't fold if this is for wide characters since it's too
7233 	   difficult to do correctly and this is a very rare case.  */
7234 
7235 	if (modifier != EXPAND_CONST_ADDRESS
7236 	    && modifier != EXPAND_INITIALIZER
7237 	    && modifier != EXPAND_MEMORY)
7238 	  {
7239 	    tree t = fold_read_from_constant_string (exp);
7240 
7241 	    if (t)
7242 	      return expand_expr (t, target, tmode, modifier);
7243 	  }
7244 
7245 	/* If this is a constant index into a constant array,
7246 	   just get the value from the array.  Handle both the cases when
7247 	   we have an explicit constructor and when our operand is a variable
7248 	   that was declared const.  */
7249 
7250 	if (modifier != EXPAND_CONST_ADDRESS
7251 	    && modifier != EXPAND_INITIALIZER
7252 	    && modifier != EXPAND_MEMORY
7253 	    && TREE_CODE (array) == CONSTRUCTOR
7254 	    && ! TREE_SIDE_EFFECTS (array)
7255 	    && TREE_CODE (index) == INTEGER_CST)
7256 	  {
7257 	    unsigned HOST_WIDE_INT ix;
7258 	    tree field, value;
7259 
7260 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7261 				      field, value)
7262 	      if (tree_int_cst_equal (field, index))
7263 		{
7264 		  if (!TREE_SIDE_EFFECTS (value))
7265 		    return expand_expr (fold (value), target, tmode, modifier);
7266 		  break;
7267 		}
7268 	  }
7269 
7270 	else if (optimize >= 1
7271 		 && modifier != EXPAND_CONST_ADDRESS
7272 		 && modifier != EXPAND_INITIALIZER
7273 		 && modifier != EXPAND_MEMORY
7274 		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7275 		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7276 		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7277 		 && targetm.binds_local_p (array))
7278 	  {
7279 	    if (TREE_CODE (index) == INTEGER_CST)
7280 	      {
7281 		tree init = DECL_INITIAL (array);
7282 
7283 		if (TREE_CODE (init) == CONSTRUCTOR)
7284 		  {
7285 		    unsigned HOST_WIDE_INT ix;
7286 		    tree field, value;
7287 
7288 		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7289 					      field, value)
7290 		      if (tree_int_cst_equal (field, index))
7291 			{
7292 			  if (!TREE_SIDE_EFFECTS (value))
7293 			    return expand_expr (fold (value), target, tmode,
7294 						modifier);
7295 			  break;
7296 			}
7297 		  }
7298 		else if (TREE_CODE (init) == STRING_CST)
7299 		  {
7300 		    tree index1 = index;
7301 		    tree low_bound = array_ref_low_bound (exp);
7302 		    index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7303 
7304 		    /* Optimize the special-case of a zero lower bound.
7305 
7306 		       We convert the low_bound to sizetype to avoid some problems
7307 		       with constant folding.  (E.g. suppose the lower bound is 1,
7308 		       and its mode is QI.  Without the conversion, (ARRAY
7309 		       +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7310 		       +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
7311 
7312 		    if (! integer_zerop (low_bound))
7313 		      index1 = size_diffop (index1, fold_convert (sizetype,
7314 								  low_bound));
7315 
7316 		    if (0 > compare_tree_int (index1,
7317 					      TREE_STRING_LENGTH (init)))
7318 		      {
7319 			tree type = TREE_TYPE (TREE_TYPE (init));
7320 			enum machine_mode mode = TYPE_MODE (type);
7321 
7322 			if (GET_MODE_CLASS (mode) == MODE_INT
7323 			    && GET_MODE_SIZE (mode) == 1)
7324 			  return gen_int_mode (TREE_STRING_POINTER (init)
7325 					       [TREE_INT_CST_LOW (index1)],
7326 					       mode);
7327 		      }
7328 		  }
7329 	      }
7330 	  }
7331       }
7332       goto normal_inner_ref;
7333 
7334     case COMPONENT_REF:
7335       /* If the operand is a CONSTRUCTOR, we can just extract the
7336 	 appropriate field if it is present.  */
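      /* E.g. for ((struct s) { .i = 5 }).i we can hand back the RTL for 5
	 directly; an unsigned bit-field value is masked to its width below,
	 a signed one is sign-extended with a shift pair.  */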
7337       if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7338 	{
7339 	  unsigned HOST_WIDE_INT idx;
7340 	  tree field, value;
7341 
7342 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7343 				    idx, field, value)
7344 	    if (field == TREE_OPERAND (exp, 1)
7345 		/* We can normally use the value of the field in the
7346 		   CONSTRUCTOR.  However, if this is a bitfield in
7347 		   an integral mode that we can fit in a HOST_WIDE_INT,
7348 		   we must mask only the number of bits in the bitfield,
7349 		   since this is done implicitly by the constructor.  If
7350 		   the bitfield does not meet either of those conditions,
7351 		   we can't do this optimization.  */
7352 		&& (! DECL_BIT_FIELD (field)
7353 		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7354 			&& (GET_MODE_BITSIZE (DECL_MODE (field))
7355 			    <= HOST_BITS_PER_WIDE_INT))))
7356 	      {
7357 		if (DECL_BIT_FIELD (field)
7358 		    && modifier == EXPAND_STACK_PARM)
7359 		  target = 0;
7360 		op0 = expand_expr (value, target, tmode, modifier);
7361 		if (DECL_BIT_FIELD (field))
7362 		  {
7363 		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7364 		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7365 
7366 		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
7367 		      {
7368 			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7369 			op0 = expand_and (imode, op0, op1, target);
7370 		      }
7371 		    else
7372 		      {
7373 			tree count
7374 			  = build_int_cst (NULL_TREE,
7375 					   GET_MODE_BITSIZE (imode) - bitsize);
7376 
7377 			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7378 					    target, 0);
7379 			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7380 					    target, 0);
7381 		      }
7382 		  }
7383 
7384 		return op0;
7385 	      }
7386 	}
7387       goto normal_inner_ref;
7388 
7389     case BIT_FIELD_REF:
7390     case ARRAY_RANGE_REF:
7391     normal_inner_ref:
7392       {
7393 	enum machine_mode mode1;
7394 	HOST_WIDE_INT bitsize, bitpos;
7395 	tree offset;
7396 	int volatilep = 0;
7397 	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7398 					&mode1, &unsignedp, &volatilep, true);
7399 	rtx orig_op0;
7400 
7401 	/* If we got back the original object, something is wrong.  Perhaps
7402 	   we are evaluating an expression too early.  In any event, don't
7403 	   infinitely recurse.  */
7404 	gcc_assert (tem != exp);
7405 
7406 	/* If TEM's type is a union of variable size, pass TARGET to the inner
7407 	   computation, since it will need a temporary and TARGET is known
7408 	   to suffice.  This occurs in unchecked conversion in Ada.  */
7409 
7410 	orig_op0 = op0
7411 	  = expand_expr (tem,
7412 			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7413 			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7414 			      != INTEGER_CST)
7415 			  && modifier != EXPAND_STACK_PARM
7416 			  ? target : NULL_RTX),
7417 			 VOIDmode,
7418 			 (modifier == EXPAND_INITIALIZER
7419 			  || modifier == EXPAND_CONST_ADDRESS
7420 			  || modifier == EXPAND_STACK_PARM)
7421 			 ? modifier : EXPAND_NORMAL);
7422 
7423 	/* If this is a constant, put it into a register if it is a legitimate
7424 	   constant, OFFSET is 0, and we won't try to extract outside the
7425 	   register (in case we were passed a partially uninitialized object
7426 	   or a view_conversion to a larger size).  Force the constant to
7427 	   memory otherwise.  */
7428 	if (CONSTANT_P (op0))
7429 	  {
7430 	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7431 	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7432 		&& offset == 0
7433 		&& bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7434 	      op0 = force_reg (mode, op0);
7435 	    else
7436 	      op0 = validize_mem (force_const_mem (mode, op0));
7437 	  }
7438 
7439 	/* Otherwise, if this object is not in memory and we either have an
7440 	   offset, a BLKmode result, or a reference outside the object, put it
7441 	   there.  Such cases can occur in Ada if we have unchecked conversion
7442 	   of an expression from a scalar type to an array or record type or
7443 	   for an ARRAY_RANGE_REF whose type is BLKmode.  */
7444 	else if (!MEM_P (op0)
7445 		 && (offset != 0
7446 		     || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7447 		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7448 	  {
7449 	    tree nt = build_qualified_type (TREE_TYPE (tem),
7450 					    (TYPE_QUALS (TREE_TYPE (tem))
7451 					     | TYPE_QUAL_CONST));
7452 	    rtx memloc = assign_temp (nt, 1, 1, 1);
7453 
7454 	    emit_move_insn (memloc, op0);
7455 	    op0 = memloc;
7456 	  }
7457 
7458 	if (offset != 0)
7459 	  {
7460 	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7461 					  EXPAND_SUM);
7462 
7463 	    gcc_assert (MEM_P (op0));
7464 
7465 #ifdef POINTERS_EXTEND_UNSIGNED
7466 	    if (GET_MODE (offset_rtx) != Pmode)
7467 	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7468 #else
7469 	    if (GET_MODE (offset_rtx) != ptr_mode)
7470 	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7471 #endif
7472 
7473 	    if (GET_MODE (op0) == BLKmode
7474 		/* A constant address in OP0 can have VOIDmode; we must
7475 		   not try to call force_reg in that case.  */
7476 		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
7477 		&& bitsize != 0
7478 		&& (bitpos % bitsize) == 0
7479 		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7480 		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7481 	      {
7482 		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7483 		bitpos = 0;
7484 	      }
7485 
7486 	    op0 = offset_address (op0, offset_rtx,
7487 				  highest_pow2_factor (offset));
7488 	  }
7489 
7490 	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7491 	   record its alignment as BIGGEST_ALIGNMENT.  */
7492 	if (MEM_P (op0) && bitpos == 0 && offset != 0
7493 	    && is_aligning_offset (offset, tem))
7494 	  set_mem_align (op0, BIGGEST_ALIGNMENT);
7495 
7496 	/* Don't forget about volatility even if this is a bitfield.  */
7497 	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7498 	  {
7499 	    if (op0 == orig_op0)
7500 	      op0 = copy_rtx (op0);
7501 
7502 	    MEM_VOLATILE_P (op0) = 1;
7503 	  }
7504 
7505 	/* The following code doesn't handle CONCAT.
7506 	   Assume only bitpos == 0 can be used for CONCAT, due to
7507 	   one-element arrays having the same mode as their element.  */
7508 	if (GET_CODE (op0) == CONCAT)
7509 	  {
7510 	    gcc_assert (bitpos == 0
7511 			&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7512 	    return op0;
7513 	  }
7514 
7515 	/* In cases where an aligned union has an unaligned object
7516 	   as a field, we might be extracting a BLKmode value from
7517 	   an integer-mode (e.g., SImode) object.  Handle this case
7518 	   by doing the extract into an object as wide as the field
7519 	   (which we know to be the width of a basic mode), then
7520 	   storing into memory, and changing the mode to BLKmode.  */
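	/* E.g. a 4-byte BLKmode record held in an SImode register: the 32
	   bits are fetched with extract_bit_field in SImode, stored into a
	   stack temporary, and the mode of that MEM is then changed to
	   BLKmode.  */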
7521 	if (mode1 == VOIDmode
7522 	    || REG_P (op0) || GET_CODE (op0) == SUBREG
7523 	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
7524 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7525 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7526 		&& modifier != EXPAND_CONST_ADDRESS
7527 		&& modifier != EXPAND_INITIALIZER)
7528 	    /* If the field isn't aligned enough to fetch as a memref,
7529 	       fetch it as a bit field.  */
7530 	    || (mode1 != BLKmode
7531 		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7532 		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7533 		      || (MEM_P (op0)
7534 			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7535 			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7536 		     && ((modifier == EXPAND_CONST_ADDRESS
7537 			  || modifier == EXPAND_INITIALIZER)
7538 			 ? STRICT_ALIGNMENT
7539 			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7540 		    || (bitpos % BITS_PER_UNIT != 0)))
7541 	    /* If the type and the field are a constant size and the
7542 	       size of the type isn't the same size as the bitfield,
7543 	       we must use bitfield operations.  */
7544 	    || (bitsize >= 0
7545 		&& TYPE_SIZE (TREE_TYPE (exp))
7546 		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7547 		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7548 					  bitsize)))
7549 	  {
7550 	    enum machine_mode ext_mode = mode;
7551 
7552 	    if (ext_mode == BLKmode
7553 		&& ! (target != 0 && MEM_P (op0)
7554 		      && MEM_P (target)
7555 		      && bitpos % BITS_PER_UNIT == 0))
7556 	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7557 
7558 	    if (ext_mode == BLKmode)
7559 	      {
7560 		if (target == 0)
7561 		  target = assign_temp (type, 0, 1, 1);
7562 
7563 		if (bitsize == 0)
7564 		  return target;
7565 
7566 		/* In this case, BITPOS must start at a byte boundary and
7567 		   TARGET, if specified, must be a MEM.  */
7568 		gcc_assert (MEM_P (op0)
7569 			    && (!target || MEM_P (target))
7570 			    && !(bitpos % BITS_PER_UNIT));
7571 
7572 		emit_block_move (target,
7573 				 adjust_address (op0, VOIDmode,
7574 						 bitpos / BITS_PER_UNIT),
7575 				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7576 					  / BITS_PER_UNIT),
7577 				 (modifier == EXPAND_STACK_PARM
7578 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7579 
7580 		return target;
7581 	      }
7582 
7583 	    op0 = validize_mem (op0);
7584 
7585 	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7586 	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7587 
7588 	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7589 				     (modifier == EXPAND_STACK_PARM
7590 				      ? NULL_RTX : target),
7591 				     ext_mode, ext_mode);
7592 
7593 	    /* If the result is a record type and BITSIZE is narrower than
7594 	       the mode of OP0, an integral mode, and this is a big endian
7595 	       machine, we must put the field into the high-order bits.  */
7596 	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7597 		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7598 		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7599 	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7600 				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7601 					    - bitsize),
7602 				  op0, 1);
7603 
7604 	    /* If the result type is BLKmode, store the data into a temporary
7605 	       of the appropriate type, but with the mode corresponding to the
7606 	       mode for the data we have (op0's mode).  It's tempting to make
7607 	       this a constant type, since we know it's only being stored once,
7608 	       but that can cause problems if we are taking the address of this
7609 	       COMPONENT_REF because the MEM of any reference via that address
7610 	       will have flags corresponding to the type, which will not
7611 	       necessarily be constant.  */
7612 	    if (mode == BLKmode)
7613 	      {
7614 		rtx new
7615 		  = assign_stack_temp_for_type
7616 		    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7617 
7618 		emit_move_insn (new, op0);
7619 		op0 = copy_rtx (new);
7620 		PUT_MODE (op0, BLKmode);
7621 		set_mem_attributes (op0, exp, 1);
7622 	      }
7623 
7624 	    return op0;
7625 	  }
7626 
7627 	/* If the result is BLKmode, use that to access the object
7628 	   now as well.  */
7629 	if (mode == BLKmode)
7630 	  mode1 = BLKmode;
7631 
7632 	/* Get a reference to just this component.  */
7633 	if (modifier == EXPAND_CONST_ADDRESS
7634 	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7635 	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7636 	else
7637 	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7638 
7639 	if (op0 == orig_op0)
7640 	  op0 = copy_rtx (op0);
7641 
7642 	set_mem_attributes (op0, exp, 0);
7643 	if (REG_P (XEXP (op0, 0)))
7644 	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7645 
7646 	MEM_VOLATILE_P (op0) |= volatilep;
7647 	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7648 	    || modifier == EXPAND_CONST_ADDRESS
7649 	    || modifier == EXPAND_INITIALIZER)
7650 	  return op0;
7651 	else if (target == 0)
7652 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7653 
7654 	convert_move (target, op0, unsignedp);
7655 	return target;
7656       }
7657 
7658     case OBJ_TYPE_REF:
7659       return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7660 
7661     case CALL_EXPR:
7662       /* Check for a built-in function.  */
7663       if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7664 	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7665 	      == FUNCTION_DECL)
7666 	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7667 	{
7668 	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7669 	      == BUILT_IN_FRONTEND)
7670 	    return lang_hooks.expand_expr (exp, original_target,
7671 					   tmode, modifier,
7672 					   alt_rtl);
7673 	  else
7674 	    return expand_builtin (exp, target, subtarget, tmode, ignore);
7675 	}
7676 
7677       return expand_call (exp, target, ignore);
7678 
7679     case NON_LVALUE_EXPR:
7680     case NOP_EXPR:
7681     case CONVERT_EXPR:
7682       if (TREE_OPERAND (exp, 0) == error_mark_node)
7683 	return const0_rtx;
7684 
7685       if (TREE_CODE (type) == UNION_TYPE)
7686 	{
7687 	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7688 
7689 	  /* If both input and output are BLKmode, this conversion isn't doing
7690 	     anything except possibly changing memory attributes.  */
7691 	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7692 	    {
7693 	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7694 					modifier);
7695 
7696 	      result = copy_rtx (result);
7697 	      set_mem_attributes (result, exp, 0);
7698 	      return result;
7699 	    }
7700 
7701 	  if (target == 0)
7702 	    {
7703 	      if (TYPE_MODE (type) != BLKmode)
7704 		target = gen_reg_rtx (TYPE_MODE (type));
7705 	      else
7706 		target = assign_temp (type, 0, 1, 1);
7707 	    }
7708 
7709 	  if (MEM_P (target))
7710 	    /* Store data into beginning of memory target.  */
7711 	    store_expr (TREE_OPERAND (exp, 0),
7712 			adjust_address (target, TYPE_MODE (valtype), 0),
7713 			modifier == EXPAND_STACK_PARM);
7714 
7715 	  else
7716 	    {
7717 	      gcc_assert (REG_P (target));
7718 
7719 	      /* Store this field into a union of the proper type.  */
7720 	      store_field (target,
7721 			   MIN ((int_size_in_bytes (TREE_TYPE
7722 						    (TREE_OPERAND (exp, 0)))
7723 				 * BITS_PER_UNIT),
7724 				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7725 			   0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7726 			   type, 0);
7727 	    }
7728 
7729 	  /* Return the entire union.  */
7730 	  return target;
7731 	}
7732 
7733       if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7734 	{
7735 	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7736 			     modifier);
7737 
7738 	  /* If the signedness of the conversion differs and OP0 is
7739 	     a promoted SUBREG, clear that indication since we now
7740 	     have to do the proper extension.  */
7741 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7742 	      && GET_CODE (op0) == SUBREG)
7743 	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7744 
7745 	  return REDUCE_BIT_FIELD (op0);
7746 	}
7747 
7748       op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7749 			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7750       if (GET_MODE (op0) == mode)
7751 	;
7752 
7753       /* If OP0 is a constant, just convert it into the proper mode.  */
7754       else if (CONSTANT_P (op0))
7755 	{
7756 	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7757 	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
7758 
7759 	  if (modifier == EXPAND_INITIALIZER)
7760 	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
7761 				       subreg_lowpart_offset (mode,
7762 							      inner_mode));
7763 	  else
7764 	    op0 = convert_modes (mode, inner_mode, op0,
7765 				 TYPE_UNSIGNED (inner_type));
7766 	}
7767 
7768       else if (modifier == EXPAND_INITIALIZER)
7769 	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7770 
7771       else if (target == 0)
7772 	op0 = convert_to_mode (mode, op0,
7773 			       TYPE_UNSIGNED (TREE_TYPE
7774 					      (TREE_OPERAND (exp, 0))));
7775       else
7776 	{
7777 	  convert_move (target, op0,
7778 			TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7779 	  op0 = target;
7780 	}
7781 
7782       return REDUCE_BIT_FIELD (op0);
7783 
7784     case VIEW_CONVERT_EXPR:
7785       op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7786 
7787       /* If the input and output modes are both the same, we are done.  */
7788       if (TYPE_MODE (type) == GET_MODE (op0))
7789 	;
7790       /* If neither mode is BLKmode, and both modes are the same size
7791 	 then we can use gen_lowpart.  */
7792       else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7793 	       && GET_MODE_SIZE (TYPE_MODE (type))
7794 		   == GET_MODE_SIZE (GET_MODE (op0)))
7795 	{
7796 	  if (GET_CODE (op0) == SUBREG)
7797 	    op0 = force_reg (GET_MODE (op0), op0);
7798 	  op0 = gen_lowpart (TYPE_MODE (type), op0);
7799 	}
7800       /* If both modes are integral, then we can convert from one to the
7801 	 other.  */
7802       else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7803 	       && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7804 	op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7805 			     TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7806       /* As a last resort, spill op0 to memory, and reload it in a
7807 	 different mode.  */
7808       else if (!MEM_P (op0))
7809 	{
7810 	  /* If the operand is not a MEM, force it into memory.  Since we
7811 	     are going to be changing the mode of the MEM, don't call
7812 	     force_const_mem for constants because we don't allow pool
7813 	     constants to change mode.  */
7814 	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7815 
7816 	  gcc_assert (!TREE_ADDRESSABLE (exp));
7817 
7818 	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7819 	    target
7820 	      = assign_stack_temp_for_type
7821 		(TYPE_MODE (inner_type),
7822 		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7823 
7824 	  emit_move_insn (target, op0);
7825 	  op0 = target;
7826 	}
7827 
7828       /* At this point, OP0 is in the correct mode.  If the output type is such
7829 	 that the operand is known to be aligned, indicate that it is.
7830 	 Otherwise, we need only be concerned about alignment for non-BLKmode
7831 	 results.  */
7832       if (MEM_P (op0))
7833 	{
7834 	  op0 = copy_rtx (op0);
7835 
7836 	  if (TYPE_ALIGN_OK (type))
7837 	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7838 	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7839 		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7840 	    {
7841 	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7842 	      HOST_WIDE_INT temp_size
7843 		= MAX (int_size_in_bytes (inner_type),
7844 		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7845 	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7846 						    temp_size, 0, type);
7847 	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7848 
7849 	      gcc_assert (!TREE_ADDRESSABLE (exp));
7850 
7851 	      if (GET_MODE (op0) == BLKmode)
7852 		emit_block_move (new_with_op0_mode, op0,
7853 				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7854 				 (modifier == EXPAND_STACK_PARM
7855 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7856 	      else
7857 		emit_move_insn (new_with_op0_mode, op0);
7858 
7859 	      op0 = new;
7860 	    }
7861 
7862 	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
7863 	}
7864 
7865       return op0;
7866 
7867     case PLUS_EXPR:
7868       /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7869 	 something else, make sure we add the register to the constant and
7870 	 then to the other thing.  This case can occur during strength
7871 	 reduction and doing it this way will produce better code if the
7872 	 frame pointer or argument pointer is eliminated.
7873 
7874 	 fold-const.c will ensure that the constant is always in the inner
7875 	 PLUS_EXPR, so the only case we need to do anything about is if
7876 	 sp, ap, or fp is our second argument, in which case we must swap
7877 	 the innermost first argument and our second argument.  */
7878 
7879       if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7880 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7881 	  && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7882 	  && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7883 	      || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7884 	      || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7885 	{
7886 	  tree t = TREE_OPERAND (exp, 1);
7887 
7888 	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7889 	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7890 	}
7891 
7892       /* If the result is to be ptr_mode and we are adding an integer to
7893 	 something, we might be forming a constant.  So try to use
7894 	 plus_constant.  If it produces a sum and we can't accept it,
7895 	 use force_operand.  This allows P = &ARR[const] to generate
7896 	 efficient code on machines where a SYMBOL_REF is not a valid
7897 	 address.
7898 
7899 	 If this is an EXPAND_SUM call, always return the sum.  */
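      /* For instance, with a hypothetical `p = &arr[3]' and 4-byte elements,
	 plus_constant folds the offset into the symbolic address, typically
	 yielding (const (plus (symbol_ref "arr") (const_int 12))) instead of
	 an addition performed at run time.  */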
7900       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7901 	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7902 	{
7903 	  if (modifier == EXPAND_STACK_PARM)
7904 	    target = 0;
7905 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7906 	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7907 	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7908 	    {
7909 	      rtx constant_part;
7910 
7911 	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7912 				 EXPAND_SUM);
7913 	      /* Use immed_double_const to ensure that the constant is
7914 		 truncated according to the mode of OP1, then sign extended
7915 		 to a HOST_WIDE_INT.  Using the constant directly can result
7916 		 in non-canonical RTL in a 64x32 cross compile.  */
7917 	      constant_part
7918 		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7919 				      (HOST_WIDE_INT) 0,
7920 				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7921 	      op1 = plus_constant (op1, INTVAL (constant_part));
7922 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7923 		op1 = force_operand (op1, target);
7924 	      return REDUCE_BIT_FIELD (op1);
7925 	    }
7926 
7927 	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7928 		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7929 		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7930 	    {
7931 	      rtx constant_part;
7932 
7933 	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7934 				 (modifier == EXPAND_INITIALIZER
7935 				 ? EXPAND_INITIALIZER : EXPAND_SUM));
7936 	      if (! CONSTANT_P (op0))
7937 		{
7938 		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7939 				     VOIDmode, modifier);
7940 		  /* Return a PLUS if modifier says it's OK.  */
7941 		  if (modifier == EXPAND_SUM
7942 		      || modifier == EXPAND_INITIALIZER)
7943 		    return simplify_gen_binary (PLUS, mode, op0, op1);
7944 		  goto binop2;
7945 		}
7946 	      /* Use immed_double_const to ensure that the constant is
7947 		 truncated according to the mode of OP1, then sign extended
7948 		 to a HOST_WIDE_INT.  Using the constant directly can result
7949 		 in non-canonical RTL in a 64x32 cross compile.  */
7950 	      constant_part
7951 		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7952 				      (HOST_WIDE_INT) 0,
7953 				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7954 	      op0 = plus_constant (op0, INTVAL (constant_part));
7955 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7956 		op0 = force_operand (op0, target);
7957 	      return REDUCE_BIT_FIELD (op0);
7958 	    }
7959 	}
7960 
7961       /* No sense saving up arithmetic to be done
7962 	 if it's all in the wrong mode to form part of an address.
7963 	 And force_operand won't know whether to sign-extend or
7964 	 zero-extend.  */
7965       if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7966 	  || mode != ptr_mode)
7967 	{
7968 	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7969 			   subtarget, &op0, &op1, 0);
7970 	  if (op0 == const0_rtx)
7971 	    return op1;
7972 	  if (op1 == const0_rtx)
7973 	    return op0;
7974 	  goto binop2;
7975 	}
7976 
7977       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7978 		       subtarget, &op0, &op1, modifier);
7979       return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7980 
7981     case MINUS_EXPR:
7982       /* For initializers, we are allowed to return a MINUS of two
7983 	 symbolic constants.  Here we handle all cases when both operands
7984 	 are constant.  */
7985       /* Handle difference of two symbolic constants,
7986 	 for the sake of an initializer.  */
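      /* The kind of case this is for is a hypothetical GNU-style initializer
	 such as `static long d = (char *) &b - (char *) &a;', where both
	 operands expand to symbolic constants and the difference must itself
	 remain a constant.  */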
7987       if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7988 	  && really_constant_p (TREE_OPERAND (exp, 0))
7989 	  && really_constant_p (TREE_OPERAND (exp, 1)))
7990 	{
7991 	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7992 			   NULL_RTX, &op0, &op1, modifier);
7993 
7994 	  /* If the last operand is a CONST_INT, use plus_constant of
7995 	     the negated constant.  Else make the MINUS.  */
7996 	  if (GET_CODE (op1) == CONST_INT)
7997 	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7998 	  else
7999 	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8000 	}
8001 
8002       /* No sense saving up arithmetic to be done
8003 	 if it's all in the wrong mode to form part of an address.
8004 	 And force_operand won't know whether to sign-extend or
8005 	 zero-extend.  */
8006       if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8007 	  || mode != ptr_mode)
8008 	goto binop;
8009 
8010       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8011 		       subtarget, &op0, &op1, modifier);
8012 
8013       /* Convert A - const to A + (-const).  */
8014       if (GET_CODE (op1) == CONST_INT)
8015 	{
8016 	  op1 = negate_rtx (mode, op1);
8017 	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8018 	}
8019 
8020       goto binop2;
8021 
8022     case MULT_EXPR:
8023       /* If first operand is constant, swap them.
8024 	 Thus the following special case checks need only
8025 	 check the second operand.  */
8026       if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8027 	{
8028 	  tree t1 = TREE_OPERAND (exp, 0);
8029 	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8030 	  TREE_OPERAND (exp, 1) = t1;
8031 	}
8032 
8033       /* Attempt to return something suitable for generating an
8034 	 indexed address, for machines that support that.  */
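      /* E.g. when this multiply is the `i * 4' inside a hypothetical
	 `arr[i]' address computation, EXPAND_SUM lets us hand back
	 (mult (reg) (const_int 4)) so the caller can fold it into an
	 indexed address instead of committing to a separate multiply.  */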
8035 
8036       if (modifier == EXPAND_SUM && mode == ptr_mode
8037 	  && host_integerp (TREE_OPERAND (exp, 1), 0))
8038 	{
8039 	  tree exp1 = TREE_OPERAND (exp, 1);
8040 
8041 	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8042 			     EXPAND_SUM);
8043 
8044 	  if (!REG_P (op0))
8045 	    op0 = force_operand (op0, NULL_RTX);
8046 	  if (!REG_P (op0))
8047 	    op0 = copy_to_mode_reg (mode, op0);
8048 
8049 	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8050 			       gen_int_mode (tree_low_cst (exp1, 0),
8051 					     TYPE_MODE (TREE_TYPE (exp1)))));
8052 	}
8053 
8054       if (modifier == EXPAND_STACK_PARM)
8055 	target = 0;
8056 
8057       /* Check for multiplying things that have been extended
8058 	 from a narrower type.  If this machine supports multiplying
8059 	 in that narrower type with a result in the desired type,
8060 	 do it that way, and avoid the explicit type-conversion.  */
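      /* E.g. with a hypothetical `short a, b' on a typical 32-bit target,
	 the product `(int) a * (int) b' can use a widening
	 HImode x HImode -> SImode multiply, when the target provides one,
	 instead of extending both operands to SImode first.  */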
8061 
8062       subexp0 = TREE_OPERAND (exp, 0);
8063       subexp1 = TREE_OPERAND (exp, 1);
8064       /* First, check if we have a multiplication of one signed and one
8065 	 unsigned operand.  */
8066       if (TREE_CODE (subexp0) == NOP_EXPR
8067 	  && TREE_CODE (subexp1) == NOP_EXPR
8068 	  && TREE_CODE (type) == INTEGER_TYPE
8069 	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8070 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8071 	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8072 	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8073 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8074 	      != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8075 	{
8076 	  enum machine_mode innermode
8077 	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8078 	  this_optab = usmul_widen_optab;
8079 	  if (mode == GET_MODE_WIDER_MODE (innermode))
8080 	    {
8081 	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8082 		{
8083 		  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8084 		    expand_operands (TREE_OPERAND (subexp0, 0),
8085 				     TREE_OPERAND (subexp1, 0),
8086 				     NULL_RTX, &op0, &op1, 0);
8087 		  else
8088 		    expand_operands (TREE_OPERAND (subexp0, 0),
8089 				     TREE_OPERAND (subexp1, 0),
8090 				     NULL_RTX, &op1, &op0, 0);
8091 
8092 		  goto binop3;
8093 		}
8094 	    }
8095 	}
8096       /* Check for a multiplication with matching signedness.  */
8097       else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8098 	  && TREE_CODE (type) == INTEGER_TYPE
8099 	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8100 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8101 	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8102 	       && int_fits_type_p (TREE_OPERAND (exp, 1),
8103 				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8104 	       /* Don't use a widening multiply if a shift will do.  */
8105 	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8106 		    > HOST_BITS_PER_WIDE_INT)
8107 		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8108 	      ||
8109 	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8110 	       && (TYPE_PRECISION (TREE_TYPE
8111 				   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8112 		   == TYPE_PRECISION (TREE_TYPE
8113 				      (TREE_OPERAND
8114 				       (TREE_OPERAND (exp, 0), 0))))
8115 	       /* If both operands are extended, they must either both
8116 		  be zero-extended or both be sign-extended.  */
8117 	       && (TYPE_UNSIGNED (TREE_TYPE
8118 				  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8119 		   == TYPE_UNSIGNED (TREE_TYPE
8120 				     (TREE_OPERAND
8121 				      (TREE_OPERAND (exp, 0), 0)))))))
8122 	{
8123 	  tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8124 	  enum machine_mode innermode = TYPE_MODE (op0type);
8125 	  bool zextend_p = TYPE_UNSIGNED (op0type);
8126 	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8127 	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8128 
8129 	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
8130 	    {
8131 	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8132 		{
8133 		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8134 		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8135 				     TREE_OPERAND (exp, 1),
8136 				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8137 		  else
8138 		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8139 				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8140 				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8141 		  goto binop3;
8142 		}
8143 	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8144 		       && innermode == word_mode)
8145 		{
8146 		  rtx htem, hipart;
8147 		  op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8148 		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8149 		    op1 = convert_modes (innermode, mode,
8150 					 expand_normal (TREE_OPERAND (exp, 1)),
8151 					 unsignedp);
8152 		  else
8153 		    op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8154 		  temp = expand_binop (mode, other_optab, op0, op1, target,
8155 				       unsignedp, OPTAB_LIB_WIDEN);
8156 		  hipart = gen_highpart (innermode, temp);
8157 		  htem = expand_mult_highpart_adjust (innermode, hipart,
8158 						      op0, op1, hipart,
8159 						      zextend_p);
8160 		  if (htem != hipart)
8161 		    emit_move_insn (hipart, htem);
8162 		  return REDUCE_BIT_FIELD (temp);
8163 		}
8164 	    }
8165 	}
8166       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8167 		       subtarget, &op0, &op1, 0);
8168       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8169 
8170     case TRUNC_DIV_EXPR:
8171     case FLOOR_DIV_EXPR:
8172     case CEIL_DIV_EXPR:
8173     case ROUND_DIV_EXPR:
8174     case EXACT_DIV_EXPR:
8175       if (modifier == EXPAND_STACK_PARM)
8176 	target = 0;
8177       /* Possible optimization: compute the dividend with EXPAND_SUM
8178 	 then if the divisor is constant can optimize the case
8179 	 where some terms of the dividend have coeffs divisible by it.  */
8180       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8181 		       subtarget, &op0, &op1, 0);
8182       return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8183 
8184     case RDIV_EXPR:
8185       goto binop;
8186 
8187     case TRUNC_MOD_EXPR:
8188     case FLOOR_MOD_EXPR:
8189     case CEIL_MOD_EXPR:
8190     case ROUND_MOD_EXPR:
8191       if (modifier == EXPAND_STACK_PARM)
8192 	target = 0;
8193       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8194 		       subtarget, &op0, &op1, 0);
8195       return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8196 
8197     case FIX_ROUND_EXPR:
8198     case FIX_FLOOR_EXPR:
8199     case FIX_CEIL_EXPR:
8200       gcc_unreachable ();			/* Not used for C.  */
8201 
8202     case FIX_TRUNC_EXPR:
8203       op0 = expand_normal (TREE_OPERAND (exp, 0));
8204       if (target == 0 || modifier == EXPAND_STACK_PARM)
8205 	target = gen_reg_rtx (mode);
8206       expand_fix (target, op0, unsignedp);
8207       return target;
8208 
8209     case FLOAT_EXPR:
8210       op0 = expand_normal (TREE_OPERAND (exp, 0));
8211       if (target == 0 || modifier == EXPAND_STACK_PARM)
8212 	target = gen_reg_rtx (mode);
8213       /* expand_float can't figure out what to do if FROM has VOIDmode.
8214 	 So give it the correct mode.  With -O, cse will optimize this.  */
8215       if (GET_MODE (op0) == VOIDmode)
8216 	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8217 				op0);
8218       expand_float (target, op0,
8219 		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8220       return target;
8221 
8222     case NEGATE_EXPR:
8223       op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8224       if (modifier == EXPAND_STACK_PARM)
8225 	target = 0;
8226       temp = expand_unop (mode,
8227       			  optab_for_tree_code (NEGATE_EXPR, type),
8228 			  op0, target, 0);
8229       gcc_assert (temp);
8230       return REDUCE_BIT_FIELD (temp);
8231 
8232     case ABS_EXPR:
8233       op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8234       if (modifier == EXPAND_STACK_PARM)
8235 	target = 0;
8236 
8237       /* ABS_EXPR is not valid for complex arguments.  */
8238       gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8239 		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8240 
8241       /* Unsigned abs is simply the operand.  Testing here means we don't
8242 	 risk generating incorrect code below.  */
8243       if (TYPE_UNSIGNED (type))
8244 	return op0;
8245 
8246       return expand_abs (mode, op0, target, unsignedp,
8247 			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8248 
8249     case MAX_EXPR:
8250     case MIN_EXPR:
8251       target = original_target;
8252       if (target == 0
8253 	  || modifier == EXPAND_STACK_PARM
8254 	  || (MEM_P (target) && MEM_VOLATILE_P (target))
8255 	  || GET_MODE (target) != mode
8256 	  || (REG_P (target)
8257 	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8258 	target = gen_reg_rtx (mode);
8259       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8260 		       target, &op0, &op1, 0);
8261 
8262       /* First try to do it with a special MIN or MAX instruction.
8263 	 If that does not win, use a conditional jump to select the proper
8264 	 value.  */
8265       this_optab = optab_for_tree_code (code, type);
8266       temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8267 			   OPTAB_WIDEN);
8268       if (temp != 0)
8269 	return temp;
8270 
8271       /* At this point, a MEM target is no longer useful; we will get better
8272 	 code without it.  */
8273 
8274       if (! REG_P (target))
8275 	target = gen_reg_rtx (mode);
8276 
8277       /* If op1 was placed in target, swap op0 and op1.  */
8278       if (target != op0 && target == op1)
8279 	{
8280 	  temp = op0;
8281 	  op0 = op1;
8282 	  op1 = temp;
8283 	}
8284 
8285       /* We generate better code and avoid problems with op1 mentioning
8286 	 target by forcing op1 into a pseudo if it isn't a constant.  */
8287       if (! CONSTANT_P (op1))
8288 	op1 = force_reg (mode, op1);
8289 
8290       {
8291 	enum rtx_code comparison_code;
8292 	rtx cmpop1 = op1;
8293 
8294 	if (code == MAX_EXPR)
8295 	  comparison_code = unsignedp ? GEU : GE;
8296 	else
8297 	  comparison_code = unsignedp ? LEU : LE;
8298 
8299 	/* Canonicalize to comparisons against 0.  */
8300 	if (op1 == const1_rtx)
8301 	  {
8302 	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8303 	       or (a != 0 ? a : 1) for unsigned.
8304 	       For MIN we are safe converting (a <= 1 ? a : 1)
8305 	       into (a <= 0 ? a : 1)  */
8306 	    cmpop1 = const0_rtx;
8307 	    if (code == MAX_EXPR)
8308 	      comparison_code = unsignedp ? NE : GT;
8309 	  }
8310 	if (op1 == constm1_rtx && !unsignedp)
8311 	  {
8312 	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8313 	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8314 	    cmpop1 = const0_rtx;
8315 	    if (code == MIN_EXPR)
8316 	      comparison_code = LT;
8317 	  }
8318 #ifdef HAVE_conditional_move
8319 	/* Use a conditional move if possible.  */
8320 	if (can_conditionally_move_p (mode))
8321 	  {
8322 	    rtx insn;
8323 
8324 	    /* ??? Same problem as in expmed.c: emit_conditional_move
8325 	       forces a stack adjustment via compare_from_rtx, and we
8326 	       lose the stack adjustment if the sequence we are about
8327 	       to create is discarded.  */
8328 	    do_pending_stack_adjust ();
8329 
8330 	    start_sequence ();
8331 
8332 	    /* Try to emit the conditional move.  */
8333 	    insn = emit_conditional_move (target, comparison_code,
8334 					  op0, cmpop1, mode,
8335 					  op0, op1, mode,
8336 					  unsignedp);
8337 
8338 	    /* If we could do the conditional move, emit the sequence,
8339 	       and return.  */
8340 	    if (insn)
8341 	      {
8342 		rtx seq = get_insns ();
8343 		end_sequence ();
8344 		emit_insn (seq);
8345 		return target;
8346 	      }
8347 
8348 	    /* Otherwise discard the sequence and fall back to code with
8349 	       branches.  */
8350 	    end_sequence ();
8351 	  }
8352 #endif
8353 	if (target != op0)
8354 	  emit_move_insn (target, op0);
8355 
8356 	temp = gen_label_rtx ();
8357 	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8358 				 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8359       }
8360       emit_move_insn (target, op1);
8361       emit_label (temp);
8362       return target;
8363 
8364     case BIT_NOT_EXPR:
8365       op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8366       if (modifier == EXPAND_STACK_PARM)
8367 	target = 0;
8368       temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8369       gcc_assert (temp);
8370       return temp;
8371 
8372       /* ??? Can optimize bitwise operations with one arg constant.
8373 	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8374 	 and (a bitwise1 b) bitwise2 b (etc)
8375 	 but that is probably not worth while.  */
8376 
8377       /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
8378 	 boolean values when we want in all cases to compute both of them.  In
8379 	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8380 	 as actual zero-or-1 values and then bitwise anding.  In cases where
8381 	 there cannot be any side effects, better code would be made by
8382 	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8383 	 how to recognize those cases.  */
8384 
8385     case TRUTH_AND_EXPR:
8386       code = BIT_AND_EXPR;
8387     case BIT_AND_EXPR:
8388       goto binop;
8389 
8390     case TRUTH_OR_EXPR:
8391       code = BIT_IOR_EXPR;
8392     case BIT_IOR_EXPR:
8393       goto binop;
8394 
8395     case TRUTH_XOR_EXPR:
8396       code = BIT_XOR_EXPR;
8397     case BIT_XOR_EXPR:
8398       goto binop;
8399 
8400     case LSHIFT_EXPR:
8401     case RSHIFT_EXPR:
8402     case LROTATE_EXPR:
8403     case RROTATE_EXPR:
8404       if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8405 	subtarget = 0;
8406       if (modifier == EXPAND_STACK_PARM)
8407 	target = 0;
8408       op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8409       return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8410 			   unsignedp);
8411 
8412       /* Could determine the answer when only additive constants differ.  Also,
8413 	 the addition of one can be handled by changing the condition.  */
8414     case LT_EXPR:
8415     case LE_EXPR:
8416     case GT_EXPR:
8417     case GE_EXPR:
8418     case EQ_EXPR:
8419     case NE_EXPR:
8420     case UNORDERED_EXPR:
8421     case ORDERED_EXPR:
8422     case UNLT_EXPR:
8423     case UNLE_EXPR:
8424     case UNGT_EXPR:
8425     case UNGE_EXPR:
8426     case UNEQ_EXPR:
8427     case LTGT_EXPR:
8428       temp = do_store_flag (exp,
8429 			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8430 			    tmode != VOIDmode ? tmode : mode, 0);
8431       if (temp != 0)
8432 	return temp;
8433 
8434       /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
8435       if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8436 	  && original_target
8437 	  && REG_P (original_target)
8438 	  && (GET_MODE (original_target)
8439 	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8440 	{
8441 	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8442 			      VOIDmode, 0);
8443 
8444 	  /* If temp is constant, we can just compute the result.  */
8445 	  if (GET_CODE (temp) == CONST_INT)
8446 	    {
8447 	      if (INTVAL (temp) != 0)
8448 	        emit_move_insn (target, const1_rtx);
8449 	      else
8450 	        emit_move_insn (target, const0_rtx);
8451 
8452 	      return target;
8453 	    }
8454 
8455 	  if (temp != original_target)
8456 	    {
8457 	      enum machine_mode mode1 = GET_MODE (temp);
8458 	      if (mode1 == VOIDmode)
8459 		mode1 = tmode != VOIDmode ? tmode : mode;
8460 
8461 	      temp = copy_to_mode_reg (mode1, temp);
8462 	    }
8463 
8464 	  op1 = gen_label_rtx ();
8465 	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8466 				   GET_MODE (temp), unsignedp, op1);
8467 	  emit_move_insn (temp, const1_rtx);
8468 	  emit_label (op1);
8469 	  return temp;
8470 	}
8471 
8472       /* If no set-flag instruction, must generate a conditional store
8473 	 into a temporary variable.  Drop through and handle this
8474 	 like && and ||.  */
8475 
8476       if (! ignore
8477 	  && (target == 0
8478 	      || modifier == EXPAND_STACK_PARM
8479 	      || ! safe_from_p (target, exp, 1)
8480 	      /* Make sure we don't have a hard reg (such as function's return
8481 		 value) live across basic blocks, if not optimizing.  */
8482 	      || (!optimize && REG_P (target)
8483 		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8484 	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8485 
8486       if (target)
8487 	emit_move_insn (target, const0_rtx);
8488 
8489       op1 = gen_label_rtx ();
8490       jumpifnot (exp, op1);
8491 
8492       if (target)
8493 	emit_move_insn (target, const1_rtx);
8494 
8495       emit_label (op1);
8496       return ignore ? const0_rtx : target;
8497 
8498     case TRUTH_NOT_EXPR:
8499       if (modifier == EXPAND_STACK_PARM)
8500 	target = 0;
8501       op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8502       /* The parser is careful to generate TRUTH_NOT_EXPR
8503 	 only with operands that are always zero or one.  */
8504       temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8505 			   target, 1, OPTAB_LIB_WIDEN);
8506       gcc_assert (temp);
8507       return temp;
8508 
8509     case STATEMENT_LIST:
8510       {
8511 	tree_stmt_iterator iter;
8512 
8513 	gcc_assert (ignore);
8514 
8515 	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8516 	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8517       }
8518       return const0_rtx;
8519 
8520     case COND_EXPR:
8521       /* A COND_EXPR with its type being VOID_TYPE represents a
8522 	 conditional jump and is handled in
8523 	 expand_gimple_cond_expr.  */
8524       gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8525 
8526         /* Note that COND_EXPRs whose type is a structure or union
8527   	 are required to be constructed to contain assignments of
8528   	 a temporary variable, so that we can evaluate them here
8529   	 for side effect only.  If type is void, we must do likewise.  */
8530 
8531         gcc_assert (!TREE_ADDRESSABLE (type)
8532 		    && !ignore
8533 		    && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8534 		    && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8535 
8536        /* If we are not to produce a result, we have no target.  Otherwise,
8537  	 if a target was specified use it; it will not be used as an
8538  	 intermediate target unless it is safe.  If no target, use a
8539  	 temporary.  */
8540 
8541        if (modifier != EXPAND_STACK_PARM
8542  	  && original_target
8543  	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8544  	  && GET_MODE (original_target) == mode
8545 #ifdef HAVE_conditional_move
8546  	  && (! can_conditionally_move_p (mode)
8547  	      || REG_P (original_target))
8548 #endif
8549  	  && !MEM_P (original_target))
8550  	temp = original_target;
8551        else
8552  	temp = assign_temp (type, 0, 0, 1);
8553 
8554        do_pending_stack_adjust ();
8555        NO_DEFER_POP;
8556        op0 = gen_label_rtx ();
8557        op1 = gen_label_rtx ();
8558        jumpifnot (TREE_OPERAND (exp, 0), op0);
8559        store_expr (TREE_OPERAND (exp, 1), temp,
8560  		  modifier == EXPAND_STACK_PARM);
8561 
8562        emit_jump_insn (gen_jump (op1));
8563        emit_barrier ();
8564        emit_label (op0);
8565        store_expr (TREE_OPERAND (exp, 2), temp,
8566  		  modifier == EXPAND_STACK_PARM);
8567 
8568        emit_label (op1);
8569        OK_DEFER_POP;
8570        return temp;
8571 
8572     case VEC_COND_EXPR:
8573 	target = expand_vec_cond_expr (exp, target);
8574 	return target;
8575 
8576     case MODIFY_EXPR:
8577       {
8578 	tree lhs = TREE_OPERAND (exp, 0);
8579 	tree rhs = TREE_OPERAND (exp, 1);
8580 
8581 	gcc_assert (ignore);
8582 
8583 	/* Check for |= or &= of a bitfield of size one into another bitfield
8584 	   of size 1.  In this case, (unless we need the result of the
8585 	   assignment) we can do this more efficiently with a
8586 	   test followed by an assignment, if necessary.
8587 
8588 	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
8589 	   things change so we do, this code should be enhanced to
8590 	   support it.  */
8591 	if (TREE_CODE (lhs) == COMPONENT_REF
8592 	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
8593 		|| TREE_CODE (rhs) == BIT_AND_EXPR)
8594 	    && TREE_OPERAND (rhs, 0) == lhs
8595 	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8596 	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8597 	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8598 	  {
8599 	    rtx label = gen_label_rtx ();
8600 	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8601 	    do_jump (TREE_OPERAND (rhs, 1),
8602 		     value ? label : 0,
8603 		     value ? 0 : label);
8604 	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8605 	    do_pending_stack_adjust ();
8606 	    emit_label (label);
8607 	    return const0_rtx;
8608 	  }
8609 
8610 	expand_assignment (lhs, rhs);
8611 
8612 	return const0_rtx;
8613       }
8614 
8615     case RETURN_EXPR:
8616       if (!TREE_OPERAND (exp, 0))
8617 	expand_null_return ();
8618       else
8619 	expand_return (TREE_OPERAND (exp, 0));
8620       return const0_rtx;
8621 
8622     case ADDR_EXPR:
8623       return expand_expr_addr_expr (exp, target, tmode, modifier);
8624 
8625     case COMPLEX_EXPR:
8626       /* Get the rtx code of the operands.  */
8627       op0 = expand_normal (TREE_OPERAND (exp, 0));
8628       op1 = expand_normal (TREE_OPERAND (exp, 1));
8629 
8630       if (!target)
8631 	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8632 
8633       /* Move the real (op0) and imaginary (op1) parts to their location.  */
8634       write_complex_part (target, op0, false);
8635       write_complex_part (target, op1, true);
8636 
8637       return target;
8638 
8639     case REALPART_EXPR:
8640       op0 = expand_normal (TREE_OPERAND (exp, 0));
8641       return read_complex_part (op0, false);
8642 
8643     case IMAGPART_EXPR:
8644       op0 = expand_normal (TREE_OPERAND (exp, 0));
8645       return read_complex_part (op0, true);
8646 
8647     case RESX_EXPR:
8648       expand_resx_expr (exp);
8649       return const0_rtx;
8650 
8651     case TRY_CATCH_EXPR:
8652     case CATCH_EXPR:
8653     case EH_FILTER_EXPR:
8654     case TRY_FINALLY_EXPR:
8655       /* Lowered by tree-eh.c.  */
8656       gcc_unreachable ();
8657 
8658     case WITH_CLEANUP_EXPR:
8659     case CLEANUP_POINT_EXPR:
8660     case TARGET_EXPR:
8661     case CASE_LABEL_EXPR:
8662     case VA_ARG_EXPR:
8663     case BIND_EXPR:
8664     case INIT_EXPR:
8665     case CONJ_EXPR:
8666     case COMPOUND_EXPR:
8667     case PREINCREMENT_EXPR:
8668     case PREDECREMENT_EXPR:
8669     case POSTINCREMENT_EXPR:
8670     case POSTDECREMENT_EXPR:
8671     case LOOP_EXPR:
8672     case EXIT_EXPR:
8673     case TRUTH_ANDIF_EXPR:
8674     case TRUTH_ORIF_EXPR:
8675       /* Lowered by gimplify.c.  */
8676       gcc_unreachable ();
8677 
8678     case EXC_PTR_EXPR:
8679       return get_exception_pointer (cfun);
8680 
8681     case FILTER_EXPR:
8682       return get_exception_filter (cfun);
8683 
8684     case FDESC_EXPR:
8685       /* Function descriptors are not valid except for as
8686 	 initialization constants, and should not be expanded.  */
8687       gcc_unreachable ();
8688 
8689     case SWITCH_EXPR:
8690       expand_case (exp);
8691       return const0_rtx;
8692 
8693     case LABEL_EXPR:
8694       expand_label (TREE_OPERAND (exp, 0));
8695       return const0_rtx;
8696 
8697     case ASM_EXPR:
8698       expand_asm_expr (exp);
8699       return const0_rtx;
8700 
8701     case WITH_SIZE_EXPR:
8702       /* WITH_SIZE_EXPR expands to its first argument.  The caller should
8703 	 have pulled out the size to use in whatever context it needed.  */
8704       return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8705 			       modifier, alt_rtl);
8706 
8707     case REALIGN_LOAD_EXPR:
8708       {
8709         tree oprnd0 = TREE_OPERAND (exp, 0);
8710         tree oprnd1 = TREE_OPERAND (exp, 1);
8711         tree oprnd2 = TREE_OPERAND (exp, 2);
8712         rtx op2;
8713 
8714         this_optab = optab_for_tree_code (code, type);
8715         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8716         op2 = expand_normal (oprnd2);
8717         temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8718 				  target, unsignedp);
8719         gcc_assert (temp);
8720         return temp;
8721       }
8722 
8723     case DOT_PROD_EXPR:
8724       {
8725 	tree oprnd0 = TREE_OPERAND (exp, 0);
8726 	tree oprnd1 = TREE_OPERAND (exp, 1);
8727 	tree oprnd2 = TREE_OPERAND (exp, 2);
8728 	rtx op2;
8729 
8730 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8731 	op2 = expand_normal (oprnd2);
8732 	target = expand_widen_pattern_expr (exp, op0, op1, op2,
8733 					    target, unsignedp);
8734 	return target;
8735       }
8736 
8737     case WIDEN_SUM_EXPR:
8738       {
8739         tree oprnd0 = TREE_OPERAND (exp, 0);
8740         tree oprnd1 = TREE_OPERAND (exp, 1);
8741 
8742         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8743         target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8744                                             target, unsignedp);
8745         return target;
8746       }
8747 
8748     case REDUC_MAX_EXPR:
8749     case REDUC_MIN_EXPR:
8750     case REDUC_PLUS_EXPR:
8751       {
8752         op0 = expand_normal (TREE_OPERAND (exp, 0));
8753         this_optab = optab_for_tree_code (code, type);
8754         temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8755         gcc_assert (temp);
8756         return temp;
8757       }
8758 
8759     case VEC_LSHIFT_EXPR:
8760     case VEC_RSHIFT_EXPR:
8761       {
8762 	target = expand_vec_shift_expr (exp, target);
8763 	return target;
8764       }
8765 
8766     default:
8767       return lang_hooks.expand_expr (exp, original_target, tmode,
8768 				     modifier, alt_rtl);
8769     }
8770 
8771   /* Here to do an ordinary binary operator.  */
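  /* binop expands both operands itself; binop2 is entered with OP0 and OP1
     already expanded; binop3 additionally expects THIS_OPTAB to have been
     chosen by the code jumping here.  */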
8772  binop:
8773   expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8774 		   subtarget, &op0, &op1, 0);
8775  binop2:
8776   this_optab = optab_for_tree_code (code, type);
8777  binop3:
8778   if (modifier == EXPAND_STACK_PARM)
8779     target = 0;
8780   temp = expand_binop (mode, this_optab, op0, op1, target,
8781 		       unsignedp, OPTAB_LIB_WIDEN);
8782   gcc_assert (temp);
8783   return REDUCE_BIT_FIELD (temp);
8784 }
8785 #undef REDUCE_BIT_FIELD
8786 
8787 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8788    signedness of TYPE), possibly returning the result in TARGET.  */
8789 static rtx
8790 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8791 {
8792   HOST_WIDE_INT prec = TYPE_PRECISION (type);
8793   if (target && GET_MODE (target) != GET_MODE (exp))
8794     target = 0;
8795   /* For constant values, reduce using build_int_cst_type. */
8796   if (GET_CODE (exp) == CONST_INT)
8797     {
8798       HOST_WIDE_INT value = INTVAL (exp);
8799       tree t = build_int_cst_type (type, value);
8800       return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
8801     }
8802   else if (TYPE_UNSIGNED (type))
8803     {
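      /* Zero-extension: mask off everything above the low PREC bits.  For a
	 hypothetical 12-bit unsigned field this is simply EXP & 0xfff.  */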
8804       rtx mask;
8805       if (prec < HOST_BITS_PER_WIDE_INT)
8806 	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8807 				   GET_MODE (exp));
8808       else
8809 	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8810 				   ((unsigned HOST_WIDE_INT) 1
8811 				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8812 				   GET_MODE (exp));
8813       return expand_and (GET_MODE (exp), exp, mask, target);
8814     }
8815   else
8816     {
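      /* Sign-extension: shift the field's sign bit up to the mode's sign bit,
	 then arithmetic-shift back down.  E.g. reducing to a 3-bit signed
	 field in an 8-bit mode turns 0b101 into (0b101 << 5) >> 5 = -3.  */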
8817       tree count = build_int_cst (NULL_TREE,
8818 				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8819       exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8820       return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8821     }
8822 }
8823 
8824 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8825    when applied to the address of EXP produces an address known to be
8826    aligned more than BIGGEST_ALIGNMENT.  */
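/* The offset recognized here has the shape (- (sizetype) &EXP) & (ALIGN - 1),
   i.e. the amount needed to round the address of EXP up to a runtime ALIGN
   boundary, where ALIGN is a power of two exceeding BIGGEST_ALIGNMENT.  */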
8827 
8828 static int
8829 is_aligning_offset (tree offset, tree exp)
8830 {
8831   /* Strip off any conversions.  */
8832   while (TREE_CODE (offset) == NON_LVALUE_EXPR
8833 	 || TREE_CODE (offset) == NOP_EXPR
8834 	 || TREE_CODE (offset) == CONVERT_EXPR)
8835     offset = TREE_OPERAND (offset, 0);
8836 
8837   /* We must now have a BIT_AND_EXPR with a constant that is one less than
8838      power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
8839   if (TREE_CODE (offset) != BIT_AND_EXPR
8840       || !host_integerp (TREE_OPERAND (offset, 1), 1)
8841       || compare_tree_int (TREE_OPERAND (offset, 1),
8842 			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8843       || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8844     return 0;
8845 
8846   /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8847      It must be NEGATE_EXPR.  Then strip any more conversions.  */
8848   offset = TREE_OPERAND (offset, 0);
8849   while (TREE_CODE (offset) == NON_LVALUE_EXPR
8850 	 || TREE_CODE (offset) == NOP_EXPR
8851 	 || TREE_CODE (offset) == CONVERT_EXPR)
8852     offset = TREE_OPERAND (offset, 0);
8853 
8854   if (TREE_CODE (offset) != NEGATE_EXPR)
8855     return 0;
8856 
8857   offset = TREE_OPERAND (offset, 0);
8858   while (TREE_CODE (offset) == NON_LVALUE_EXPR
8859 	 || TREE_CODE (offset) == NOP_EXPR
8860 	 || TREE_CODE (offset) == CONVERT_EXPR)
8861     offset = TREE_OPERAND (offset, 0);
8862 
8863   /* This must now be the address of EXP.  */
8864   return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8865 }
8866 
8867 /* Return the tree node if an ARG corresponds to a string constant or zero
8868    if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
8869    in bytes within the string that ARG is accessing.  The type of the
8870    offset will be `sizetype'.  */
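/* For example, given the argument built for a hypothetical `"hello" + 2'
   (an ADDR_EXPR of the STRING_CST plus the constant 2), this returns the
   STRING_CST and sets *PTR_OFFSET to 2.  */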
8871 
8872 tree
8873 string_constant (tree arg, tree *ptr_offset)
8874 {
8875   tree array, offset;
8876   STRIP_NOPS (arg);
8877 
8878   if (TREE_CODE (arg) == ADDR_EXPR)
8879     {
8880       if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8881 	{
8882 	  *ptr_offset = size_zero_node;
8883 	  return TREE_OPERAND (arg, 0);
8884 	}
8885       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8886 	{
8887 	  array = TREE_OPERAND (arg, 0);
8888 	  offset = size_zero_node;
8889 	}
8890       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8891 	{
8892 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8893 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8894 	  if (TREE_CODE (array) != STRING_CST
8895 	      && TREE_CODE (array) != VAR_DECL)
8896 	    return 0;
8897 	}
8898       else
8899 	return 0;
8900     }
8901   else if (TREE_CODE (arg) == PLUS_EXPR)
8902     {
8903       tree arg0 = TREE_OPERAND (arg, 0);
8904       tree arg1 = TREE_OPERAND (arg, 1);
8905 
8906       STRIP_NOPS (arg0);
8907       STRIP_NOPS (arg1);
8908 
8909       if (TREE_CODE (arg0) == ADDR_EXPR
8910 	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8911 	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8912 	{
8913 	  array = TREE_OPERAND (arg0, 0);
8914 	  offset = arg1;
8915 	}
8916       else if (TREE_CODE (arg1) == ADDR_EXPR
8917 	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8918 		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8919 	{
8920 	  array = TREE_OPERAND (arg1, 0);
8921 	  offset = arg0;
8922 	}
8923       else
8924 	return 0;
8925     }
8926   else
8927     return 0;
8928 
8929   if (TREE_CODE (array) == STRING_CST)
8930     {
8931       *ptr_offset = fold_convert (sizetype, offset);
8932       return array;
8933     }
8934   else if (TREE_CODE (array) == VAR_DECL)
8935     {
8936       int length;
8937 
8938       /* Variables initialized to string literals can be handled too.  */
8939       if (DECL_INITIAL (array) == NULL_TREE
8940 	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8941 	return 0;
8942 
8943       /* If they are read-only, non-volatile and bind locally.  */
8944       if (! TREE_READONLY (array)
8945 	  || TREE_SIDE_EFFECTS (array)
8946 	  || ! targetm.binds_local_p (array))
8947 	return 0;
8948 
8949       /* Avoid const char foo[4] = "abcde";  */
8950       if (DECL_SIZE_UNIT (array) == NULL_TREE
8951 	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8952 	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8953 	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8954 	return 0;
8955 
8956       /* If variable is bigger than the string literal, OFFSET must be constant
8957 	 and inside of the bounds of the string literal.  */
8958       offset = fold_convert (sizetype, offset);
8959       if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8960 	  && (! host_integerp (offset, 1)
8961 	      || compare_tree_int (offset, length) >= 0))
8962 	return 0;
8963 
8964       *ptr_offset = offset;
8965       return DECL_INITIAL (array);
8966     }
8967 
8968   return 0;
8969 }
8970 
8971 /* Generate code to calculate EXP using a store-flag instruction
8972    and return an rtx for the result.  EXP is either a comparison
8973    or a TRUTH_NOT_EXPR whose operand is a comparison.
8974 
8975    If TARGET is nonzero, store the result there if convenient.
8976 
8977    If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8978    cheap.
8979 
8980    Return zero if there is no suitable set-flag instruction
8981    available on this machine.
8982 
8983    Once expand_expr has been called on the arguments of the comparison,
8984    we are committed to doing the store flag, since it is not safe to
8985    re-evaluate the expression.  We emit the store-flag insn by calling
8986    emit_store_flag, but only expand the arguments if we have a reason
8987    to believe that emit_store_flag will be successful.  If we think that
8988    it will, but it isn't, we have to simulate the store-flag with a
8989    set/jump/set sequence.  */
8990 
8991 static rtx
8992 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8993 {
8994   enum rtx_code code;
8995   tree arg0, arg1, type;
8996   tree tem;
8997   enum machine_mode operand_mode;
8998   int invert = 0;
8999   int unsignedp;
9000   rtx op0, op1;
9001   enum insn_code icode;
9002   rtx subtarget = target;
9003   rtx result, label;
9004 
9005   /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9006      result at the end.  We can't simply invert the test since it would
9007      have already been inverted if it were valid.  This case occurs for
9008      some floating-point comparisons.  */
9009 
9010   if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9011     invert = 1, exp = TREE_OPERAND (exp, 0);
9012 
9013   arg0 = TREE_OPERAND (exp, 0);
9014   arg1 = TREE_OPERAND (exp, 1);
9015 
9016   /* Don't crash if the comparison was erroneous.  */
9017   if (arg0 == error_mark_node || arg1 == error_mark_node)
9018     return const0_rtx;
9019 
9020   type = TREE_TYPE (arg0);
9021   operand_mode = TYPE_MODE (type);
9022   unsignedp = TYPE_UNSIGNED (type);
9023 
9024   /* We won't bother with BLKmode store-flag operations because it would mean
9025      passing a lot of information to emit_store_flag.  */
9026   if (operand_mode == BLKmode)
9027     return 0;
9028 
9029   /* We won't bother with store-flag operations involving function pointers
9030      when function pointers must be canonicalized before comparisons.  */
9031 #ifdef HAVE_canonicalize_funcptr_for_compare
9032   if (HAVE_canonicalize_funcptr_for_compare
9033       && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9034 	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9035 	       == FUNCTION_TYPE))
9036 	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9037 	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9038 		  == FUNCTION_TYPE))))
9039     return 0;
9040 #endif
9041 
9042   STRIP_NOPS (arg0);
9043   STRIP_NOPS (arg1);
9044 
9045   /* Get the rtx comparison code to use.  We know that EXP is a comparison
9046      operation of some type.  Some comparisons against 1 and -1 can be
9047      converted to comparisons with zero.  Do so here so that the tests
9048      below will be aware that we have a comparison with zero.   These
9049      tests will not catch constants in the first operand, but constants
9050      are rarely passed as the first operand.  */
9051 
9052   switch (TREE_CODE (exp))
9053     {
9054     case EQ_EXPR:
9055       code = EQ;
9056       break;
9057     case NE_EXPR:
9058       code = NE;
9059       break;
9060     case LT_EXPR:
9061       if (integer_onep (arg1))
9062 	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9063       else
9064 	code = unsignedp ? LTU : LT;
9065       break;
9066     case LE_EXPR:
9067       if (! unsignedp && integer_all_onesp (arg1))
9068 	arg1 = integer_zero_node, code = LT;
9069       else
9070 	code = unsignedp ? LEU : LE;
9071       break;
9072     case GT_EXPR:
9073       if (! unsignedp && integer_all_onesp (arg1))
9074 	arg1 = integer_zero_node, code = GE;
9075       else
9076 	code = unsignedp ? GTU : GT;
9077       break;
9078     case GE_EXPR:
9079       if (integer_onep (arg1))
9080 	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9081       else
9082 	code = unsignedp ? GEU : GE;
9083       break;
9084 
9085     case UNORDERED_EXPR:
9086       code = UNORDERED;
9087       break;
9088     case ORDERED_EXPR:
9089       code = ORDERED;
9090       break;
9091     case UNLT_EXPR:
9092       code = UNLT;
9093       break;
9094     case UNLE_EXPR:
9095       code = UNLE;
9096       break;
9097     case UNGT_EXPR:
9098       code = UNGT;
9099       break;
9100     case UNGE_EXPR:
9101       code = UNGE;
9102       break;
9103     case UNEQ_EXPR:
9104       code = UNEQ;
9105       break;
9106     case LTGT_EXPR:
9107       code = LTGT;
9108       break;
9109 
9110     default:
9111       gcc_unreachable ();
9112     }
9113 
9114   /* Put a constant second.  */
9115   if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9116     {
9117       tem = arg0; arg0 = arg1; arg1 = tem;
9118       code = swap_condition (code);
9119     }
9120 
9121   /* If this is an equality or inequality test of a single bit, we can
9122      do this by shifting the bit being tested to the low-order bit and
9123      masking the result with the constant 1.  If the condition was EQ,
9124      we xor it with 1.  This does not require an scc insn and is faster
9125      than an scc insn even if we have it.
9126 
9127      The code to make this transformation was moved into fold_single_bit_test,
9128      so we just call into the folder and expand its result.  */
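  /* E.g. a hypothetical `(x & 4) != 0' becomes `(x >> 2) & 1', and the
     `== 0' form is the same with a final XOR against 1.  */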
9129 
9130   if ((code == NE || code == EQ)
9131       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9132       && integer_pow2p (TREE_OPERAND (arg0, 1)))
9133     {
9134       tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9135       return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9136 						arg0, arg1, type),
9137 			  target, VOIDmode, EXPAND_NORMAL);
9138     }
9139 
9140   /* Now see if we are likely to be able to do this.  Return if not.  */
9141   if (! can_compare_p (code, operand_mode, ccp_store_flag))
9142     return 0;
9143 
9144   icode = setcc_gen_code[(int) code];
9145   if (icode == CODE_FOR_nothing
9146       || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9147     {
9148       /* We can only do this if it is one of the special cases that
9149 	 can be handled without an scc insn.  */
9150       if ((code == LT && integer_zerop (arg1))
9151 	  || (! only_cheap && code == GE && integer_zerop (arg1)))
9152 	;
9153       else if (! only_cheap && (code == NE || code == EQ)
9154 	       && TREE_CODE (type) != REAL_TYPE
9155 	       && ((abs_optab->handlers[(int) operand_mode].insn_code
9156 		    != CODE_FOR_nothing)
9157 		   || (ffs_optab->handlers[(int) operand_mode].insn_code
9158 		       != CODE_FOR_nothing)))
9159 	;
9160       else
9161 	return 0;
9162     }
9163 
9164   if (! get_subtarget (target)
9165       || GET_MODE (subtarget) != operand_mode)
9166     subtarget = 0;
9167 
9168   expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9169 
9170   if (target == 0)
9171     target = gen_reg_rtx (mode);
9172 
9173   result = emit_store_flag (target, code, op0, op1,
9174 			    operand_mode, unsignedp, 1);
9175 
9176   if (result)
9177     {
9178       if (invert)
9179 	result = expand_binop (mode, xor_optab, result, const1_rtx,
9180 			       result, 0, OPTAB_LIB_WIDEN);
9181       return result;
9182     }
9183 
9184   /* If this failed, we have to do this with set/compare/jump/set code.  */
9185   if (!REG_P (target)
9186       || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9187     target = gen_reg_rtx (GET_MODE (target));
9188 
9189   emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9190   result = compare_from_rtx (op0, op1, code, unsignedp,
9191 			     operand_mode, NULL_RTX);
9192   if (GET_CODE (result) == CONST_INT)
9193     return (((result == const0_rtx && ! invert)
9194 	     || (result != const0_rtx && invert))
9195 	    ? const0_rtx : const1_rtx);
9196 
9197   /* The code of RESULT may not match CODE if compare_from_rtx
9198      decided to swap its operands and reverse the original code.
9199 
9200      We know that compare_from_rtx returns either a CONST_INT or
9201      a new comparison code, so it is safe to just extract the
9202      code from RESULT.  */
9203   code = GET_CODE (result);
9204 
9205   label = gen_label_rtx ();
9206   gcc_assert (bcc_gen_fctn[(int) code]);
9207 
9208   emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9209   emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9210   emit_label (label);
9211 
9212   return target;
9213 }
9214 
9215 
9216 /* Stubs in case we haven't got a casesi insn.  */
9217 #ifndef HAVE_casesi
9218 # define HAVE_casesi 0
9219 # define gen_casesi(a, b, c, d, e) (0)
9220 # define CODE_FOR_casesi CODE_FOR_nothing
9221 #endif
9222 
9223 /* If the machine does not have a case insn that compares the bounds,
9224    this means extra overhead for dispatch tables, which raises the
9225    threshold for using them.  */
9226 #ifndef CASE_VALUES_THRESHOLD
9227 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9228 #endif /* CASE_VALUES_THRESHOLD */
9229 
9230 unsigned int
9231 case_values_threshold (void)
9232 {
9233   return CASE_VALUES_THRESHOLD;
9234 }
9235 
9236 /* Attempt to generate a casesi instruction.  Returns 1 if successful,
9237    0 otherwise (i.e. if there is no casesi instruction).  */
9238 int
9239 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9240 	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9241 {
9242   enum machine_mode index_mode = SImode;
9243   int index_bits = GET_MODE_BITSIZE (index_mode);
9244   rtx op1, op2, index;
9245   enum machine_mode op_mode;
9246 
9247   if (! HAVE_casesi)
9248     return 0;
9249 
9250   /* Convert the index to SImode.  */
9251   if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9252     {
9253       enum machine_mode omode = TYPE_MODE (index_type);
9254       rtx rangertx = expand_normal (range);
9255 
9256       /* We must handle the endpoints in the original mode.  */
9257       index_expr = build2 (MINUS_EXPR, index_type,
9258 			   index_expr, minval);
9259       minval = integer_zero_node;
9260       index = expand_normal (index_expr);
9261       emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9262 			       omode, 1, default_label);
9263       /* Now we can safely truncate.  */
9264       index = convert_to_mode (index_mode, index, 0);
9265     }
9266   else
9267     {
9268       if (TYPE_MODE (index_type) != index_mode)
9269 	{
9270 	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
9271 	  index_expr = fold_convert (index_type, index_expr);
9272 	}
9273 
9274       index = expand_normal (index_expr);
9275     }
9276 
9277   do_pending_stack_adjust ();
9278 
9279   op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9280   if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9281       (index, op_mode))
9282     index = copy_to_mode_reg (op_mode, index);
9283 
9284   op1 = expand_normal (minval);
9285 
9286   op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9287   op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9288 		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9289   if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9290       (op1, op_mode))
9291     op1 = copy_to_mode_reg (op_mode, op1);
9292 
9293   op2 = expand_normal (range);
9294 
9295   op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9296   op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9297 		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9298   if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9299       (op2, op_mode))
9300     op2 = copy_to_mode_reg (op_mode, op2);
9301 
9302   emit_jump_insn (gen_casesi (index, op1, op2,
9303 			      table_label, default_label));
9304   return 1;
9305 }
9306 
9307 /* Attempt to generate a tablejump instruction; same concept.  */
9308 #ifndef HAVE_tablejump
9309 #define HAVE_tablejump 0
9310 #define gen_tablejump(x, y) (0)
9311 #endif
9312 
9313 /* Subroutine of the next function.
9314 
9315    INDEX is the value being switched on, with the lowest value
9316    in the table already subtracted.
9317    MODE is its expected mode (needed if INDEX is constant).
9318    RANGE is the length of the jump table.
9319    TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9320 
9321    DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9322    index value is out of range.  */
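/* In outline (a sketch, ignoring PIC and PC-relative dispatch tables, not
   the literal insn stream), the routine below emits the equivalent of:

       if ((unsigned) index > range)
	 goto default_label;
       temp = *(table_label + index * entry_size);
       goto *temp;

   where entry_size is GET_MODE_SIZE (CASE_VECTOR_MODE), the address
   arithmetic is done in Pmode, and the table entry is loaded in
   CASE_VECTOR_MODE before the indirect jump.  */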
9323 
9324 static void
9325 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9326 	      rtx default_label)
9327 {
9328   rtx temp, vector;
9329 
9330   if (INTVAL (range) > cfun->max_jumptable_ents)
9331     cfun->max_jumptable_ents = INTVAL (range);
9332 
9333   /* Do an unsigned comparison (in the proper mode) between the index
9334      expression and the value which represents the length of the range.
9335      Since we just finished subtracting the lower bound of the range
9336      from the index expression, this comparison allows us to simultaneously
9337      check that the original index expression value is both greater than
9338      or equal to the minimum value of the range and less than or equal to
9339      the maximum value of the range.  */
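  /* A worked example of this trick (illustrative values only): for a case
     range of 5 .. 12 the caller has already computed index - 5, and RANGE
     is 7.  An original index of 9 yields 4, which is not above 7, so we
     fall through; an original index of 3 yields (unsigned) -2, a huge
     unsigned value greater than 7, so the single GTU test also rejects
     values below the lower bound.  */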
9340 
9341   emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9342 			   default_label);
9343 
9344   /* If index is in range, it must fit in Pmode.
9345      Convert to Pmode so we can index with it.  */
9346   if (mode != Pmode)
9347     index = convert_to_mode (Pmode, index, 1);
9348 
9349   /* Don't let a MEM slip through, because then INDEX that comes
9350      out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9351      and break_out_memory_refs will go to work on it and mess it up.  */
9352 #ifdef PIC_CASE_VECTOR_ADDRESS
9353   if (flag_pic && !REG_P (index))
9354     index = copy_to_mode_reg (Pmode, index);
9355 #endif
9356 
9357   /* If flag_force_addr were to affect this address
9358      it could interfere with the tricky assumptions made
9359      about addresses that contain label-refs,
9360      which may be valid only very near the tablejump itself.  */
9361   /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9362      GET_MODE_SIZE, because this indicates how large insns are.  The other
9363      uses should all be Pmode, because they are addresses.  This code
9364      could fail if addresses and insns are not the same size.  */
9365   index = gen_rtx_PLUS (Pmode,
9366 			gen_rtx_MULT (Pmode, index,
9367 				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9368 			gen_rtx_LABEL_REF (Pmode, table_label));
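  /* For reference, the address built above has the shape (a sketch of the
     RTX, before any PIC adjustment):

	 (plus:P (mult:P index (const_int SIZE))
		 (label_ref:P table_label))

     where SIZE stands for GET_MODE_SIZE (CASE_VECTOR_MODE) and P for
     Pmode; PIC_CASE_VECTOR_ADDRESS, when defined, then rewrites this into
     whatever form the target's PIC dispatch tables require.  */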
9369 #ifdef PIC_CASE_VECTOR_ADDRESS
9370   if (flag_pic)
9371     index = PIC_CASE_VECTOR_ADDRESS (index);
9372   else
9373 #endif
9374     index = memory_address_noforce (CASE_VECTOR_MODE, index);
9375   temp = gen_reg_rtx (CASE_VECTOR_MODE);
9376   vector = gen_const_mem (CASE_VECTOR_MODE, index);
9377   convert_move (temp, vector, 0);
9378 
9379   emit_jump_insn (gen_tablejump (temp, table_label));
9380 
9381   /* If we are generating PIC code or if the table is PC-relative, the
9382      table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
9383   if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9384     emit_barrier ();
9385 }
9386 
9387 int
9388 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9389 	       rtx table_label, rtx default_label)
9390 {
9391   rtx index;
9392 
9393   if (! HAVE_tablejump)
9394     return 0;
9395 
9396   index_expr = fold_build2 (MINUS_EXPR, index_type,
9397 			    fold_convert (index_type, index_expr),
9398 			    fold_convert (index_type, minval));
9399   index = expand_normal (index_expr);
9400   do_pending_stack_adjust ();
9401 
9402   do_tablejump (index, TYPE_MODE (index_type),
9403 		convert_modes (TYPE_MODE (index_type),
9404 			       TYPE_MODE (TREE_TYPE (range)),
9405 			       expand_normal (range),
9406 			       TYPE_UNSIGNED (TREE_TYPE (range))),
9407 		table_label, default_label);
9408   return 1;
9409 }
9410 
9411 /* Nonzero if the mode is a valid vector mode for this architecture.
9412    This returns nonzero even if there is no hardware support for the
9413    vector mode, but we can emulate with narrower modes.  */
9414 
9415 int
9416 vector_mode_valid_p (enum machine_mode mode)
9417 {
9418   enum mode_class class = GET_MODE_CLASS (mode);
9419   enum machine_mode innermode;
9420 
9421   /* Doh!  What's going on?  */
9422   if (class != MODE_VECTOR_INT
9423       && class != MODE_VECTOR_FLOAT)
9424     return 0;
9425 
9426   /* Hardware support.  Woo hoo!  */
9427   if (targetm.vector_mode_supported_p (mode))
9428     return 1;
9429 
9430   innermode = GET_MODE_INNER (mode);
9431 
9432   /* We should probably return 1 if requesting V4DI and we have no DI,
9433      but do have V2DI, though this is probably very unlikely.  */
9434 
9435   /* If we have support for the inner mode, we can safely emulate it.
9436      We may not have V2DI, but we can emulate with a pair of DIs.  */
9437   return targetm.scalar_mode_supported_p (innermode);
9438 }
9439 
9440 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
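/* For example (a sketch of the mapping, with hypothetical values): a
   VECTOR_CST of a V4SImode type listing only the elements { 1, 2, 3 }
   would be turned into

       (const_vector:V4SI [(const_int 1) (const_int 2)
			   (const_int 3) (const_int 0)])

   with the missing trailing element filled in as zero by the loop at the
   end of the function.  */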
9441 static rtx
9442 const_vector_from_tree (tree exp)
9443 {
9444   rtvec v;
9445   int units, i;
9446   tree link, elt;
9447   enum machine_mode inner, mode;
9448 
9449   mode = TYPE_MODE (TREE_TYPE (exp));
9450 
9451   if (initializer_zerop (exp))
9452     return CONST0_RTX (mode);
9453 
9454   units = GET_MODE_NUNITS (mode);
9455   inner = GET_MODE_INNER (mode);
9456 
9457   v = rtvec_alloc (units);
9458 
9459   link = TREE_VECTOR_CST_ELTS (exp);
9460   for (i = 0; link; link = TREE_CHAIN (link), ++i)
9461     {
9462       elt = TREE_VALUE (link);
9463 
9464       if (TREE_CODE (elt) == REAL_CST)
9465 	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9466 							 inner);
9467       else
9468 	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9469 					       TREE_INT_CST_HIGH (elt),
9470 					       inner);
9471     }
9472 
9473   /* Initialize remaining elements to 0.  */
9474   for (; i < units; ++i)
9475     RTVEC_ELT (v, i) = CONST0_RTX (inner);
9476 
9477   return gen_rtx_CONST_VECTOR (mode, v);
9478 }
9479 #include "gt-expr.h"
9480