xref: /dragonfly/contrib/gcc-4.7/gcc/expr.c (revision 73610d44)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2    Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3    2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4    2012 Free Software Foundation, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48 #include "tree-iterator.h"
49 #include "tree-pass.h"
50 #include "tree-flow.h"
51 #include "target.h"
52 #include "common/common-target.h"
53 #include "timevar.h"
54 #include "df.h"
55 #include "diagnostic.h"
56 #include "ssaexpand.h"
57 #include "target-globals.h"
58 #include "params.h"
59 
60 /* Decide whether a function's arguments should be processed
61    from first to last or from last to first.
62 
63    They should be processed from last to first if the stack and args
64    grow in opposite directions, but only if we have push insns.  */
65 
66 #ifdef PUSH_ROUNDING
67 
68 #ifndef PUSH_ARGS_REVERSED
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED	/* If it's last to first.  */
71 #endif
72 #endif
73 
74 #endif
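/* Illustrative note (not part of the original sources): with the
   default above, a target that defines STACK_GROWS_DOWNWARD but not
   ARGS_GROW_DOWNWARD (the common case) gets PUSH_ARGS_REVERSED, i.e.
   arguments are processed from last to first so that the pushes lay
   them out in increasing-address order.  */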
75 
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
79 #else
80 #define STACK_PUSH_CODE PRE_INC
81 #endif
82 #endif
83 
84 
85 /* If this is nonzero, we do not bother generating VOLATILE
86    around volatile memory references, and we are willing to
87    output indirect addresses.  If cse is to follow, we reject
88    indirect addresses so a useful potential cse is generated;
89    if it is used only once, instruction combination will produce
90    the same indirect address eventually.  */
91 int cse_not_expected;
92 
93 /* This structure is used by move_by_pieces to describe the move to
94    be performed.  */
95 struct move_by_pieces_d
96 {
97   rtx to;
98   rtx to_addr;
99   int autinc_to;
100   int explicit_inc_to;
101   rtx from;
102   rtx from_addr;
103   int autinc_from;
104   int explicit_inc_from;
105   unsigned HOST_WIDE_INT len;
106   HOST_WIDE_INT offset;
107   int reverse;
108 };
109 
110 /* This structure is used by store_by_pieces to describe the store or
111    clear to be performed.  */
112 
113 struct store_by_pieces_d
114 {
115   rtx to;
116   rtx to_addr;
117   int autinc_to;
118   int explicit_inc_to;
119   unsigned HOST_WIDE_INT len;
120   HOST_WIDE_INT offset;
121   rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
122   void *constfundata;
123   int reverse;
124 };
125 
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 			      struct move_by_pieces_d *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 			       struct store_by_pieces_d *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 				     HOST_WIDE_INT, enum machine_mode,
142 				     tree, tree, int, alias_set_type);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
145 			unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
146 			enum machine_mode,
147 			tree, tree, alias_set_type, bool);
148 
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
150 
151 static int is_aligning_offset (const_tree, const_tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 			     enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (sepops, rtx, enum machine_mode);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
162 
163 /* This macro is used to determine whether move_by_pieces should be called
164    to perform a structure copy.  */
165 #ifndef MOVE_BY_PIECES_P
166 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
167   (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
168    < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
169 #endif
170 
171 /* This macro is used to determine whether clear_by_pieces should be
172    called to clear storage.  */
173 #ifndef CLEAR_BY_PIECES_P
174 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
175   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
176    < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
177 #endif
178 
179 /* This macro is used to determine whether store_by_pieces should be
180    called to "memset" storage with byte values other than zero.  */
181 #ifndef SET_BY_PIECES_P
182 #define SET_BY_PIECES_P(SIZE, ALIGN) \
183   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
184    < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
185 #endif
186 
187 /* This macro is used to determine whether store_by_pieces should be
188    called to "memcpy" storage when the source is a constant string.  */
189 #ifndef STORE_BY_PIECES_P
190 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
191   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
192    < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
193 #endif
194 
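/* Illustrative note (not part of the original sources): the four
   macros above are only fall-back defaults.  A target header may
   predefine any of them to impose its own policy; for example, a
   hypothetical port that always wants piecewise copies of at most 32
   bytes could define

     #define MOVE_BY_PIECES_P(SIZE, ALIGN) ((SIZE) <= 32)

   and the default definition above would then be skipped.  */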
195 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */
196 
197 #ifndef SLOW_UNALIGNED_ACCESS
198 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
199 #endif
200 
201 /* This is run to set up which modes can be used
202    directly in memory and to initialize the block move optab.  It is run
203    at the beginning of compilation and when the target is reinitialized.  */
204 
205 void
206 init_expr_target (void)
207 {
208   rtx insn, pat;
209   enum machine_mode mode;
210   int num_clobbers;
211   rtx mem, mem1;
212   rtx reg;
213 
214   /* Try indexing by frame ptr and try by stack ptr.
215      It is known that on the Convex the stack ptr isn't a valid index.
216      With luck, one or the other is valid on any machine.  */
217   mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
218   mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
219 
220   /* A scratch register we can modify in-place below to avoid
221      useless RTL allocations.  */
222   reg = gen_rtx_REG (VOIDmode, -1);
223 
224   insn = rtx_alloc (INSN);
225   pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
226   PATTERN (insn) = pat;
227 
228   for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
229        mode = (enum machine_mode) ((int) mode + 1))
230     {
231       int regno;
232 
233       direct_load[(int) mode] = direct_store[(int) mode] = 0;
234       PUT_MODE (mem, mode);
235       PUT_MODE (mem1, mode);
236       PUT_MODE (reg, mode);
237 
238       /* See if there is some register that can be used in this mode and
239 	 directly loaded or stored from memory.  */
240 
241       if (mode != VOIDmode && mode != BLKmode)
242 	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
243 	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
244 	     regno++)
245 	  {
246 	    if (! HARD_REGNO_MODE_OK (regno, mode))
247 	      continue;
248 
249 	    SET_REGNO (reg, regno);
250 
251 	    SET_SRC (pat) = mem;
252 	    SET_DEST (pat) = reg;
253 	    if (recog (pat, insn, &num_clobbers) >= 0)
254 	      direct_load[(int) mode] = 1;
255 
256 	    SET_SRC (pat) = mem1;
257 	    SET_DEST (pat) = reg;
258 	    if (recog (pat, insn, &num_clobbers) >= 0)
259 	      direct_load[(int) mode] = 1;
260 
261 	    SET_SRC (pat) = reg;
262 	    SET_DEST (pat) = mem;
263 	    if (recog (pat, insn, &num_clobbers) >= 0)
264 	      direct_store[(int) mode] = 1;
265 
266 	    SET_SRC (pat) = reg;
267 	    SET_DEST (pat) = mem1;
268 	    if (recog (pat, insn, &num_clobbers) >= 0)
269 	      direct_store[(int) mode] = 1;
270 	  }
271     }
272 
273   mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
274 
275   for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
276        mode = GET_MODE_WIDER_MODE (mode))
277     {
278       enum machine_mode srcmode;
279       for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
280 	   srcmode = GET_MODE_WIDER_MODE (srcmode))
281 	{
282 	  enum insn_code ic;
283 
284 	  ic = can_extend_p (mode, srcmode, 0);
285 	  if (ic == CODE_FOR_nothing)
286 	    continue;
287 
288 	  PUT_MODE (mem, srcmode);
289 
290 	  if (insn_operand_matches (ic, 1, mem))
291 	    float_extend_from_mem[mode][srcmode] = true;
292 	}
293     }
294 }
295 
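/* Illustrative note (not part of the original sources): the loops
   above record, for each machine mode, whether some hard register can
   be loaded from or stored to memory directly (direct_load[] and
   direct_store[]), and, for each pair of scalar float modes, whether
   the float-extension insn can take its operand straight from memory
   (float_extend_from_mem[][]).  convert_move below, for instance,
   checks direct_load[] before referring to a MEM in a narrower mode.  */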
296 /* This is run at the start of compiling a function.  */
297 
298 void
299 init_expr (void)
300 {
301   memset (&crtl->expr, 0, sizeof (crtl->expr));
302 }
303 
304 /* Copy data from FROM to TO, where the machine modes are not the same.
305    Both modes may be integer, or both may be floating, or both may be
306    fixed-point.
307    UNSIGNEDP should be nonzero if FROM is an unsigned type.
308    This causes zero-extension instead of sign-extension.  */
309 
310 void
311 convert_move (rtx to, rtx from, int unsignedp)
312 {
313   enum machine_mode to_mode = GET_MODE (to);
314   enum machine_mode from_mode = GET_MODE (from);
315   int to_real = SCALAR_FLOAT_MODE_P (to_mode);
316   int from_real = SCALAR_FLOAT_MODE_P (from_mode);
317   enum insn_code code;
318   rtx libcall;
319 
320   /* rtx code for making an equivalent value.  */
321   enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
322 			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
323 
324 
325   gcc_assert (to_real == from_real);
326   gcc_assert (to_mode != BLKmode);
327   gcc_assert (from_mode != BLKmode);
328 
329   /* If the source and destination are already the same, then there's
330      nothing to do.  */
331   if (to == from)
332     return;
333 
334   /* If FROM is a SUBREG that indicates that we have already done at least
335      the required extension, strip it.  We don't handle such SUBREGs as
336      TO here.  */
337 
338   if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
339       && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
340 	  >= GET_MODE_PRECISION (to_mode))
341       && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
342     from = gen_lowpart (to_mode, from), from_mode = to_mode;
343 
344   gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
345 
346   if (to_mode == from_mode
347       || (from_mode == VOIDmode && CONSTANT_P (from)))
348     {
349       emit_move_insn (to, from);
350       return;
351     }
352 
353   if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
354     {
355       gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
356 
357       if (VECTOR_MODE_P (to_mode))
358 	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
359       else
360 	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
361 
362       emit_move_insn (to, from);
363       return;
364     }
365 
366   if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
367     {
368       convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
369       convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
370       return;
371     }
372 
373   if (to_real)
374     {
375       rtx value, insns;
376       convert_optab tab;
377 
378       gcc_assert ((GET_MODE_PRECISION (from_mode)
379 		   != GET_MODE_PRECISION (to_mode))
380 		  || (DECIMAL_FLOAT_MODE_P (from_mode)
381 		      != DECIMAL_FLOAT_MODE_P (to_mode)));
382 
383       if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
384 	/* Conversion between decimal float and binary float, same size.  */
385 	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
386       else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
387 	tab = sext_optab;
388       else
389 	tab = trunc_optab;
390 
391       /* Try converting directly if the insn is supported.  */
392 
393       code = convert_optab_handler (tab, to_mode, from_mode);
394       if (code != CODE_FOR_nothing)
395 	{
396 	  emit_unop_insn (code, to, from,
397 			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
398 	  return;
399 	}
400 
401       /* Otherwise use a libcall.  */
402       libcall = convert_optab_libfunc (tab, to_mode, from_mode);
403 
404       /* Is this conversion implemented yet?  */
405       gcc_assert (libcall);
406 
407       start_sequence ();
408       value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
409 				       1, from, from_mode);
410       insns = get_insns ();
411       end_sequence ();
412       emit_libcall_block (insns, to, value,
413 			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
414 								       from)
415 			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
416       return;
417     }
418 
419   /* Handle pointer conversion.  */			/* SPEE 900220.  */
420   /* Targets are expected to provide conversion insns between PxImode and
421      xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
422   if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
423     {
424       enum machine_mode full_mode
425 	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
426 
427       gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
428 		  != CODE_FOR_nothing);
429 
430       if (full_mode != from_mode)
431 	from = convert_to_mode (full_mode, from, unsignedp);
432       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
433 		      to, from, UNKNOWN);
434       return;
435     }
436   if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
437     {
438       rtx new_from;
439       enum machine_mode full_mode
440 	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
441 
442       gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
443 		  != CODE_FOR_nothing);
444 
445       if (to_mode == full_mode)
446 	{
447 	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
448 						 from_mode),
449 			  to, from, UNKNOWN);
450 	  return;
451 	}
452 
453       new_from = gen_reg_rtx (full_mode);
454       emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
455 		      new_from, from, UNKNOWN);
456 
457       /* else proceed to integer conversions below.  */
458       from_mode = full_mode;
459       from = new_from;
460     }
461 
462   /* Make sure both are fixed-point modes or both are not.  */
463   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode)
464 	      == ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
465   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
466     {
467       /* If we widen from_mode to to_mode and they are in the same class,
468 	 we won't saturate the result.
469 	 Otherwise, always saturate the result to play safe.  */
470       if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
471 	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
472 	expand_fixed_convert (to, from, 0, 0);
473       else
474 	expand_fixed_convert (to, from, 0, 1);
475       return;
476     }
477 
478   /* Now both modes are integers.  */
479 
480   /* Handle expanding beyond a word.  */
481   if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
482       && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
483     {
484       rtx insns;
485       rtx lowpart;
486       rtx fill_value;
487       rtx lowfrom;
488       int i;
489       enum machine_mode lowpart_mode;
490       int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
491 
492       /* Try converting directly if the insn is supported.  */
493       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
494 	  != CODE_FOR_nothing)
495 	{
496 	  /* If FROM is a SUBREG, put it into a register.  Do this
497 	     so that we always generate the same set of insns for
498 	     better cse'ing; if an intermediate assignment occurred,
499 	     we won't be doing the operation directly on the SUBREG.  */
500 	  if (optimize > 0 && GET_CODE (from) == SUBREG)
501 	    from = force_reg (from_mode, from);
502 	  emit_unop_insn (code, to, from, equiv_code);
503 	  return;
504 	}
505       /* Next, try converting via full word.  */
506       else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
507 	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
508 		   != CODE_FOR_nothing))
509 	{
510 	  rtx word_to = gen_reg_rtx (word_mode);
511 	  if (REG_P (to))
512 	    {
513 	      if (reg_overlap_mentioned_p (to, from))
514 		from = force_reg (from_mode, from);
515 	      emit_clobber (to);
516 	    }
517 	  convert_move (word_to, from, unsignedp);
518 	  emit_unop_insn (code, to, word_to, equiv_code);
519 	  return;
520 	}
521 
522       /* No special multiword conversion insn; do it by hand.  */
523       start_sequence ();
524 
525       /* Since we will turn this into a no conflict block, we must ensure
526 	 that the source does not overlap the target.  */
527 
528       if (reg_overlap_mentioned_p (to, from))
529 	from = force_reg (from_mode, from);
530 
531       /* Get a copy of FROM widened to a word, if necessary.  */
532       if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
533 	lowpart_mode = word_mode;
534       else
535 	lowpart_mode = from_mode;
536 
537       lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
538 
539       lowpart = gen_lowpart (lowpart_mode, to);
540       emit_move_insn (lowpart, lowfrom);
541 
542       /* Compute the value to put in each remaining word.  */
543       if (unsignedp)
544 	fill_value = const0_rtx;
545       else
546 	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
547 				      LT, lowfrom, const0_rtx,
548 				      VOIDmode, 0, -1);
549 
550       /* Fill the remaining words.  */
551       for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 	{
553 	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
554 	  rtx subword = operand_subword (to, index, 1, to_mode);
555 
556 	  gcc_assert (subword);
557 
558 	  if (fill_value != subword)
559 	    emit_move_insn (subword, fill_value);
560 	}
561 
562       insns = get_insns ();
563       end_sequence ();
564 
565       emit_insn (insns);
566       return;
567     }
568 
569   /* Truncating multi-word to a word or less.  */
570   if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
571       && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
572     {
573       if (!((MEM_P (from)
574 	     && ! MEM_VOLATILE_P (from)
575 	     && direct_load[(int) to_mode]
576 	     && ! mode_dependent_address_p (XEXP (from, 0)))
577 	    || REG_P (from)
578 	    || GET_CODE (from) == SUBREG))
579 	from = force_reg (from_mode, from);
580       convert_move (to, gen_lowpart (word_mode, from), 0);
581       return;
582     }
583 
584   /* Now handle all the conversions between integers
585      no more than a word wide.  */
586 
587   /* For truncation, usually we can just refer to FROM in a narrower mode.  */
588   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
589       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
590     {
591       if (!((MEM_P (from)
592 	     && ! MEM_VOLATILE_P (from)
593 	     && direct_load[(int) to_mode]
594 	     && ! mode_dependent_address_p (XEXP (from, 0)))
595 	    || REG_P (from)
596 	    || GET_CODE (from) == SUBREG))
597 	from = force_reg (from_mode, from);
598       if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
599 	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
600 	from = copy_to_reg (from);
601       emit_move_insn (to, gen_lowpart (to_mode, from));
602       return;
603     }
604 
605   /* Handle extension.  */
606   if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
607     {
608       /* Convert directly if that works.  */
609       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
610 	  != CODE_FOR_nothing)
611 	{
612 	  emit_unop_insn (code, to, from, equiv_code);
613 	  return;
614 	}
615       else
616 	{
617 	  enum machine_mode intermediate;
618 	  rtx tmp;
619 	  int shift_amount;
620 
621 	  /* Search for a mode to convert via.  */
622 	  for (intermediate = from_mode; intermediate != VOIDmode;
623 	       intermediate = GET_MODE_WIDER_MODE (intermediate))
624 	    if (((can_extend_p (to_mode, intermediate, unsignedp)
625 		  != CODE_FOR_nothing)
626 		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
627 		     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
628 		&& (can_extend_p (intermediate, from_mode, unsignedp)
629 		    != CODE_FOR_nothing))
630 	      {
631 		convert_move (to, convert_to_mode (intermediate, from,
632 						   unsignedp), unsignedp);
633 		return;
634 	      }
635 
636 	  /* No suitable intermediate mode.
637 	     Generate what we need with shifts.  */
638 	  shift_amount = (GET_MODE_PRECISION (to_mode)
639 			  - GET_MODE_PRECISION (from_mode));
640 	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
641 	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
642 			      to, unsignedp);
643 	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
644 			      to, unsignedp);
645 	  if (tmp != to)
646 	    emit_move_insn (to, tmp);
647 	  return;
648 	}
649     }
650 
651   /* Support special truncate insns for certain modes.  */
652   if (convert_optab_handler (trunc_optab, to_mode,
653 			     from_mode) != CODE_FOR_nothing)
654     {
655       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
656 		      to, from, UNKNOWN);
657       return;
658     }
659 
660   /* Handle truncation of volatile memrefs, and so on;
661      the things that couldn't be truncated directly,
662      and for which there was no special instruction.
663 
664      ??? Code above formerly short-circuited this, for most integer
665      mode pairs, with a force_reg in from_mode followed by a recursive
666      call to this routine.  Appears always to have been wrong.  */
667   if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
668     {
669       rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
670       emit_move_insn (to, temp);
671       return;
672     }
673 
674   /* Mode combination is not recognized.  */
675   gcc_unreachable ();
676 }
677 
678 /* Return an rtx for a value that would result
679    from converting X to mode MODE.
680    Both X and MODE may be floating, or both integer.
681    UNSIGNEDP is nonzero if X is an unsigned value.
682    This can be done by referring to a part of X in place
683    or by copying to a new temporary with conversion.  */
684 
685 rtx
686 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
687 {
688   return convert_modes (mode, VOIDmode, x, unsignedp);
689 }
690 
691 /* Return an rtx for a value that would result
692    from converting X from mode OLDMODE to mode MODE.
693    Both modes may be floating, or both integer.
694    UNSIGNEDP is nonzero if X is an unsigned value.
695 
696    This can be done by referring to a part of X in place
697    or by copying to a new temporary with conversion.
698 
699    You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
700 
701 rtx
702 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
703 {
704   rtx temp;
705 
706   /* If FROM is a SUBREG that indicates that we have already done at least
707      the required extension, strip it.  */
708 
709   if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
710       && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
711       && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
712     x = gen_lowpart (mode, x);
713 
714   if (GET_MODE (x) != VOIDmode)
715     oldmode = GET_MODE (x);
716 
717   if (mode == oldmode)
718     return x;
719 
720   /* There is one case that we must handle specially: If we are converting
721      a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
722      we are to interpret the constant as unsigned, gen_lowpart will do
723 	 the wrong thing if the constant appears negative.  What we want to do is
724      make the high-order word of the constant zero, not all ones.  */
725 
726   if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
727       && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
728       && CONST_INT_P (x) && INTVAL (x) < 0)
729     {
730       double_int val = uhwi_to_double_int (INTVAL (x));
731 
732       /* We need to zero extend VAL.  */
733       if (oldmode != VOIDmode)
734 	val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));
735 
736       return immed_double_int_const (val, mode);
737     }
738 
739   /* We can do this with a gen_lowpart if both desired and current modes
740      are integer, and this is either a constant integer, a register, or a
741      non-volatile MEM.  Except for the constant case where MODE is no
742      wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
743 
744   if ((CONST_INT_P (x)
745        && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
746       || (GET_MODE_CLASS (mode) == MODE_INT
747 	  && GET_MODE_CLASS (oldmode) == MODE_INT
748 	  && (GET_CODE (x) == CONST_DOUBLE
749 	      || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
750 		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
751 		       && direct_load[(int) mode])
752 		      || (REG_P (x)
753 			  && (! HARD_REGISTER_P (x)
754 			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
755 			  && TRULY_NOOP_TRUNCATION_MODES_P (mode,
756 							    GET_MODE (x))))))))
757     {
758       /* ?? If we don't know OLDMODE, we have to assume here that
759 	 X does not need sign- or zero-extension.   This may not be
760 	 the case, but it's the best we can do.  */
761       if (CONST_INT_P (x) && oldmode != VOIDmode
762 	  && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
763 	{
764 	  HOST_WIDE_INT val = INTVAL (x);
765 
766 	  /* We must sign or zero-extend in this case.  Start by
767 	     zero-extending, then sign extend if we need to.  */
768 	  val &= GET_MODE_MASK (oldmode);
769 	  if (! unsignedp
770 	      && val_signbit_known_set_p (oldmode, val))
771 	    val |= ~GET_MODE_MASK (oldmode);
772 
773 	  return gen_int_mode (val, mode);
774 	}
775 
776       return gen_lowpart (mode, x);
777     }
778 
779   /* Converting an integer constant into MODE is always equivalent to
780      a subreg operation.  */
781   if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
782     {
783       gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
784       return simplify_gen_subreg (mode, x, oldmode, 0);
785     }
786 
787   temp = gen_reg_rtx (mode);
788   convert_move (temp, x, unsignedp);
789   return temp;
790 }
791 
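/* Illustrative sketch (not part of the original sources): a typical
   use of the conversion routines above.  Assuming FROM holds an
   SImode value, this widens it into a fresh DImode pseudo,
   zero-extending when UNSIGNEDP is nonzero and sign-extending
   otherwise.  */
#if 0
static rtx
example_widen_si_to_di (rtx from, int unsignedp)
{
  /* convert_to_mode (DImode, from, unsignedp) would instead return a
     DImode rtx, reusing FROM in place when that is possible.  */
  rtx to = gen_reg_rtx (DImode);
  convert_move (to, from, unsignedp);
  return to;
}
#endif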
792 /* Return the largest alignment we can use for doing a move (or store)
793    of MAX_PIECES.  ALIGN is the largest alignment we could use.  */
794 
795 static unsigned int
796 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
797 {
798   enum machine_mode tmode;
799 
800   tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
801   if (align >= GET_MODE_ALIGNMENT (tmode))
802     align = GET_MODE_ALIGNMENT (tmode);
803   else
804     {
805       enum machine_mode tmode, xmode;
806 
807       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
808 	   tmode != VOIDmode;
809 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
810 	if (GET_MODE_SIZE (tmode) > max_pieces
811 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
812 	  break;
813 
814       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
815     }
816 
817   return align;
818 }
819 
820 /* Return the widest integer mode no wider than SIZE.  If no such mode
821    can be found, return VOIDmode.  */
822 
823 static enum machine_mode
824 widest_int_mode_for_size (unsigned int size)
825 {
826   enum machine_mode tmode, mode = VOIDmode;
827 
828   for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
829        tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
830     if (GET_MODE_SIZE (tmode) < size)
831       mode = tmode;
832 
833   return mode;
834 }
835 
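/* Illustrative note (not part of the original sources): the "<" above
   is strict, so callers pass a bound one byte larger than what they
   can actually use (e.g. MOVE_MAX_PIECES + 1).  On a target where
   SImode is 4 bytes and DImode is 8, widest_int_mode_for_size (5)
   therefore returns SImode.  */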
836 /* STORE_MAX_PIECES is the number of bytes at a time that we can
837    store efficiently.  Due to internal GCC limitations, this is
838    MOVE_MAX_PIECES limited by the number of bytes GCC can represent
839    for an immediate constant.  */
840 
841 #define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
842 
843 /* Determine whether the LEN bytes can be moved by using several move
844    instructions.  Return nonzero if a call to move_by_pieces should
845    succeed.  */
846 
847 int
848 can_move_by_pieces (unsigned HOST_WIDE_INT len,
849 		    unsigned int align ATTRIBUTE_UNUSED)
850 {
851   return MOVE_BY_PIECES_P (len, align);
852 }
853 
854 /* Generate several move instructions to copy LEN bytes from block FROM to
855    block TO.  (These are MEM rtx's with BLKmode).
856 
857    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
858    used to push FROM to the stack.
859 
860    ALIGN is maximum stack alignment we can assume.
861 
862    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
863    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
864    stpcpy.  */
865 
866 rtx
867 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
868 		unsigned int align, int endp)
869 {
870   struct move_by_pieces_d data;
871   enum machine_mode to_addr_mode, from_addr_mode
872     = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
873   rtx to_addr, from_addr = XEXP (from, 0);
874   unsigned int max_size = MOVE_MAX_PIECES + 1;
875   enum insn_code icode;
876 
877   align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
878 
879   data.offset = 0;
880   data.from_addr = from_addr;
881   if (to)
882     {
883       to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
884       to_addr = XEXP (to, 0);
885       data.to = to;
886       data.autinc_to
887 	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
888 	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
889       data.reverse
890 	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
891     }
892   else
893     {
894       to_addr_mode = VOIDmode;
895       to_addr = NULL_RTX;
896       data.to = NULL_RTX;
897       data.autinc_to = 1;
898 #ifdef STACK_GROWS_DOWNWARD
899       data.reverse = 1;
900 #else
901       data.reverse = 0;
902 #endif
903     }
904   data.to_addr = to_addr;
905   data.from = from;
906   data.autinc_from
907     = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
908        || GET_CODE (from_addr) == POST_INC
909        || GET_CODE (from_addr) == POST_DEC);
910 
911   data.explicit_inc_from = 0;
912   data.explicit_inc_to = 0;
913   if (data.reverse) data.offset = len;
914   data.len = len;
915 
916   /* If copying requires more than two move insns,
917      copy addresses to registers (to make displacements shorter)
918      and use post-increment if available.  */
919   if (!(data.autinc_from && data.autinc_to)
920       && move_by_pieces_ninsns (len, align, max_size) > 2)
921     {
922       /* Find the mode of the largest move...
923 	 MODE might not be used depending on the definitions of the
924 	 USE_* macros below.  */
925       enum machine_mode mode ATTRIBUTE_UNUSED
926 	= widest_int_mode_for_size (max_size);
927 
928       if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
929 	{
930 	  data.from_addr = copy_to_mode_reg (from_addr_mode,
931 					     plus_constant (from_addr, len));
932 	  data.autinc_from = 1;
933 	  data.explicit_inc_from = -1;
934 	}
935       if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
936 	{
937 	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
938 	  data.autinc_from = 1;
939 	  data.explicit_inc_from = 1;
940 	}
941       if (!data.autinc_from && CONSTANT_P (from_addr))
942 	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
943       if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
944 	{
945 	  data.to_addr = copy_to_mode_reg (to_addr_mode,
946 					   plus_constant (to_addr, len));
947 	  data.autinc_to = 1;
948 	  data.explicit_inc_to = -1;
949 	}
950       if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
951 	{
952 	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
953 	  data.autinc_to = 1;
954 	  data.explicit_inc_to = 1;
955 	}
956       if (!data.autinc_to && CONSTANT_P (to_addr))
957 	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
958     }
959 
960   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
961 
962   /* First move what we can in the largest integer mode, then go to
963      successively smaller modes.  */
964 
965   while (max_size > 1)
966     {
967       enum machine_mode mode = widest_int_mode_for_size (max_size);
968 
969       if (mode == VOIDmode)
970 	break;
971 
972       icode = optab_handler (mov_optab, mode);
973       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
974 	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
975 
976       max_size = GET_MODE_SIZE (mode);
977     }
978 
979   /* The code above should have handled everything.  */
980   gcc_assert (!data.len);
981 
982   if (endp)
983     {
984       rtx to1;
985 
986       gcc_assert (!data.reverse);
987       if (data.autinc_to)
988 	{
989 	  if (endp == 2)
990 	    {
991 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
992 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
993 	      else
994 		data.to_addr = copy_to_mode_reg (to_addr_mode,
995 						 plus_constant (data.to_addr,
996 								-1));
997 	    }
998 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
999 					   data.offset);
1000 	}
1001       else
1002 	{
1003 	  if (endp == 2)
1004 	    --data.offset;
1005 	  to1 = adjust_address (data.to, QImode, data.offset);
1006 	}
1007       return to1;
1008     }
1009   else
1010     return data.to;
1011 }
1012 
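/* Illustrative sketch (not part of the original sources): how a caller
   might use can_move_by_pieces/move_by_pieces when the length of the
   copy is a compile-time constant.  TO and FROM are BLKmode MEMs;
   ENDP == 0 asks for the plain destination to be returned.  */
#if 0
static void
example_copy_small_block (rtx to, rtx from, unsigned HOST_WIDE_INT len)
{
  unsigned int align = MIN (MEM_ALIGN (to), MEM_ALIGN (from));

  if (can_move_by_pieces (len, align))
    move_by_pieces (to, from, len, align, 0);
  else
    emit_block_move (to, from, GEN_INT (len), BLOCK_OP_NORMAL);
}
#endif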
1013 /* Return number of insns required to move L bytes by pieces.
1014    ALIGN (in bits) is maximum alignment we can assume.  */
1015 
1016 unsigned HOST_WIDE_INT
1017 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1018 		       unsigned int max_size)
1019 {
1020   unsigned HOST_WIDE_INT n_insns = 0;
1021 
1022   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1023 
1024   while (max_size > 1)
1025     {
1026       enum machine_mode mode;
1027       enum insn_code icode;
1028 
1029       mode = widest_int_mode_for_size (max_size);
1030 
1031       if (mode == VOIDmode)
1032 	break;
1033 
1034       icode = optab_handler (mov_optab, mode);
1035       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1036 	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1037 
1038       max_size = GET_MODE_SIZE (mode);
1039     }
1040 
1041   gcc_assert (!l);
1042   return n_insns;
1043 }
1044 
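/* Illustrative worked example (not part of the original sources): on a
   target whose widest mode usable here is 4-byte SImode, moving
   L == 10 sufficiently aligned bytes costs 10/4 = 2 SImode moves plus
   2/2 = 1 HImode move, so move_by_pieces_ninsns returns 3.  */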
1045 /* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1046    with move instructions for mode MODE.  GENFUN is the gen_... function
1047    to make a move insn for that mode.  DATA has all the other info.  */
1048 
1049 static void
1050 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1051 		  struct move_by_pieces_d *data)
1052 {
1053   unsigned int size = GET_MODE_SIZE (mode);
1054   rtx to1 = NULL_RTX, from1;
1055 
1056   while (data->len >= size)
1057     {
1058       if (data->reverse)
1059 	data->offset -= size;
1060 
1061       if (data->to)
1062 	{
1063 	  if (data->autinc_to)
1064 	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1065 					     data->offset);
1066 	  else
1067 	    to1 = adjust_address (data->to, mode, data->offset);
1068 	}
1069 
1070       if (data->autinc_from)
1071 	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1072 					   data->offset);
1073       else
1074 	from1 = adjust_address (data->from, mode, data->offset);
1075 
1076       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1077 	emit_insn (gen_add2_insn (data->to_addr,
1078 				  GEN_INT (-(HOST_WIDE_INT)size)));
1079       if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1080 	emit_insn (gen_add2_insn (data->from_addr,
1081 				  GEN_INT (-(HOST_WIDE_INT)size)));
1082 
1083       if (data->to)
1084 	emit_insn ((*genfun) (to1, from1));
1085       else
1086 	{
1087 #ifdef PUSH_ROUNDING
1088 	  emit_single_push_insn (mode, from1, NULL);
1089 #else
1090 	  gcc_unreachable ();
1091 #endif
1092 	}
1093 
1094       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1095 	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1096       if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1097 	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1098 
1099       if (! data->reverse)
1100 	data->offset += size;
1101 
1102       data->len -= size;
1103     }
1104 }
1105 
1106 /* Emit code to move a block Y to a block X.  This may be done with
1107    string-move instructions, with multiple scalar move instructions,
1108    or with a library call.
1109 
1110    Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1111    SIZE is an rtx that says how long they are.
1112    ALIGN is the maximum alignment we can assume they have.
1113    METHOD describes what kind of copy this is, and what mechanisms may be used.
1114 
1115    Return the address of the new block, if memcpy is called and returns it,
1116    0 otherwise.  */
1117 
1118 rtx
1119 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1120 		       unsigned int expected_align, HOST_WIDE_INT expected_size)
1121 {
1122   bool may_use_call;
1123   rtx retval = 0;
1124   unsigned int align;
1125 
1126   gcc_assert (size);
1127   if (CONST_INT_P (size)
1128       && INTVAL (size) == 0)
1129     return 0;
1130 
1131   switch (method)
1132     {
1133     case BLOCK_OP_NORMAL:
1134     case BLOCK_OP_TAILCALL:
1135       may_use_call = true;
1136       break;
1137 
1138     case BLOCK_OP_CALL_PARM:
1139       may_use_call = block_move_libcall_safe_for_call_parm ();
1140 
1141       /* Make inhibit_defer_pop nonzero around the library call
1142 	 to force it to pop the arguments right away.  */
1143       NO_DEFER_POP;
1144       break;
1145 
1146     case BLOCK_OP_NO_LIBCALL:
1147       may_use_call = false;
1148       break;
1149 
1150     default:
1151       gcc_unreachable ();
1152     }
1153 
1154   gcc_assert (MEM_P (x) && MEM_P (y));
1155   align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1156   gcc_assert (align >= BITS_PER_UNIT);
1157 
1158   /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1159      block copy is more efficient for other large modes, e.g. DCmode.  */
1160   x = adjust_address (x, BLKmode, 0);
1161   y = adjust_address (y, BLKmode, 0);
1162 
1163   /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1164      can be incorrect is coming from __builtin_memcpy.  */
1165   if (CONST_INT_P (size))
1166     {
1167       x = shallow_copy_rtx (x);
1168       y = shallow_copy_rtx (y);
1169       set_mem_size (x, INTVAL (size));
1170       set_mem_size (y, INTVAL (size));
1171     }
1172 
1173   if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1174     move_by_pieces (x, y, INTVAL (size), align, 0);
1175   else if (emit_block_move_via_movmem (x, y, size, align,
1176 				       expected_align, expected_size))
1177     ;
1178   else if (may_use_call
1179 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1180 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1181     {
1182       /* Since x and y are passed to a libcall, mark the corresponding
1183 	 tree EXPR as addressable.  */
1184       tree y_expr = MEM_EXPR (y);
1185       tree x_expr = MEM_EXPR (x);
1186       if (y_expr)
1187 	mark_addressable (y_expr);
1188       if (x_expr)
1189 	mark_addressable (x_expr);
1190       retval = emit_block_move_via_libcall (x, y, size,
1191 					    method == BLOCK_OP_TAILCALL);
1192     }
1193 
1194   else
1195     emit_block_move_via_loop (x, y, size, align);
1196 
1197   if (method == BLOCK_OP_CALL_PARM)
1198     OK_DEFER_POP;
1199 
1200   return retval;
1201 }
1202 
1203 rtx
1204 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1205 {
1206   return emit_block_move_hints (x, y, size, method, 0, -1);
1207 }
1208 
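/* Illustrative note (not part of the original sources): METHOD selects
   which mechanisms the code above may use.  BLOCK_OP_NORMAL and
   BLOCK_OP_TAILCALL permit a memcpy libcall, BLOCK_OP_CALL_PARM
   permits one only when block_move_libcall_safe_for_call_parm says it
   cannot clobber outgoing arguments already on the stack, and
   BLOCK_OP_NO_LIBCALL never calls memcpy, falling back to the explicit
   loop instead.  */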
1209 /* A subroutine of emit_block_move.  Returns true if calling the
1210    block move libcall will not clobber any parameters which may have
1211    already been placed on the stack.  */
1212 
1213 static bool
1214 block_move_libcall_safe_for_call_parm (void)
1215 {
1216 #if defined (REG_PARM_STACK_SPACE)
1217   tree fn;
1218 #endif
1219 
1220   /* If arguments are pushed on the stack, then they're safe.  */
1221   if (PUSH_ARGS)
1222     return true;
1223 
1224   /* If registers go on the stack anyway, any argument is sure to clobber
1225      an outgoing argument.  */
1226 #if defined (REG_PARM_STACK_SPACE)
1227   fn = emit_block_move_libcall_fn (false);
1228   /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1229      depend on its argument.  */
1230   (void) fn;
1231   if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1232       && REG_PARM_STACK_SPACE (fn) != 0)
1233     return false;
1234 #endif
1235 
1236   /* If any argument goes in memory, then it might clobber an outgoing
1237      argument.  */
1238   {
1239     CUMULATIVE_ARGS args_so_far_v;
1240     cumulative_args_t args_so_far;
1241     tree fn, arg;
1242 
1243     fn = emit_block_move_libcall_fn (false);
1244     INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1245     args_so_far = pack_cumulative_args (&args_so_far_v);
1246 
1247     arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1248     for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1249       {
1250 	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1251 	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1252 					      NULL_TREE, true);
1253 	if (!tmp || !REG_P (tmp))
1254 	  return false;
1255 	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1256 	  return false;
1257 	targetm.calls.function_arg_advance (args_so_far, mode,
1258 					    NULL_TREE, true);
1259       }
1260   }
1261   return true;
1262 }
1263 
1264 /* A subroutine of emit_block_move.  Expand a movmem pattern;
1265    return true if successful.  */
1266 
1267 static bool
1268 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1269 			    unsigned int expected_align, HOST_WIDE_INT expected_size)
1270 {
1271   int save_volatile_ok = volatile_ok;
1272   enum machine_mode mode;
1273 
1274   if (expected_align < align)
1275     expected_align = align;
1276 
1277   /* Since this is a move insn, we don't care about volatility.  */
1278   volatile_ok = 1;
1279 
1280   /* Try the most limited insn first, because there's no point
1281      including more than one in the machine description unless
1282      the more limited one has some advantage.  */
1283 
1284   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1285        mode = GET_MODE_WIDER_MODE (mode))
1286     {
1287       enum insn_code code = direct_optab_handler (movmem_optab, mode);
1288 
1289       if (code != CODE_FOR_nothing
1290 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1291 	     here because if SIZE is less than the mode mask, as it is
1292 	     returned by the macro, it will definitely be less than the
1293 	     actual mode mask.  */
1294 	  && ((CONST_INT_P (size)
1295 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1296 		   <= (GET_MODE_MASK (mode) >> 1)))
1297 	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
1298 	{
1299 	  struct expand_operand ops[6];
1300 	  unsigned int nops;
1301 
1302 	  /* ??? When called via emit_block_move_for_call, it'd be
1303 	     nice if there were some way to inform the backend, so
1304 	     that it doesn't fail the expansion because it thinks
1305 	     emitting the libcall would be more efficient.  */
1306 	  nops = insn_data[(int) code].n_generator_args;
1307 	  gcc_assert (nops == 4 || nops == 6);
1308 
1309 	  create_fixed_operand (&ops[0], x);
1310 	  create_fixed_operand (&ops[1], y);
1311 	  /* The check above guarantees that this size conversion is valid.  */
1312 	  create_convert_operand_to (&ops[2], size, mode, true);
1313 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1314 	  if (nops == 6)
1315 	    {
1316 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1317 	      create_integer_operand (&ops[5], expected_size);
1318 	    }
1319 	  if (maybe_expand_insn (code, nops, ops))
1320 	    {
1321 	      volatile_ok = save_volatile_ok;
1322 	      return true;
1323 	    }
1324 	}
1325     }
1326 
1327   volatile_ok = save_volatile_ok;
1328   return false;
1329 }
1330 
1331 /* A subroutine of emit_block_move.  Expand a call to memcpy.
1332    Return the return value from memcpy, 0 otherwise.  */
1333 
1334 rtx
1335 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1336 {
1337   rtx dst_addr, src_addr;
1338   tree call_expr, fn, src_tree, dst_tree, size_tree;
1339   enum machine_mode size_mode;
1340   rtx retval;
1341 
1342   /* Emit code to copy the addresses of DST and SRC and SIZE into new
1343      pseudos.  We can then place those new pseudos into a VAR_DECL and
1344      use them later.  */
1345 
1346   dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1347   src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1348 
1349   dst_addr = convert_memory_address (ptr_mode, dst_addr);
1350   src_addr = convert_memory_address (ptr_mode, src_addr);
1351 
1352   dst_tree = make_tree (ptr_type_node, dst_addr);
1353   src_tree = make_tree (ptr_type_node, src_addr);
1354 
1355   size_mode = TYPE_MODE (sizetype);
1356 
1357   size = convert_to_mode (size_mode, size, 1);
1358   size = copy_to_mode_reg (size_mode, size);
1359 
1360   /* It is incorrect to use the libcall calling conventions to call
1361      memcpy in this context.  This could be a user call to memcpy and
1362      the user may wish to examine the return value from memcpy.  For
1363      targets where libcalls and normal calls have different conventions
1364      for returning pointers, we could end up generating incorrect code.  */
1365 
1366   size_tree = make_tree (sizetype, size);
1367 
1368   fn = emit_block_move_libcall_fn (true);
1369   call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1370   CALL_EXPR_TAILCALL (call_expr) = tailcall;
1371 
1372   retval = expand_normal (call_expr);
1373 
1374   return retval;
1375 }
1376 
1377 /* A subroutine of emit_block_move_via_libcall.  Create the tree node
1378    for the function we use for block copies.  The first time FOR_CALL
1379    is true, we call assemble_external.  */
1380 
1381 static GTY(()) tree block_move_fn;
1382 
1383 void
1384 init_block_move_fn (const char *asmspec)
1385 {
1386   if (!block_move_fn)
1387     {
1388       tree args, fn;
1389 
1390       fn = get_identifier ("memcpy");
1391       args = build_function_type_list (ptr_type_node, ptr_type_node,
1392 				       const_ptr_type_node, sizetype,
1393 				       NULL_TREE);
1394 
1395       fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1396       DECL_EXTERNAL (fn) = 1;
1397       TREE_PUBLIC (fn) = 1;
1398       DECL_ARTIFICIAL (fn) = 1;
1399       TREE_NOTHROW (fn) = 1;
1400       DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1401       DECL_VISIBILITY_SPECIFIED (fn) = 1;
1402 
1403       block_move_fn = fn;
1404     }
1405 
1406   if (asmspec)
1407     set_user_assembler_name (block_move_fn, asmspec);
1408 }
1409 
1410 static tree
1411 emit_block_move_libcall_fn (int for_call)
1412 {
1413   static bool emitted_extern;
1414 
1415   if (!block_move_fn)
1416     init_block_move_fn (NULL);
1417 
1418   if (for_call && !emitted_extern)
1419     {
1420       emitted_extern = true;
1421       make_decl_rtl (block_move_fn);
1422       assemble_external (block_move_fn);
1423     }
1424 
1425   return block_move_fn;
1426 }
1427 
1428 /* A subroutine of emit_block_move.  Copy the data via an explicit
1429    loop.  This is used only when libcalls are forbidden.  */
1430 /* ??? It'd be nice to copy in hunks larger than QImode.  */
1431 
1432 static void
1433 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1434 			  unsigned int align ATTRIBUTE_UNUSED)
1435 {
1436   rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1437   enum machine_mode x_addr_mode
1438     = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1439   enum machine_mode y_addr_mode
1440     = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1441   enum machine_mode iter_mode;
1442 
1443   iter_mode = GET_MODE (size);
1444   if (iter_mode == VOIDmode)
1445     iter_mode = word_mode;
1446 
1447   top_label = gen_label_rtx ();
1448   cmp_label = gen_label_rtx ();
1449   iter = gen_reg_rtx (iter_mode);
1450 
1451   emit_move_insn (iter, const0_rtx);
1452 
1453   x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1454   y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1455   do_pending_stack_adjust ();
1456 
1457   emit_jump (cmp_label);
1458   emit_label (top_label);
1459 
1460   tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1461   x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1462 
1463   if (x_addr_mode != y_addr_mode)
1464     tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1465   y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1466 
1467   x = change_address (x, QImode, x_addr);
1468   y = change_address (y, QImode, y_addr);
1469 
1470   emit_move_insn (x, y);
1471 
1472   tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1473 			     true, OPTAB_LIB_WIDEN);
1474   if (tmp != iter)
1475     emit_move_insn (iter, tmp);
1476 
1477   emit_label (cmp_label);
1478 
1479   emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1480 			   true, top_label);
1481 }
1482 
1483 /* Copy all or part of a value X into registers starting at REGNO.
1484    The number of registers to be filled is NREGS.  */
1485 
1486 void
1487 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1488 {
1489   int i;
1490 #ifdef HAVE_load_multiple
1491   rtx pat;
1492   rtx last;
1493 #endif
1494 
1495   if (nregs == 0)
1496     return;
1497 
1498   if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1499     x = validize_mem (force_const_mem (mode, x));
1500 
1501   /* See if the machine can do this with a load multiple insn.  */
1502 #ifdef HAVE_load_multiple
1503   if (HAVE_load_multiple)
1504     {
1505       last = get_last_insn ();
1506       pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1507 			       GEN_INT (nregs));
1508       if (pat)
1509 	{
1510 	  emit_insn (pat);
1511 	  return;
1512 	}
1513       else
1514 	delete_insns_since (last);
1515     }
1516 #endif
1517 
1518   for (i = 0; i < nregs; i++)
1519     emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1520 		    operand_subword_force (x, i, mode));
1521 }
1522 
1523 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1524    The number of registers to be filled is NREGS.  */
1525 
1526 void
1527 move_block_from_reg (int regno, rtx x, int nregs)
1528 {
1529   int i;
1530 
1531   if (nregs == 0)
1532     return;
1533 
1534   /* See if the machine can do this with a store multiple insn.  */
1535 #ifdef HAVE_store_multiple
1536   if (HAVE_store_multiple)
1537     {
1538       rtx last = get_last_insn ();
1539       rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1540 				    GEN_INT (nregs));
1541       if (pat)
1542 	{
1543 	  emit_insn (pat);
1544 	  return;
1545 	}
1546       else
1547 	delete_insns_since (last);
1548     }
1549 #endif
1550 
1551   for (i = 0; i < nregs; i++)
1552     {
1553       rtx tem = operand_subword (x, i, 1, BLKmode);
1554 
1555       gcc_assert (tem);
1556 
1557       emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1558     }
1559 }
1560 
1561 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1562    ORIG, where ORIG is a non-consecutive group of registers represented by
1563    a PARALLEL.  The clone is identical to the original except in that the
1564    original set of registers is replaced by a new set of pseudo registers.
1565    The new set has the same modes as the original set.  */
1566 
1567 rtx
1568 gen_group_rtx (rtx orig)
1569 {
1570   int i, length;
1571   rtx *tmps;
1572 
1573   gcc_assert (GET_CODE (orig) == PARALLEL);
1574 
1575   length = XVECLEN (orig, 0);
1576   tmps = XALLOCAVEC (rtx, length);
1577 
1578   /* Skip a NULL entry in first slot.  */
1579   i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1580 
1581   if (i)
1582     tmps[0] = 0;
1583 
1584   for (; i < length; i++)
1585     {
1586       enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1587       rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1588 
1589       tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1590     }
1591 
1592   return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1593 }
1594 
1595 /* A subroutine of emit_group_load.  Arguments as for emit_group_load,
1596    except that values are placed in TMPS[i], and must later be moved
1597    into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1598 
1599 static void
1600 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1601 {
1602   rtx src;
1603   int start, i;
1604   enum machine_mode m = GET_MODE (orig_src);
1605 
1606   gcc_assert (GET_CODE (dst) == PARALLEL);
1607 
1608   if (m != VOIDmode
1609       && !SCALAR_INT_MODE_P (m)
1610       && !MEM_P (orig_src)
1611       && GET_CODE (orig_src) != CONCAT)
1612     {
1613       enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1614       if (imode == BLKmode)
1615 	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1616       else
1617 	src = gen_reg_rtx (imode);
1618       if (imode != BLKmode)
1619 	src = gen_lowpart (GET_MODE (orig_src), src);
1620       emit_move_insn (src, orig_src);
1621       /* ...and back again.  */
1622       if (imode != BLKmode)
1623 	src = gen_lowpart (imode, src);
1624       emit_group_load_1 (tmps, dst, src, type, ssize);
1625       return;
1626     }
1627 
1628   /* Check for a NULL entry, used to indicate that the parameter goes
1629      both on the stack and in registers.  */
1630   if (XEXP (XVECEXP (dst, 0, 0), 0))
1631     start = 0;
1632   else
1633     start = 1;
1634 
1635   /* Process the pieces.  */
1636   for (i = start; i < XVECLEN (dst, 0); i++)
1637     {
1638       enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1639       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1640       unsigned int bytelen = GET_MODE_SIZE (mode);
1641       int shift = 0;
1642 
1643       /* Handle trailing fragments that run over the size of the struct.  */
1644       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1645 	{
1646 	  /* Arrange to shift the fragment to where it belongs.
1647 	     extract_bit_field loads to the lsb of the reg.  */
1648 	  if (
1649 #ifdef BLOCK_REG_PADDING
1650 	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1651 	      == (BYTES_BIG_ENDIAN ? upward : downward)
1652 #else
1653 	      BYTES_BIG_ENDIAN
1654 #endif
1655 	      )
1656 	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1657 	  bytelen = ssize - bytepos;
1658 	  gcc_assert (bytelen > 0);
1659 	}
1660 
1661       /* If we won't be loading directly from memory, protect the real source
1662 	 from strange tricks we might play; but make sure that the source can
1663 	 be loaded directly into the destination.  */
1664       src = orig_src;
1665       if (!MEM_P (orig_src)
1666 	  && (!CONSTANT_P (orig_src)
1667 	      || (GET_MODE (orig_src) != mode
1668 		  && GET_MODE (orig_src) != VOIDmode)))
1669 	{
1670 	  if (GET_MODE (orig_src) == VOIDmode)
1671 	    src = gen_reg_rtx (mode);
1672 	  else
1673 	    src = gen_reg_rtx (GET_MODE (orig_src));
1674 
1675 	  emit_move_insn (src, orig_src);
1676 	}
1677 
1678       /* Optimize the access just a bit.  */
1679       if (MEM_P (src)
1680 	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1681 	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1682 	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1683 	  && bytelen == GET_MODE_SIZE (mode))
1684 	{
1685 	  tmps[i] = gen_reg_rtx (mode);
1686 	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1687 	}
1688       else if (COMPLEX_MODE_P (mode)
1689 	       && GET_MODE (src) == mode
1690 	       && bytelen == GET_MODE_SIZE (mode))
1691 	/* Let emit_move_complex do the bulk of the work.  */
1692 	tmps[i] = src;
1693       else if (GET_CODE (src) == CONCAT)
1694 	{
1695 	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1696 	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1697 
1698 	  if ((bytepos == 0 && bytelen == slen0)
1699 	      || (bytepos != 0 && bytepos + bytelen <= slen))
1700 	    {
1701 	      /* The following assumes that the concatenated objects all
1702 		 have the same size.  In this case, a simple calculation
1703 		 can be used to determine the object and the bit field
1704 		 to be extracted.  */
1705 	      tmps[i] = XEXP (src, bytepos / slen0);
1706 	      if (! CONSTANT_P (tmps[i])
1707 		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1708 		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1709 					     (bytepos % slen0) * BITS_PER_UNIT,
1710 					     1, false, NULL_RTX, mode, mode);
1711 	    }
1712 	  else
1713 	    {
1714 	      rtx mem;
1715 
1716 	      gcc_assert (!bytepos);
1717 	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
1718 	      emit_move_insn (mem, src);
1719 	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1720 					   0, 1, false, NULL_RTX, mode, mode);
1721 	    }
1722 	}
1723       /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1724	 SIMD register, which is currently broken.  Until we get GCC
1725 	 to emit proper RTL for these cases, let's dump to memory.  */
1726       else if (VECTOR_MODE_P (GET_MODE (dst))
1727 	       && REG_P (src))
1728 	{
1729 	  int slen = GET_MODE_SIZE (GET_MODE (src));
1730 	  rtx mem;
1731 
1732 	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
1733 	  emit_move_insn (mem, src);
1734 	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
1735 	}
1736       else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1737                && XVECLEN (dst, 0) > 1)
1738         tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1739       else if (CONSTANT_P (src))
1740 	{
1741 	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1742 
1743 	  if (len == ssize)
1744 	    tmps[i] = src;
1745 	  else
1746 	    {
1747 	      rtx first, second;
1748 
1749 	      gcc_assert (2 * len == ssize);
1750 	      split_double (src, &first, &second);
1751 	      if (i)
1752 		tmps[i] = second;
1753 	      else
1754 		tmps[i] = first;
1755 	    }
1756 	}
1757       else if (REG_P (src) && GET_MODE (src) == mode)
1758 	tmps[i] = src;
1759       else
1760 	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1761 				     bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1762 				     mode, mode);
1763 
1764       if (shift)
1765 	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1766 				shift, tmps[i], 0);
1767     }
1768 }
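
/* Worked example for the trailing-fragment handling above (values are
   hypothetical): with SSIZE == 6 and an SImode piece at BYTEPOS == 4,
   the piece overruns the struct by 2 bytes, so BYTELEN is reduced to
   6 - 4 == 2 and, on a big-endian target, SHIFT becomes
   (4 - (6 - 4)) * BITS_PER_UNIT == 16, left-shifting the two loaded
   bytes into the position the register copy expects.  */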
1769 
1770 /* Emit code to move a block SRC of type TYPE to a block DST,
1771    where DST is non-consecutive registers represented by a PARALLEL.
1772    SSIZE represents the total size of block SRC in bytes, or -1
1773    if not known.  */
1774 
1775 void
1776 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1777 {
1778   rtx *tmps;
1779   int i;
1780 
1781   tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1782   emit_group_load_1 (tmps, dst, src, type, ssize);
1783 
1784   /* Copy the extracted pieces into the proper (probable) hard regs.  */
1785   for (i = 0; i < XVECLEN (dst, 0); i++)
1786     {
1787       rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1788       if (d == NULL)
1789 	continue;
1790       emit_move_insn (d, tmps[i]);
1791     }
1792 }
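
/* A typical call (sketch only; the variable names are illustrative)
   loads a BLKmode MEM into the PARALLEL produced by the target's
   function-value hook:

	emit_group_load (dst_parallel, src_mem, type,
			 int_size_in_bytes (type));

   Each extracted piece ends up in the register recorded in the
   corresponding element of DST_PARALLEL.  */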
1793 
1794 /* Similar, but load SRC into new pseudos in a format that looks like
1795    PARALLEL.  This can later be fed to emit_group_move to get things
1796    in the right place.  */
1797 
1798 rtx
1799 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1800 {
1801   rtvec vec;
1802   int i;
1803 
1804   vec = rtvec_alloc (XVECLEN (parallel, 0));
1805   emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1806 
1807   /* Convert the vector to look just like the original PARALLEL, except
1808      with the computed values.  */
1809   for (i = 0; i < XVECLEN (parallel, 0); i++)
1810     {
1811       rtx e = XVECEXP (parallel, 0, i);
1812       rtx d = XEXP (e, 0);
1813 
1814       if (d)
1815 	{
1816 	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1817 	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1818 	}
1819       RTVEC_ELT (vec, i) = e;
1820     }
1821 
1822   return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1823 }
1824 
1825 /* Emit code to move a block SRC to block DST, where SRC and DST are
1826    non-consecutive groups of registers, each represented by a PARALLEL.  */
1827 
1828 void
1829 emit_group_move (rtx dst, rtx src)
1830 {
1831   int i;
1832 
1833   gcc_assert (GET_CODE (src) == PARALLEL
1834 	      && GET_CODE (dst) == PARALLEL
1835 	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
1836 
1837   /* Skip first entry if NULL.  */
1838   for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1839     emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1840 		    XEXP (XVECEXP (src, 0, i), 0));
1841 }
1842 
1843 /* Move a group of registers represented by a PARALLEL into pseudos.  */
1844 
1845 rtx
1846 emit_group_move_into_temps (rtx src)
1847 {
1848   rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1849   int i;
1850 
1851   for (i = 0; i < XVECLEN (src, 0); i++)
1852     {
1853       rtx e = XVECEXP (src, 0, i);
1854       rtx d = XEXP (e, 0);
1855 
1856       if (d)
1857 	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1858       RTVEC_ELT (vec, i) = e;
1859     }
1860 
1861   return gen_rtx_PARALLEL (GET_MODE (src), vec);
1862 }
1863 
1864 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1865    where SRC is non-consecutive registers represented by a PARALLEL.
1866    SSIZE represents the total size of block ORIG_DST, or -1 if not
1867    known.  */
1868 
1869 void
1870 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1871 {
1872   rtx *tmps, dst;
1873   int start, finish, i;
1874   enum machine_mode m = GET_MODE (orig_dst);
1875 
1876   gcc_assert (GET_CODE (src) == PARALLEL);
1877 
1878   if (!SCALAR_INT_MODE_P (m)
1879       && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1880     {
1881       enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1882       if (imode == BLKmode)
1883         dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1884       else
1885         dst = gen_reg_rtx (imode);
1886       emit_group_store (dst, src, type, ssize);
1887       if (imode != BLKmode)
1888         dst = gen_lowpart (GET_MODE (orig_dst), dst);
1889       emit_move_insn (orig_dst, dst);
1890       return;
1891     }
1892 
1893   /* Check for a NULL entry, used to indicate that the parameter goes
1894      both on the stack and in registers.  */
1895   if (XEXP (XVECEXP (src, 0, 0), 0))
1896     start = 0;
1897   else
1898     start = 1;
1899   finish = XVECLEN (src, 0);
1900 
1901   tmps = XALLOCAVEC (rtx, finish);
1902 
1903   /* Copy the (probable) hard regs into pseudos.  */
1904   for (i = start; i < finish; i++)
1905     {
1906       rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1907       if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1908 	{
1909 	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
1910 	  emit_move_insn (tmps[i], reg);
1911 	}
1912       else
1913 	tmps[i] = reg;
1914     }
1915 
1916   /* If we won't be storing directly into memory, protect the real destination
1917      from strange tricks we might play.  */
1918   dst = orig_dst;
1919   if (GET_CODE (dst) == PARALLEL)
1920     {
1921       rtx temp;
1922 
1923       /* We can get a PARALLEL dst if there is a conditional expression in
1924 	 a return statement.  In that case, the dst and src are the same,
1925 	 so no action is necessary.  */
1926       if (rtx_equal_p (dst, src))
1927 	return;
1928 
1929       /* It is unclear if we can ever reach here, but we may as well handle
1930 	 it.  Allocate a temporary, and split this into a store/load to/from
1931 	 the temporary.  */
1932 
1933       temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1934       emit_group_store (temp, src, type, ssize);
1935       emit_group_load (dst, temp, type, ssize);
1936       return;
1937     }
1938   else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1939     {
1940       enum machine_mode outer = GET_MODE (dst);
1941       enum machine_mode inner;
1942       HOST_WIDE_INT bytepos;
1943       bool done = false;
1944       rtx temp;
1945 
1946       if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1947 	dst = gen_reg_rtx (outer);
1948 
1949       /* Make life a bit easier for combine.  */
1950       /* If the first element of the vector is the low part
1951 	 of the destination mode, use a paradoxical subreg to
1952 	 initialize the destination.  */
1953       if (start < finish)
1954 	{
1955 	  inner = GET_MODE (tmps[start]);
1956 	  bytepos = subreg_lowpart_offset (inner, outer);
1957 	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1958 	    {
1959 	      temp = simplify_gen_subreg (outer, tmps[start],
1960 					  inner, 0);
1961 	      if (temp)
1962 		{
1963 		  emit_move_insn (dst, temp);
1964 		  done = true;
1965 		  start++;
1966 		}
1967 	    }
1968 	}
1969 
1970       /* If the first element wasn't the low part, try the last.  */
1971       if (!done
1972 	  && start < finish - 1)
1973 	{
1974 	  inner = GET_MODE (tmps[finish - 1]);
1975 	  bytepos = subreg_lowpart_offset (inner, outer);
1976 	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1977 	    {
1978 	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
1979 					  inner, 0);
1980 	      if (temp)
1981 		{
1982 		  emit_move_insn (dst, temp);
1983 		  done = true;
1984 		  finish--;
1985 		}
1986 	    }
1987 	}
1988 
1989       /* Otherwise, simply initialize the result to zero.  */
1990       if (!done)
1991         emit_move_insn (dst, CONST0_RTX (outer));
1992     }
1993 
1994   /* Process the pieces.  */
1995   for (i = start; i < finish; i++)
1996     {
1997       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1998       enum machine_mode mode = GET_MODE (tmps[i]);
1999       unsigned int bytelen = GET_MODE_SIZE (mode);
2000       unsigned int adj_bytelen = bytelen;
2001       rtx dest = dst;
2002 
2003       /* Handle trailing fragments that run over the size of the struct.  */
2004       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2005 	adj_bytelen = ssize - bytepos;
2006 
2007       if (GET_CODE (dst) == CONCAT)
2008 	{
2009 	  if (bytepos + adj_bytelen
2010 	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2011 	    dest = XEXP (dst, 0);
2012 	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2013 	    {
2014 	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2015 	      dest = XEXP (dst, 1);
2016 	    }
2017 	  else
2018 	    {
2019 	      enum machine_mode dest_mode = GET_MODE (dest);
2020 	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2021 
2022 	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2023 
2024 	      if (GET_MODE_ALIGNMENT (dest_mode)
2025 		  >= GET_MODE_ALIGNMENT (tmp_mode))
2026 		{
2027 		  dest = assign_stack_temp (dest_mode,
2028 					    GET_MODE_SIZE (dest_mode),
2029 					    0);
2030 		  emit_move_insn (adjust_address (dest,
2031 						  tmp_mode,
2032 						  bytepos),
2033 				  tmps[i]);
2034 		  dst = dest;
2035 		}
2036 	      else
2037 		{
2038 		  dest = assign_stack_temp (tmp_mode,
2039 					    GET_MODE_SIZE (tmp_mode),
2040 					    0);
2041 		  emit_move_insn (dest, tmps[i]);
2042 		  dst = adjust_address (dest, dest_mode, bytepos);
2043 		}
2044 	      break;
2045 	    }
2046 	}
2047 
2048       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2049 	{
2050 	  /* store_bit_field always takes its value from the lsb.
2051 	     Move the fragment to the lsb if it's not already there.  */
2052 	  if (
2053 #ifdef BLOCK_REG_PADDING
2054 	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2055 	      == (BYTES_BIG_ENDIAN ? upward : downward)
2056 #else
2057 	      BYTES_BIG_ENDIAN
2058 #endif
2059 	      )
2060 	    {
2061 	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2062 	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2063 				      shift, tmps[i], 0);
2064 	    }
2065 	  bytelen = adj_bytelen;
2066 	}
2067 
2068       /* Optimize the access just a bit.  */
2069       if (MEM_P (dest)
2070 	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2071 	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2072 	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2073 	  && bytelen == GET_MODE_SIZE (mode))
2074 	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2075       else
2076 	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2077 			 0, 0, mode, tmps[i]);
2078     }
2079 
2080   /* Copy from the pseudo into the (probable) hard reg.  */
2081   if (orig_dst != dst)
2082     emit_move_insn (orig_dst, dst);
2083 }
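
/* The trailing-fragment shift above mirrors emit_group_load_1
   (hypothetical values): storing an SImode piece at BYTEPOS == 4 into
   a 6-byte destination on a big-endian target right-shifts TMPS[i] by
   (4 - (6 - 4)) * BITS_PER_UNIT == 16 bits so the two significant
   bytes reach the lsb, and then stores ADJ_BYTELEN == 2 bytes.  */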
2084 
2085 /* Generate code to copy a BLKmode object of TYPE out of a
2086    set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2087    is null, a stack temporary is created.  TGTBLK is returned.
2088 
2089    The purpose of this routine is to handle functions that return
2090    BLKmode structures in registers.  Some machines (the PA for example)
2091    want to return all small structures in registers regardless of the
2092    structure's alignment.  */
2093 
2094 rtx
2095 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2096 {
2097   unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2098   rtx src = NULL, dst = NULL;
2099   unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2100   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2101   enum machine_mode copy_mode;
2102 
2103   if (tgtblk == 0)
2104     {
2105       tgtblk = assign_temp (build_qualified_type (type,
2106 						  (TYPE_QUALS (type)
2107 						   | TYPE_QUAL_CONST)),
2108 			    0, 1, 1);
2109       preserve_temp_slots (tgtblk);
2110     }
2111 
2112   /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2113      into a new pseudo which is a full word.  */
2114 
2115   if (GET_MODE (srcreg) != BLKmode
2116       && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2117     srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2118 
2119   /* If the structure doesn't take up a whole number of words, see whether
2120      SRCREG is padded on the left or on the right.  If it's on the left,
2121      set PADDING_CORRECTION to the number of bits to skip.
2122 
2123     In most ABIs, the structure will be returned at the least significant
2124     end of the register, which translates to right padding on little-endian
2125      targets and left padding on big-endian targets.  The opposite
2126      holds if the structure is returned at the most significant
2127      end of the register.  */
2128   if (bytes % UNITS_PER_WORD != 0
2129       && (targetm.calls.return_in_msb (type)
2130 	  ? !BYTES_BIG_ENDIAN
2131 	  : BYTES_BIG_ENDIAN))
2132     padding_correction
2133       = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2134 
2135   /* Copy the structure BITSIZE bits at a time.  If the target lives in
2136     memory, take care not to read or write past its end by selecting
2137      a copy mode suited to BITSIZE.  This should always be possible given
2138      how it is computed.
2139 
2140      We could probably emit more efficient code for machines which do not use
2141      strict alignment, but it doesn't seem worth the effort at the current
2142      time.  */
2143 
2144   copy_mode = word_mode;
2145   if (MEM_P (tgtblk))
2146     {
2147       enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2148       if (mem_mode != BLKmode)
2149 	copy_mode = mem_mode;
2150     }
2151 
2152   for (bitpos = 0, xbitpos = padding_correction;
2153        bitpos < bytes * BITS_PER_UNIT;
2154        bitpos += bitsize, xbitpos += bitsize)
2155     {
2156       /* We need a new source operand each time xbitpos is on a
2157 	 word boundary and when xbitpos == padding_correction
2158 	 (the first time through).  */
2159       if (xbitpos % BITS_PER_WORD == 0
2160 	  || xbitpos == padding_correction)
2161 	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2162 				     GET_MODE (srcreg));
2163 
2164       /* We need a new destination operand each time bitpos is on
2165 	 a word boundary.  */
2166       if (bitpos % BITS_PER_WORD == 0)
2167 	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2168 
2169       /* Use xbitpos for the source extraction (right justified) and
2170 	 bitpos for the destination store (left justified).  */
2171       store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2172 		       extract_bit_field (src, bitsize,
2173 					  xbitpos % BITS_PER_WORD, 1, false,
2174 					  NULL_RTX, copy_mode, copy_mode));
2175     }
2176 
2177   return tgtblk;
2178 }
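
/* Concrete example of the padding correction (illustrative numbers):
   on a 32-bit big-endian target that does not return values in the
   msb, a 6-byte structure gives bytes % UNITS_PER_WORD == 2, hence
   PADDING_CORRECTION == 32 - 2 * BITS_PER_UNIT == 16, and the copy
   loop starts extracting at bit 16 of the first source word, skipping
   the left padding.  */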
2179 
2180 /* Copy BLKmode value SRC into a register of mode MODE.  Return the
2181    register if it contains any data, otherwise return null.
2182 
2183    This is used on targets that return BLKmode values in registers.  */
2184 
2185 rtx
2186 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2187 {
2188   int i, n_regs;
2189   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2190   unsigned int bitsize;
2191   rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2192   enum machine_mode dst_mode;
2193 
2194   gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2195 
2196   x = expand_normal (src);
2197 
2198   bytes = int_size_in_bytes (TREE_TYPE (src));
2199   if (bytes == 0)
2200     return NULL_RTX;
2201 
2202   /* If the structure doesn't take up a whole number of words, see
2203      whether the register value should be padded on the left or on
2204      the right.  Set PADDING_CORRECTION to the number of padding
2205      bits needed on the left side.
2206 
2207     In most ABIs, the structure will be returned at the least significant
2208     end of the register, which translates to right padding on little-endian
2209      targets and left padding on big-endian targets.  The opposite
2210      holds if the structure is returned at the most significant
2211      end of the register.  */
2212   if (bytes % UNITS_PER_WORD != 0
2213       && (targetm.calls.return_in_msb (TREE_TYPE (src))
2214 	  ? !BYTES_BIG_ENDIAN
2215 	  : BYTES_BIG_ENDIAN))
2216     padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2217 					   * BITS_PER_UNIT));
2218 
2219   n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2220   dst_words = XALLOCAVEC (rtx, n_regs);
2221   bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2222 
2223   /* Copy the structure BITSIZE bits at a time.  */
2224   for (bitpos = 0, xbitpos = padding_correction;
2225        bitpos < bytes * BITS_PER_UNIT;
2226        bitpos += bitsize, xbitpos += bitsize)
2227     {
2228       /* We need a new destination pseudo each time xbitpos is
2229 	 on a word boundary and when xbitpos == padding_correction
2230 	 (the first time through).  */
2231       if (xbitpos % BITS_PER_WORD == 0
2232 	  || xbitpos == padding_correction)
2233 	{
2234 	  /* Generate an appropriate register.  */
2235 	  dst_word = gen_reg_rtx (word_mode);
2236 	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2237 
2238 	  /* Clear the destination before we move anything into it.  */
2239 	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
2240 	}
2241 
2242       /* We need a new source operand each time bitpos is on a word
2243 	 boundary.  */
2244       if (bitpos % BITS_PER_WORD == 0)
2245 	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2246 
2247       /* Use bitpos for the source extraction (left justified) and
2248 	 xbitpos for the destination store (right justified).  */
2249       store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2250 		       0, 0, word_mode,
2251 		       extract_bit_field (src_word, bitsize,
2252 					  bitpos % BITS_PER_WORD, 1, false,
2253 					  NULL_RTX, word_mode, word_mode));
2254     }
2255 
2256   if (mode == BLKmode)
2257     {
2258       /* Find the smallest integer mode large enough to hold the
2259 	 entire structure.  */
2260       for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2261 	   mode != VOIDmode;
2262 	   mode = GET_MODE_WIDER_MODE (mode))
2263 	/* Have we found a large enough mode?  */
2264 	if (GET_MODE_SIZE (mode) >= bytes)
2265 	  break;
2266 
2267       /* A suitable mode should have been found.  */
2268       gcc_assert (mode != VOIDmode);
2269     }
2270 
2271   if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2272     dst_mode = word_mode;
2273   else
2274     dst_mode = mode;
2275   dst = gen_reg_rtx (dst_mode);
2276 
2277   for (i = 0; i < n_regs; i++)
2278     emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2279 
2280   if (mode != dst_mode)
2281     dst = gen_lowpart (mode, dst);
2282 
2283   return dst;
2284 }
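
/* Illustrative numbers: a 10-byte value on a 32-bit target fills
   N_REGS == (10 + 4 - 1) / 4 == 3 word-mode pseudos; if MODE came in
   as BLKmode, the search above picks the narrowest MODE_INT mode whose
   size is at least 10 bytes before assembling the result.  */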
2285 
2286 /* Add a USE expression for REG to the (possibly empty) list pointed
2287    to by CALL_FUSAGE.  REG must denote a hard register.  */
2288 
2289 void
2290 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2291 {
2292   gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2293 
2294   *call_fusage
2295     = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2296 }
2297 
2298 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2299    starting at REGNO.  All of these registers must be hard registers.  */
2300 
2301 void
2302 use_regs (rtx *call_fusage, int regno, int nregs)
2303 {
2304   int i;
2305 
2306   gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2307 
2308   for (i = 0; i < nregs; i++)
2309     use_reg (call_fusage, regno_reg_rtx[regno + i]);
2310 }
2311 
2312 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2313    PARALLEL REGS.  This is for calls that pass values in multiple
2314    non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2315 
2316 void
2317 use_group_regs (rtx *call_fusage, rtx regs)
2318 {
2319   int i;
2320 
2321   for (i = 0; i < XVECLEN (regs, 0); i++)
2322     {
2323       rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2324 
2325       /* A NULL entry means the parameter goes both on the stack and in
2326 	 registers.  This can also be a MEM for targets that pass values
2327 	 partially on the stack and partially in registers.  */
2328       if (reg != 0 && REG_P (reg))
2329 	use_reg (call_fusage, reg);
2330     }
2331 }
2332 
2333 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2334    assignment and the code of the expression on the RHS is CODE.  Return
2335    NULL otherwise.  */
2336 
2337 static gimple
2338 get_def_for_expr (tree name, enum tree_code code)
2339 {
2340   gimple def_stmt;
2341 
2342   if (TREE_CODE (name) != SSA_NAME)
2343     return NULL;
2344 
2345   def_stmt = get_gimple_for_ssa_name (name);
2346   if (!def_stmt
2347       || gimple_assign_rhs_code (def_stmt) != code)
2348     return NULL;
2349 
2350   return def_stmt;
2351 }
2352 
2353 
2354 /* Determine whether the LEN bytes generated by CONSTFUN can be
2355    stored to memory using several move instructions.  CONSTFUNDATA is
2356    a pointer which will be passed as argument in every CONSTFUN call.
2357    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2358    a memset operation and false if it's a copy of a constant string.
2359    Return nonzero if a call to store_by_pieces should succeed.  */
2360 
2361 int
2362 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2363 		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2364 		     void *constfundata, unsigned int align, bool memsetp)
2365 {
2366   unsigned HOST_WIDE_INT l;
2367   unsigned int max_size;
2368   HOST_WIDE_INT offset = 0;
2369   enum machine_mode mode;
2370   enum insn_code icode;
2371   int reverse;
2372   /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
2373   rtx cst ATTRIBUTE_UNUSED;
2374 
2375   if (len == 0)
2376     return 1;
2377 
2378   if (! (memsetp
2379 	 ? SET_BY_PIECES_P (len, align)
2380 	 : STORE_BY_PIECES_P (len, align)))
2381     return 0;
2382 
2383   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2384 
2385   /* We would first store what we can in the largest integer mode, then go to
2386      successively smaller modes.  */
2387 
2388   for (reverse = 0;
2389        reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2390        reverse++)
2391     {
2392       l = len;
2393       max_size = STORE_MAX_PIECES + 1;
2394       while (max_size > 1)
2395 	{
2396 	  mode = widest_int_mode_for_size (max_size);
2397 
2398 	  if (mode == VOIDmode)
2399 	    break;
2400 
2401 	  icode = optab_handler (mov_optab, mode);
2402 	  if (icode != CODE_FOR_nothing
2403 	      && align >= GET_MODE_ALIGNMENT (mode))
2404 	    {
2405 	      unsigned int size = GET_MODE_SIZE (mode);
2406 
2407 	      while (l >= size)
2408 		{
2409 		  if (reverse)
2410 		    offset -= size;
2411 
2412 		  cst = (*constfun) (constfundata, offset, mode);
2413 		  if (!targetm.legitimate_constant_p (mode, cst))
2414 		    return 0;
2415 
2416 		  if (!reverse)
2417 		    offset += size;
2418 
2419 		  l -= size;
2420 		}
2421 	    }
2422 
2423 	  max_size = GET_MODE_SIZE (mode);
2424 	}
2425 
2426       /* The code above should have handled everything.  */
2427       gcc_assert (!l);
2428     }
2429 
2430   return 1;
2431 }
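
/* Example of the mode descent (hypothetical values): with LEN == 7 and
   sufficient alignment, the forward pass checks one 4-byte piece, then
   one 2-byte piece, then one 1-byte piece, calling CONSTFUN at offsets
   0, 4 and 6 and testing each constant with
   targetm.legitimate_constant_p; nothing is actually stored here.  */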
2432 
2433 /* Generate several move instructions to store LEN bytes generated by
2434    CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2435    pointer which will be passed as argument in every CONSTFUN call.
2436    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2437    a memset operation and false if it's a copy of a constant string.
2438    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2439    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2440    stpcpy.  */
2441 
2442 rtx
2443 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2444 		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2445 		 void *constfundata, unsigned int align, bool memsetp, int endp)
2446 {
2447   enum machine_mode to_addr_mode
2448     = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2449   struct store_by_pieces_d data;
2450 
2451   if (len == 0)
2452     {
2453       gcc_assert (endp != 2);
2454       return to;
2455     }
2456 
2457   gcc_assert (memsetp
2458 	      ? SET_BY_PIECES_P (len, align)
2459 	      : STORE_BY_PIECES_P (len, align));
2460   data.constfun = constfun;
2461   data.constfundata = constfundata;
2462   data.len = len;
2463   data.to = to;
2464   store_by_pieces_1 (&data, align);
2465   if (endp)
2466     {
2467       rtx to1;
2468 
2469       gcc_assert (!data.reverse);
2470       if (data.autinc_to)
2471 	{
2472 	  if (endp == 2)
2473 	    {
2474 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2475 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2476 	      else
2477 		data.to_addr = copy_to_mode_reg (to_addr_mode,
2478 						 plus_constant (data.to_addr,
2479 								-1));
2480 	    }
2481 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2482 					   data.offset);
2483 	}
2484       else
2485 	{
2486 	  if (endp == 2)
2487 	    --data.offset;
2488 	  to1 = adjust_address (data.to, QImode, data.offset);
2489 	}
2490       return to1;
2491     }
2492   else
2493     return data.to;
2494 }
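
/* Sketch of the ENDP convention (hypothetical length): after storing
   LEN == 5 bytes at TO, ENDP == 0 returns TO itself, ENDP == 1 returns
   a QImode MEM at offset 5 (mempcpy style), and ENDP == 2 one at
   offset 4 (stpcpy style).  */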
2495 
2496 /* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2497    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2498 
2499 static void
2500 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2501 {
2502   struct store_by_pieces_d data;
2503 
2504   if (len == 0)
2505     return;
2506 
2507   data.constfun = clear_by_pieces_1;
2508   data.constfundata = NULL;
2509   data.len = len;
2510   data.to = to;
2511   store_by_pieces_1 (&data, align);
2512 }
2513 
2514 /* Callback routine for clear_by_pieces.
2515    Return const0_rtx unconditionally.  */
2516 
2517 static rtx
2518 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2519 		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2520 		   enum machine_mode mode ATTRIBUTE_UNUSED)
2521 {
2522   return const0_rtx;
2523 }
2524 
2525 /* Subroutine of clear_by_pieces and store_by_pieces.
2526    Generate several move instructions to store LEN bytes of block TO.  (A MEM
2527    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2528 
2529 static void
2530 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2531 		   unsigned int align ATTRIBUTE_UNUSED)
2532 {
2533   enum machine_mode to_addr_mode
2534     = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2535   rtx to_addr = XEXP (data->to, 0);
2536   unsigned int max_size = STORE_MAX_PIECES + 1;
2537   enum insn_code icode;
2538 
2539   data->offset = 0;
2540   data->to_addr = to_addr;
2541   data->autinc_to
2542     = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2543        || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2544 
2545   data->explicit_inc_to = 0;
2546   data->reverse
2547     = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2548   if (data->reverse)
2549     data->offset = data->len;
2550 
2551   /* If storing requires more than two move insns,
2552      copy addresses to registers (to make displacements shorter)
2553      and use post-increment if available.  */
2554   if (!data->autinc_to
2555       && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2556     {
2557       /* Determine the main mode we'll be using.
2558 	 MODE might not be used depending on the definitions of the
2559 	 USE_* macros below.  */
2560       enum machine_mode mode ATTRIBUTE_UNUSED
2561 	= widest_int_mode_for_size (max_size);
2562 
2563       if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2564 	{
2565 	  data->to_addr = copy_to_mode_reg (to_addr_mode,
2566 					    plus_constant (to_addr, data->len));
2567 	  data->autinc_to = 1;
2568 	  data->explicit_inc_to = -1;
2569 	}
2570 
2571       if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2572 	  && ! data->autinc_to)
2573 	{
2574 	  data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2575 	  data->autinc_to = 1;
2576 	  data->explicit_inc_to = 1;
2577 	}
2578 
2579       if (!data->autinc_to && CONSTANT_P (to_addr))
2580 	data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2581     }
2582 
2583   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2584 
2585   /* First store what we can in the largest integer mode, then go to
2586      successively smaller modes.  */
2587 
2588   while (max_size > 1)
2589     {
2590       enum machine_mode mode = widest_int_mode_for_size (max_size);
2591 
2592       if (mode == VOIDmode)
2593 	break;
2594 
2595       icode = optab_handler (mov_optab, mode);
2596       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2597 	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2598 
2599       max_size = GET_MODE_SIZE (mode);
2600     }
2601 
2602   /* The code above should have handled everything.  */
2603   gcc_assert (!data->len);
2604 }
2605 
2606 /* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2607    with move instructions for mode MODE.  GENFUN is the gen_... function
2608    to make a move insn for that mode.  DATA has all the other info.  */
2609 
2610 static void
2611 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2612 		   struct store_by_pieces_d *data)
2613 {
2614   unsigned int size = GET_MODE_SIZE (mode);
2615   rtx to1, cst;
2616 
2617   while (data->len >= size)
2618     {
2619       if (data->reverse)
2620 	data->offset -= size;
2621 
2622       if (data->autinc_to)
2623 	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2624 					 data->offset);
2625       else
2626 	to1 = adjust_address (data->to, mode, data->offset);
2627 
2628       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2629 	emit_insn (gen_add2_insn (data->to_addr,
2630 				  GEN_INT (-(HOST_WIDE_INT) size)));
2631 
2632       cst = (*data->constfun) (data->constfundata, data->offset, mode);
2633       emit_insn ((*genfun) (to1, cst));
2634 
2635       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2636 	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2637 
2638       if (! data->reverse)
2639 	data->offset += size;
2640 
2641       data->len -= size;
2642     }
2643 }
2644 
2645 /* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2646    its length in bytes.  */
2647 
2648 rtx
2649 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2650 		     unsigned int expected_align, HOST_WIDE_INT expected_size)
2651 {
2652   enum machine_mode mode = GET_MODE (object);
2653   unsigned int align;
2654 
2655   gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2656 
2657   /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2658      just move a zero.  Otherwise, do this a piece at a time.  */
2659   if (mode != BLKmode
2660       && CONST_INT_P (size)
2661       && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2662     {
2663       rtx zero = CONST0_RTX (mode);
2664       if (zero != NULL)
2665 	{
2666 	  emit_move_insn (object, zero);
2667 	  return NULL;
2668 	}
2669 
2670       if (COMPLEX_MODE_P (mode))
2671 	{
2672 	  zero = CONST0_RTX (GET_MODE_INNER (mode));
2673 	  if (zero != NULL)
2674 	    {
2675 	      write_complex_part (object, zero, 0);
2676 	      write_complex_part (object, zero, 1);
2677 	      return NULL;
2678 	    }
2679 	}
2680     }
2681 
2682   if (size == const0_rtx)
2683     return NULL;
2684 
2685   align = MEM_ALIGN (object);
2686 
2687   if (CONST_INT_P (size)
2688       && CLEAR_BY_PIECES_P (INTVAL (size), align))
2689     clear_by_pieces (object, INTVAL (size), align);
2690   else if (set_storage_via_setmem (object, size, const0_rtx, align,
2691 				   expected_align, expected_size))
2692     ;
2693   else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2694     return set_storage_via_libcall (object, size, const0_rtx,
2695 				    method == BLOCK_OP_TAILCALL);
2696   else
2697     gcc_unreachable ();
2698 
2699   return NULL;
2700 }
2701 
2702 rtx
2703 clear_storage (rtx object, rtx size, enum block_op_methods method)
2704 {
2705   return clear_storage_hints (object, size, method, 0, -1);
2706 }
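
/* Typical use (sketch only; OBJ_MEM and LEN are illustrative):

	clear_storage (obj_mem, GEN_INT (len), BLOCK_OP_NORMAL);

   which zeroes the block via clear_by_pieces, a setmem pattern, or a
   memset libcall, depending on the size, alignment and target.  */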
2707 
2708 
2709 /* A subroutine of clear_storage.  Expand a call to memset.
2710    Return the return value of the memset call.  */
2711 
2712 rtx
2713 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2714 {
2715   tree call_expr, fn, object_tree, size_tree, val_tree;
2716   enum machine_mode size_mode;
2717   rtx retval;
2718 
2719   /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2720     place those new pseudos into a VAR_DECL and use them later.  */
2721 
2722   object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2723 
2724   size_mode = TYPE_MODE (sizetype);
2725   size = convert_to_mode (size_mode, size, 1);
2726   size = copy_to_mode_reg (size_mode, size);
2727 
2728   /* It is incorrect to use the libcall calling conventions to call
2729      memset in this context.  This could be a user call to memset and
2730      the user may wish to examine the return value from memset.  For
2731      targets where libcalls and normal calls have different conventions
2732      for returning pointers, we could end up generating incorrect code.  */
2733 
2734   object_tree = make_tree (ptr_type_node, object);
2735   if (!CONST_INT_P (val))
2736     val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2737   size_tree = make_tree (sizetype, size);
2738   val_tree = make_tree (integer_type_node, val);
2739 
2740   fn = clear_storage_libcall_fn (true);
2741   call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2742   CALL_EXPR_TAILCALL (call_expr) = tailcall;
2743 
2744   retval = expand_normal (call_expr);
2745 
2746   return retval;
2747 }
2748 
2749 /* A subroutine of set_storage_via_libcall.  Create the tree node
2750    for the function we use for block clears.  The first time FOR_CALL
2751    is true, we call assemble_external.  */
2752 
2753 tree block_clear_fn;
2754 
2755 void
2756 init_block_clear_fn (const char *asmspec)
2757 {
2758   if (!block_clear_fn)
2759     {
2760       tree fn, args;
2761 
2762       fn = get_identifier ("memset");
2763       args = build_function_type_list (ptr_type_node, ptr_type_node,
2764 				       integer_type_node, sizetype,
2765 				       NULL_TREE);
2766 
2767       fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2768       DECL_EXTERNAL (fn) = 1;
2769       TREE_PUBLIC (fn) = 1;
2770       DECL_ARTIFICIAL (fn) = 1;
2771       TREE_NOTHROW (fn) = 1;
2772       DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2773       DECL_VISIBILITY_SPECIFIED (fn) = 1;
2774 
2775       block_clear_fn = fn;
2776     }
2777 
2778   if (asmspec)
2779     set_user_assembler_name (block_clear_fn, asmspec);
2780 }
2781 
2782 static tree
2783 clear_storage_libcall_fn (int for_call)
2784 {
2785   static bool emitted_extern;
2786 
2787   if (!block_clear_fn)
2788     init_block_clear_fn (NULL);
2789 
2790   if (for_call && !emitted_extern)
2791     {
2792       emitted_extern = true;
2793       make_decl_rtl (block_clear_fn);
2794       assemble_external (block_clear_fn);
2795     }
2796 
2797   return block_clear_fn;
2798 }
2799 
2800 /* Expand a setmem pattern; return true if successful.  */
2801 
2802 bool
2803 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2804 			unsigned int expected_align, HOST_WIDE_INT expected_size)
2805 {
2806   /* Try the most limited insn first, because there's no point
2807      including more than one in the machine description unless
2808      the more limited one has some advantage.  */
2809 
2810   enum machine_mode mode;
2811 
2812   if (expected_align < align)
2813     expected_align = align;
2814 
2815   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2816        mode = GET_MODE_WIDER_MODE (mode))
2817     {
2818       enum insn_code code = direct_optab_handler (setmem_optab, mode);
2819 
2820       if (code != CODE_FOR_nothing
2821 	  /* We don't need MODE to be narrower than
2822 	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2823 	     the mode mask, as it is returned by the macro, it will
2824 	     definitely be less than the actual mode mask.  */
2825 	  && ((CONST_INT_P (size)
2826 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2827 		   <= (GET_MODE_MASK (mode) >> 1)))
2828 	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2829 	{
2830 	  struct expand_operand ops[6];
2831 	  unsigned int nops;
2832 
2833 	  nops = insn_data[(int) code].n_generator_args;
2834 	  gcc_assert (nops == 4 || nops == 6);
2835 
2836 	  create_fixed_operand (&ops[0], object);
2837 	  /* The check above guarantees that this size conversion is valid.  */
2838 	  create_convert_operand_to (&ops[1], size, mode, true);
2839 	  create_convert_operand_from (&ops[2], val, byte_mode, true);
2840 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2841 	  if (nops == 6)
2842 	    {
2843 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2844 	      create_integer_operand (&ops[5], expected_size);
2845 	    }
2846 	  if (maybe_expand_insn (code, nops, ops))
2847 	    return true;
2848 	}
2849     }
2850 
2851   return false;
2852 }
2853 
2854 
2855 /* Write to one of the components of the complex value CPLX.  Write VAL to
2856    the real part if IMAG_P is false, and the imaginary part if it's true.  */
2857 
2858 static void
2859 write_complex_part (rtx cplx, rtx val, bool imag_p)
2860 {
2861   enum machine_mode cmode;
2862   enum machine_mode imode;
2863   unsigned ibitsize;
2864 
2865   if (GET_CODE (cplx) == CONCAT)
2866     {
2867       emit_move_insn (XEXP (cplx, imag_p), val);
2868       return;
2869     }
2870 
2871   cmode = GET_MODE (cplx);
2872   imode = GET_MODE_INNER (cmode);
2873   ibitsize = GET_MODE_BITSIZE (imode);
2874 
2875   /* For MEMs simplify_gen_subreg may generate an invalid new address
2876      because, e.g., the original address is considered mode-dependent
2877      by the target, which restricts simplify_subreg from invoking
2878      adjust_address_nv.  Instead of preparing fallback support for an
2879      invalid address, we call adjust_address_nv directly.  */
2880   if (MEM_P (cplx))
2881     {
2882       emit_move_insn (adjust_address_nv (cplx, imode,
2883 					 imag_p ? GET_MODE_SIZE (imode) : 0),
2884 		      val);
2885       return;
2886     }
2887 
2888   /* If the sub-object is at least word sized, then we know that subregging
2889      will work.  This special case is important, since store_bit_field
2890      wants to operate on integer modes, and there's rarely an OImode to
2891      correspond to TCmode.  */
2892   if (ibitsize >= BITS_PER_WORD
2893       /* For hard regs we have exact predicates.  Assume we can split
2894 	 the original object if it spans an even number of hard regs.
2895 	 This special case is important for SCmode on 64-bit platforms
2896 	 where the natural size of floating-point regs is 32-bit.  */
2897       || (REG_P (cplx)
2898 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2899 	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2900     {
2901       rtx part = simplify_gen_subreg (imode, cplx, cmode,
2902 				      imag_p ? GET_MODE_SIZE (imode) : 0);
2903       if (part)
2904         {
2905 	  emit_move_insn (part, val);
2906 	  return;
2907 	}
2908       else
2909 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
2910 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2911     }
2912 
2913   store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
2914 }
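
/* For example, for an SCmode value in memory the imaginary part lives
   at byte offset GET_MODE_SIZE (SFmode) (typically 4), so the write
   reduces to a single SFmode move through adjust_address_nv.  */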
2915 
2916 /* Extract one of the components of the complex value CPLX.  Extract the
2917    real part if IMAG_P is false, and the imaginary part if it's true.  */
2918 
2919 static rtx
2920 read_complex_part (rtx cplx, bool imag_p)
2921 {
2922   enum machine_mode cmode, imode;
2923   unsigned ibitsize;
2924 
2925   if (GET_CODE (cplx) == CONCAT)
2926     return XEXP (cplx, imag_p);
2927 
2928   cmode = GET_MODE (cplx);
2929   imode = GET_MODE_INNER (cmode);
2930   ibitsize = GET_MODE_BITSIZE (imode);
2931 
2932   /* Special case reads from complex constants that got spilled to memory.  */
2933   if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2934     {
2935       tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2936       if (decl && TREE_CODE (decl) == COMPLEX_CST)
2937 	{
2938 	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2939 	  if (CONSTANT_CLASS_P (part))
2940 	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2941 	}
2942     }
2943 
2944   /* For MEMs simplify_gen_subreg may generate an invalid new address
2945      because, e.g., the original address is considered mode-dependent
2946      by the target, which restricts simplify_subreg from invoking
2947      adjust_address_nv.  Instead of preparing fallback support for an
2948      invalid address, we call adjust_address_nv directly.  */
2949   if (MEM_P (cplx))
2950     return adjust_address_nv (cplx, imode,
2951 			      imag_p ? GET_MODE_SIZE (imode) : 0);
2952 
2953   /* If the sub-object is at least word sized, then we know that subregging
2954      will work.  This special case is important, since extract_bit_field
2955      wants to operate on integer modes, and there's rarely an OImode to
2956      correspond to TCmode.  */
2957   if (ibitsize >= BITS_PER_WORD
2958       /* For hard regs we have exact predicates.  Assume we can split
2959 	 the original object if it spans an even number of hard regs.
2960 	 This special case is important for SCmode on 64-bit platforms
2961 	 where the natural size of floating-point regs is 32-bit.  */
2962       || (REG_P (cplx)
2963 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2964 	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2965     {
2966       rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2967 				     imag_p ? GET_MODE_SIZE (imode) : 0);
2968       if (ret)
2969         return ret;
2970       else
2971 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
2972 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2973     }
2974 
2975   return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2976 			    true, false, NULL_RTX, imode, imode);
2977 }
2978 
2979 /* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
2980    NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
2981    represented in NEW_MODE.  If FORCE is true, this will never happen, as
2982    we'll force-create a SUBREG if needed.  */
2983 
2984 static rtx
2985 emit_move_change_mode (enum machine_mode new_mode,
2986 		       enum machine_mode old_mode, rtx x, bool force)
2987 {
2988   rtx ret;
2989 
2990   if (push_operand (x, GET_MODE (x)))
2991     {
2992       ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2993       MEM_COPY_ATTRIBUTES (ret, x);
2994     }
2995   else if (MEM_P (x))
2996     {
2997       /* We don't have to worry about changing the address since the
2998 	 size in bytes is supposed to be the same.  */
2999       if (reload_in_progress)
3000 	{
3001 	  /* Copy the MEM to change the mode and move any
3002 	     substitutions from the old MEM to the new one.  */
3003 	  ret = adjust_address_nv (x, new_mode, 0);
3004 	  copy_replacements (x, ret);
3005 	}
3006       else
3007 	ret = adjust_address (x, new_mode, 0);
3008     }
3009   else
3010     {
3011       /* Note that we do want simplify_subreg's behavior of validating
3012 	 that the new mode is ok for a hard register.  If we were to use
3013 	 simplify_gen_subreg, we would create the subreg, but would
3014 	 probably run into the target not being able to implement it.  */
3015       /* Except, of course, when FORCE is true, in which case this is
3016	 exactly what we want; that is needed for CCmodes on some targets.  */
3017       if (force)
3018 	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3019       else
3020 	ret = simplify_subreg (new_mode, x, old_mode, 0);
3021     }
3022 
3023   return ret;
3024 }
3025 
3026 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
3027    an integer mode of the same size as MODE.  Returns the instruction
3028    emitted, or NULL if such a move could not be generated.  */
3029 
3030 static rtx
3031 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3032 {
3033   enum machine_mode imode;
3034   enum insn_code code;
3035 
3036   /* There must exist a mode of the exact size we require.  */
3037   imode = int_mode_for_mode (mode);
3038   if (imode == BLKmode)
3039     return NULL_RTX;
3040 
3041   /* The target must support moves in this mode.  */
3042   code = optab_handler (mov_optab, imode);
3043   if (code == CODE_FOR_nothing)
3044     return NULL_RTX;
3045 
3046   x = emit_move_change_mode (imode, mode, x, force);
3047   if (x == NULL_RTX)
3048     return NULL_RTX;
3049   y = emit_move_change_mode (imode, mode, y, force);
3050   if (y == NULL_RTX)
3051     return NULL_RTX;
3052   return emit_insn (GEN_FCN (code) (x, y));
3053 }
3054 
3055 /* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
3056    Return an equivalent MEM that does not use an auto-increment.  */
3057 
3058 static rtx
3059 emit_move_resolve_push (enum machine_mode mode, rtx x)
3060 {
3061   enum rtx_code code = GET_CODE (XEXP (x, 0));
3062   HOST_WIDE_INT adjust;
3063   rtx temp;
3064 
3065   adjust = GET_MODE_SIZE (mode);
3066 #ifdef PUSH_ROUNDING
3067   adjust = PUSH_ROUNDING (adjust);
3068 #endif
3069   if (code == PRE_DEC || code == POST_DEC)
3070     adjust = -adjust;
3071   else if (code == PRE_MODIFY || code == POST_MODIFY)
3072     {
3073       rtx expr = XEXP (XEXP (x, 0), 1);
3074       HOST_WIDE_INT val;
3075 
3076       gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3077       gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3078       val = INTVAL (XEXP (expr, 1));
3079       if (GET_CODE (expr) == MINUS)
3080 	val = -val;
3081       gcc_assert (adjust == val || adjust == -val);
3082       adjust = val;
3083     }
3084 
3085   /* Do not use anti_adjust_stack, since we don't want to update
3086      stack_pointer_delta.  */
3087   temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3088 			      GEN_INT (adjust), stack_pointer_rtx,
3089 			      0, OPTAB_LIB_WIDEN);
3090   if (temp != stack_pointer_rtx)
3091     emit_move_insn (stack_pointer_rtx, temp);
3092 
3093   switch (code)
3094     {
3095     case PRE_INC:
3096     case PRE_DEC:
3097     case PRE_MODIFY:
3098       temp = stack_pointer_rtx;
3099       break;
3100     case POST_INC:
3101     case POST_DEC:
3102     case POST_MODIFY:
3103       temp = plus_constant (stack_pointer_rtx, -adjust);
3104       break;
3105     default:
3106       gcc_unreachable ();
3107     }
3108 
3109   return replace_equiv_address (x, temp);
3110 }
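
/* Illustrative case: for a PRE_DEC push of a DImode value with an
   identity PUSH_ROUNDING, ADJUST becomes -8, the stack pointer is
   decremented by 8 bytes, and the returned MEM addresses
   stack_pointer_rtx directly (the PRE_* arm of the switch above).  */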
3111 
3112 /* A subroutine of emit_move_complex.  Generate a move from Y into X.
3113    X is known to satisfy push_operand, and MODE is known to be complex.
3114    Returns the last instruction emitted.  */
3115 
3116 rtx
3117 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3118 {
3119   enum machine_mode submode = GET_MODE_INNER (mode);
3120   bool imag_first;
3121 
3122 #ifdef PUSH_ROUNDING
3123   unsigned int submodesize = GET_MODE_SIZE (submode);
3124 
3125   /* In case we output to the stack, but the size is smaller than what the
3126      machine can push exactly, we need to use move instructions.  */
3127   if (PUSH_ROUNDING (submodesize) != submodesize)
3128     {
3129       x = emit_move_resolve_push (mode, x);
3130       return emit_move_insn (x, y);
3131     }
3132 #endif
3133 
3134   /* Note that the real part always precedes the imag part in memory
3135      regardless of machine's endianness.  */
3136   switch (GET_CODE (XEXP (x, 0)))
3137     {
3138     case PRE_DEC:
3139     case POST_DEC:
3140       imag_first = true;
3141       break;
3142     case PRE_INC:
3143     case POST_INC:
3144       imag_first = false;
3145       break;
3146     default:
3147       gcc_unreachable ();
3148     }
3149 
3150   emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3151 		  read_complex_part (y, imag_first));
3152   return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3153 			 read_complex_part (y, !imag_first));
3154 }
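
/* Note the ordering this produces: for a PRE_DEC push, IMAG_FIRST is
   true, so the imaginary part is pushed first and the real part second;
   on the downward-growing stack the real part therefore ends up at the
   lower address, preceding the imaginary part as required.  */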
3155 
3156 /* A subroutine of emit_move_complex.  Perform the move from Y to X
3157    via two moves of the parts.  Returns the last instruction emitted.  */
3158 
3159 rtx
3160 emit_move_complex_parts (rtx x, rtx y)
3161 {
3162   /* Show the output dies here.  This is necessary for SUBREGs
3163      of pseudos since we cannot track their lifetimes correctly;
3164      hard regs shouldn't appear here except as return values.  */
3165   if (!reload_completed && !reload_in_progress
3166       && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3167     emit_clobber (x);
3168 
3169   write_complex_part (x, read_complex_part (y, false), false);
3170   write_complex_part (x, read_complex_part (y, true), true);
3171 
3172   return get_last_insn ();
3173 }
3174 
3175 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3176    MODE is known to be complex.  Returns the last instruction emitted.  */
3177 
3178 static rtx
3179 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3180 {
3181   bool try_int;
3182 
3183   /* Need to take special care for pushes, to maintain proper ordering
3184      of the data, and possibly extra padding.  */
3185   if (push_operand (x, mode))
3186     return emit_move_complex_push (mode, x, y);
3187 
3188   /* See if we can coerce the target into moving both values at once.  */
3189 
3190   /* Move floating point as parts.  */
3191   if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3192       && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3193     try_int = false;
3194   /* Not possible if the values are inherently not adjacent.  */
3195   else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3196     try_int = false;
3197   /* Is possible if both are registers (or subregs of registers).  */
3198   else if (register_operand (x, mode) && register_operand (y, mode))
3199     try_int = true;
3200   /* If one of the operands is a memory, and alignment constraints
3201      are friendly enough, we may be able to do combined memory operations.
3202      We do not attempt this if Y is a constant because that combination is
3203      usually better with the by-parts thing below.  */
3204   else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3205 	   && (!STRICT_ALIGNMENT
3206 	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3207     try_int = true;
3208   else
3209     try_int = false;
3210 
3211   if (try_int)
3212     {
3213       rtx ret;
3214 
3215       /* For memory to memory moves, optimal behavior can be had with the
3216 	 existing block move logic.  */
3217       if (MEM_P (x) && MEM_P (y))
3218 	{
3219 	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3220 			   BLOCK_OP_NO_LIBCALL);
3221 	  return get_last_insn ();
3222 	}
3223 
3224       ret = emit_move_via_integer (mode, x, y, true);
3225       if (ret)
3226 	return ret;
3227     }
3228 
3229   return emit_move_complex_parts (x, y);
3230 }
3231 
3232 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3233    MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3234 
3235 static rtx
3236 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3237 {
3238   rtx ret;
3239 
3240   /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3241   if (mode != CCmode)
3242     {
3243       enum insn_code code = optab_handler (mov_optab, CCmode);
3244       if (code != CODE_FOR_nothing)
3245 	{
3246 	  x = emit_move_change_mode (CCmode, mode, x, true);
3247 	  y = emit_move_change_mode (CCmode, mode, y, true);
3248 	  return emit_insn (GEN_FCN (code) (x, y));
3249 	}
3250     }
3251 
3252   /* Otherwise, find the MODE_INT mode of the same width.  */
3253   ret = emit_move_via_integer (mode, x, y, false);
3254   gcc_assert (ret != NULL);
3255   return ret;
3256 }
3257 
3258 /* Return true if word I of OP lies entirely in the
3259    undefined bits of a paradoxical subreg.  */
3260 
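/* For example, on a 64-bit little-endian target (UNITS_PER_WORD == 8,
   !WORDS_BIG_ENDIAN), word 1 of (subreg:TI (reg:DI x) 0) lies entirely in
   the undefined bits of the paradoxical subreg, so this returns true for
   I == 1 and false for I == 0.  */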
3261 static bool
3262 undefined_operand_subword_p (const_rtx op, int i)
3263 {
3264   enum machine_mode innermode, innermostmode;
3265   int offset;
3266   if (GET_CODE (op) != SUBREG)
3267     return false;
3268   innermode = GET_MODE (op);
3269   innermostmode = GET_MODE (SUBREG_REG (op));
3270   offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3271   /* The SUBREG_BYTE represents the offset, as if the value were stored in
3272      memory, except for a paradoxical subreg where we define
3273      SUBREG_BYTE to be 0; undo this exception as in
3274      simplify_subreg.  */
3275   if (SUBREG_BYTE (op) == 0
3276       && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3277     {
3278       int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3279       if (WORDS_BIG_ENDIAN)
3280 	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3281       if (BYTES_BIG_ENDIAN)
3282 	offset += difference % UNITS_PER_WORD;
3283     }
3284   if (offset >= GET_MODE_SIZE (innermostmode)
3285       || offset <= -GET_MODE_SIZE (word_mode))
3286     return true;
3287   return false;
3288 }
3289 
3290 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3291    MODE is any multi-word or full-word mode that lacks a move_insn
3292    pattern.  Note that you will get better code if you define such
3293    patterns, even if they must turn into multiple assembler instructions.  */
3294 
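/* For instance, a TImode copy on a 64-bit target that has no movti
   pattern is emitted here as two word_mode moves obtained with
   operand_subword; when some destination word is a SUBREG (and we are not
   in reload), the sequence is preceded by a CLOBBER of the destination.  */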
3295 static rtx
3296 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3297 {
3298   rtx last_insn = 0;
3299   rtx seq, inner;
3300   bool need_clobber;
3301   int i;
3302 
3303   gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3304 
3305   /* If X is a push on the stack, do the push now and replace
3306      X with a reference to the stack pointer.  */
3307   if (push_operand (x, mode))
3308     x = emit_move_resolve_push (mode, x);
3309 
3310   /* If we are in reload, see if either operand is a MEM whose address
3311      is scheduled for replacement.  */
3312   if (reload_in_progress && MEM_P (x)
3313       && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3314     x = replace_equiv_address_nv (x, inner);
3315   if (reload_in_progress && MEM_P (y)
3316       && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3317     y = replace_equiv_address_nv (y, inner);
3318 
3319   start_sequence ();
3320 
3321   need_clobber = false;
3322   for (i = 0;
3323        i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3324        i++)
3325     {
3326       rtx xpart = operand_subword (x, i, 1, mode);
3327       rtx ypart;
3328 
3329       /* Do not generate code for a move if it would come entirely
3330 	 from the undefined bits of a paradoxical subreg.  */
3331       if (undefined_operand_subword_p (y, i))
3332 	continue;
3333 
3334       ypart = operand_subword (y, i, 1, mode);
3335 
3336       /* If we can't get a part of Y, put Y into memory if it is a
3337 	 constant.  Otherwise, force it into a register.  Then we must
3338 	 be able to get a part of Y.  */
3339       if (ypart == 0 && CONSTANT_P (y))
3340 	{
3341 	  y = use_anchored_address (force_const_mem (mode, y));
3342 	  ypart = operand_subword (y, i, 1, mode);
3343 	}
3344       else if (ypart == 0)
3345 	ypart = operand_subword_force (y, i, mode);
3346 
3347       gcc_assert (xpart && ypart);
3348 
3349       need_clobber |= (GET_CODE (xpart) == SUBREG);
3350 
3351       last_insn = emit_move_insn (xpart, ypart);
3352     }
3353 
3354   seq = get_insns ();
3355   end_sequence ();
3356 
3357   /* Show the output dies here.  This is necessary for SUBREGs
3358      of pseudos since we cannot track their lifetimes correctly;
3359      hard regs shouldn't appear here except as return values.
3360      We never want to emit such a clobber after reload.  */
3361   if (x != y
3362       && ! (reload_in_progress || reload_completed)
3363       && need_clobber != 0)
3364     emit_clobber (x);
3365 
3366   emit_insn (seq);
3367 
3368   return last_insn;
3369 }
3370 
3371 /* Low level part of emit_move_insn.
3372    Called just like emit_move_insn, but assumes X and Y
3373    are basically valid.  */
3374 
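/* In outline, the dispatch below is: use the mov optab for MODE if one
   exists; otherwise handle complex modes via emit_move_complex, decimal
   float and fixed-point modes via an integer-mode move (falling back to
   emit_move_multi_word), MODE_CC modes via emit_move_ccmode, and all
   remaining modes via an integer-mode move when that is safe for constant
   sources, with emit_move_multi_word as the final fallback.  */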
3375 rtx
3376 emit_move_insn_1 (rtx x, rtx y)
3377 {
3378   enum machine_mode mode = GET_MODE (x);
3379   enum insn_code code;
3380 
3381   gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3382 
3383   code = optab_handler (mov_optab, mode);
3384   if (code != CODE_FOR_nothing)
3385     return emit_insn (GEN_FCN (code) (x, y));
3386 
3387   /* Expand complex moves by moving real part and imag part.  */
3388   if (COMPLEX_MODE_P (mode))
3389     return emit_move_complex (mode, x, y);
3390 
3391   if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3392       || ALL_FIXED_POINT_MODE_P (mode))
3393     {
3394       rtx result = emit_move_via_integer (mode, x, y, true);
3395 
3396       /* If we can't find an integer mode, use a multi-word move.  */
3397       if (result)
3398 	return result;
3399       else
3400 	return emit_move_multi_word (mode, x, y);
3401     }
3402 
3403   if (GET_MODE_CLASS (mode) == MODE_CC)
3404     return emit_move_ccmode (mode, x, y);
3405 
3406   /* Try using a move pattern for the corresponding integer mode.  This is
3407      only safe when simplify_subreg can convert MODE constants into integer
3408      constants.  At present, it can only do this reliably if the value
3409      fits within a HOST_WIDE_INT.  */
3410   if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3411     {
3412       rtx ret = emit_move_via_integer (mode, x, y, false);
3413       if (ret)
3414 	return ret;
3415     }
3416 
3417   return emit_move_multi_word (mode, x, y);
3418 }
3419 
3420 /* Generate code to copy Y into X.
3421    Both Y and X must have the same mode, except that
3422    Y can be a constant with VOIDmode.
3423    This mode cannot be BLKmode; use emit_block_move for that.
3424 
3425    Return the last instruction emitted.  */
3426 
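/* For example, emit_move_insn (reg, GEN_INT (42)) with REG in SImode is
   fine even though the CONST_INT has VOIDmode.  If a constant source is
   not legitimate for the target it is forced into the constant pool, and
   when the destination is a register a REG_EQUAL note recording the
   original constant is normally attached to the final move.  */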
3427 rtx
3428 emit_move_insn (rtx x, rtx y)
3429 {
3430   enum machine_mode mode = GET_MODE (x);
3431   rtx y_cst = NULL_RTX;
3432   rtx last_insn, set;
3433 
3434   gcc_assert (mode != BLKmode
3435 	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3436 
3437   if (CONSTANT_P (y))
3438     {
3439       if (optimize
3440 	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3441 	  && (last_insn = compress_float_constant (x, y)))
3442 	return last_insn;
3443 
3444       y_cst = y;
3445 
3446       if (!targetm.legitimate_constant_p (mode, y))
3447 	{
3448 	  y = force_const_mem (mode, y);
3449 
3450 	  /* If the target's cannot_force_const_mem prevented the spill,
3451 	     assume that the target's move expanders will also take care
3452 	     of the non-legitimate constant.  */
3453 	  if (!y)
3454 	    y = y_cst;
3455 	  else
3456 	    y = use_anchored_address (y);
3457 	}
3458     }
3459 
3460   /* If X or Y are memory references, verify that their addresses are valid
3461      for the machine.  */
3462   if (MEM_P (x)
3463       && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3464 					 MEM_ADDR_SPACE (x))
3465 	  && ! push_operand (x, GET_MODE (x))))
3466     x = validize_mem (x);
3467 
3468   if (MEM_P (y)
3469       && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3470 					MEM_ADDR_SPACE (y)))
3471     y = validize_mem (y);
3472 
3473   gcc_assert (mode != BLKmode);
3474 
3475   last_insn = emit_move_insn_1 (x, y);
3476 
3477   if (y_cst && REG_P (x)
3478       && (set = single_set (last_insn)) != NULL_RTX
3479       && SET_DEST (set) == x
3480       && ! rtx_equal_p (y_cst, SET_SRC (set)))
3481     set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3482 
3483   return last_insn;
3484 }
3485 
3486 /* If Y is representable exactly in a narrower mode, and the target can
3487    perform the extension directly from constant or memory, then emit the
3488    move as an extension.  */
3489 
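/* For example, a DFmode move of the constant 1.0 may instead be emitted
   as an SFmode constant extended with the target's extendsfdf2 pattern,
   provided that the truncation to SFmode is exact and that the estimated
   cost of the extension is no worse than that of moving the original
   DFmode constant.  */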
3490 static rtx
3491 compress_float_constant (rtx x, rtx y)
3492 {
3493   enum machine_mode dstmode = GET_MODE (x);
3494   enum machine_mode orig_srcmode = GET_MODE (y);
3495   enum machine_mode srcmode;
3496   REAL_VALUE_TYPE r;
3497   int oldcost, newcost;
3498   bool speed = optimize_insn_for_speed_p ();
3499 
3500   REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3501 
3502   if (targetm.legitimate_constant_p (dstmode, y))
3503     oldcost = set_src_cost (y, speed);
3504   else
3505     oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3506 
3507   for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3508        srcmode != orig_srcmode;
3509        srcmode = GET_MODE_WIDER_MODE (srcmode))
3510     {
3511       enum insn_code ic;
3512       rtx trunc_y, last_insn;
3513 
3514       /* Skip if the target can't extend this way.  */
3515       ic = can_extend_p (dstmode, srcmode, 0);
3516       if (ic == CODE_FOR_nothing)
3517 	continue;
3518 
3519       /* Skip if the narrowed value isn't exact.  */
3520       if (! exact_real_truncate (srcmode, &r))
3521 	continue;
3522 
3523       trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3524 
3525       if (targetm.legitimate_constant_p (srcmode, trunc_y))
3526 	{
3527 	  /* Skip if the target needs extra instructions to perform
3528 	     the extension.  */
3529 	  if (!insn_operand_matches (ic, 1, trunc_y))
3530 	    continue;
3531 	  /* This is valid, but may not be cheaper than the original. */
3532 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3533 				  speed);
3534 	  if (oldcost < newcost)
3535 	    continue;
3536 	}
3537       else if (float_extend_from_mem[dstmode][srcmode])
3538 	{
3539 	  trunc_y = force_const_mem (srcmode, trunc_y);
3540 	  /* This is valid, but may not be cheaper than the original. */
3541 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3542 				  speed);
3543 	  if (oldcost < newcost)
3544 	    continue;
3545 	  trunc_y = validize_mem (trunc_y);
3546 	}
3547       else
3548 	continue;
3549 
3550       /* For CSE's benefit, force the compressed constant pool entry
3551 	 into a new pseudo.  This constant may be used in different modes,
3552 	 and if not, combine will put things back together for us.  */
3553       trunc_y = force_reg (srcmode, trunc_y);
3554       emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3555       last_insn = get_last_insn ();
3556 
3557       if (REG_P (x))
3558 	set_unique_reg_note (last_insn, REG_EQUAL, y);
3559 
3560       return last_insn;
3561     }
3562 
3563   return NULL_RTX;
3564 }
3565 
3566 /* Pushing data onto the stack.  */
3567 
3568 /* Push a block of length SIZE (perhaps variable)
3569    and return an rtx to address the beginning of the block.
3570    The value may be virtual_outgoing_args_rtx.
3571 
3572    EXTRA is the number of bytes of padding to push in addition to SIZE.
3573    BELOW nonzero means this padding comes at low addresses;
3574    otherwise, the padding comes at high addresses.  */
3575 
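/* For instance, push_block (GEN_INT (32), 0, 0) on a STACK_GROWS_DOWNWARD
   target anti-adjusts the stack by 32 bytes and returns an address based
   on virtual_outgoing_args_rtx.  */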
3576 rtx
3577 push_block (rtx size, int extra, int below)
3578 {
3579   rtx temp;
3580 
3581   size = convert_modes (Pmode, ptr_mode, size, 1);
3582   if (CONSTANT_P (size))
3583     anti_adjust_stack (plus_constant (size, extra));
3584   else if (REG_P (size) && extra == 0)
3585     anti_adjust_stack (size);
3586   else
3587     {
3588       temp = copy_to_mode_reg (Pmode, size);
3589       if (extra != 0)
3590 	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3591 			     temp, 0, OPTAB_LIB_WIDEN);
3592       anti_adjust_stack (temp);
3593     }
3594 
3595 #ifndef STACK_GROWS_DOWNWARD
3596   if (0)
3597 #else
3598   if (1)
3599 #endif
3600     {
3601       temp = virtual_outgoing_args_rtx;
3602       if (extra != 0 && below)
3603 	temp = plus_constant (temp, extra);
3604     }
3605   else
3606     {
3607       if (CONST_INT_P (size))
3608 	temp = plus_constant (virtual_outgoing_args_rtx,
3609 			      -INTVAL (size) - (below ? 0 : extra));
3610       else if (extra != 0 && !below)
3611 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3612 			     negate_rtx (Pmode, plus_constant (size, extra)));
3613       else
3614 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3615 			     negate_rtx (Pmode, size));
3616     }
3617 
3618   return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3619 }
3620 
3621 /* A utility routine that returns the base of an auto-inc memory, or NULL.  */
3622 
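/* E.g. for (mem:SI (pre_dec (reg sp))) this returns the stack pointer
   register, while for a MEM whose address is not an auto-inc, or for a
   non-MEM, it returns NULL.  */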
3623 static rtx
3624 mem_autoinc_base (rtx mem)
3625 {
3626   if (MEM_P (mem))
3627     {
3628       rtx addr = XEXP (mem, 0);
3629       if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3630 	return XEXP (addr, 0);
3631     }
3632   return NULL;
3633 }
3634 
3635 /* A utility routine used here, in reload, and in try_split.  The insns
3636    after PREV up to and including LAST are known to adjust the stack,
3637    with a final value of END_ARGS_SIZE.  Iterate backward from LAST
3638    placing notes as appropriate.  PREV may be NULL, indicating the
3639    entire insn sequence prior to LAST should be scanned.
3640 
3641    The set of allowed stack pointer modifications is small:
3642      (1) One or more auto-inc style memory references (aka pushes),
3643      (2) One or more addition/subtraction with the SP as destination,
3644      (3) A single move insn with the SP as destination,
3645      (4) A call_pop insn,
3646      (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3647 
3648    Insns in the sequence that do not modify the SP are ignored,
3649    except for noreturn calls.
3650 
3651    The return value is the amount of adjustment that can be trivially
3652    verified, via immediate operand or auto-inc.  If the adjustment
3653    cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN.  */
3654 
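/* Some illustrative cases: (set (reg sp) (plus (reg sp) (const_int -16)))
   yields -16; a push such as (set (mem:SI (pre_dec (reg sp))) (reg x))
   yields -4 (minus the mode size of the pushed MEM); an insn that does not
   touch the stack pointer yields 0; and a stack pointer set that cannot be
   analyzed yields HOST_WIDE_INT_MIN.  */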
3655 HOST_WIDE_INT
3656 find_args_size_adjust (rtx insn)
3657 {
3658   rtx dest, set, pat;
3659   int i;
3660 
3661   pat = PATTERN (insn);
3662   set = NULL;
3663 
3664   /* Look for a call_pop pattern.  */
3665   if (CALL_P (insn))
3666     {
3667       /* We have to allow non-call_pop patterns for the case
3668 	 of emit_single_push_insn of a TLS address.  */
3669       if (GET_CODE (pat) != PARALLEL)
3670 	return 0;
3671 
3672       /* All call_pop have a stack pointer adjust in the parallel.
3673 	 The call itself is always first, and the stack adjust is
3674 	 usually last, so search from the end.  */
3675       for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3676 	{
3677 	  set = XVECEXP (pat, 0, i);
3678 	  if (GET_CODE (set) != SET)
3679 	    continue;
3680 	  dest = SET_DEST (set);
3681 	  if (dest == stack_pointer_rtx)
3682 	    break;
3683 	}
3684       /* We'd better have found the stack pointer adjust.  */
3685       if (i == 0)
3686 	return 0;
3687       /* Fall through to process the extracted SET and DEST
3688 	 as if it was a standalone insn.  */
3689     }
3690   else if (GET_CODE (pat) == SET)
3691     set = pat;
3692   else if ((set = single_set (insn)) != NULL)
3693     ;
3694   else if (GET_CODE (pat) == PARALLEL)
3695     {
3696       /* ??? Some older ports use a parallel with a stack adjust
3697 	 and a store for a PUSH_ROUNDING pattern, rather than a
3698 	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
3699       /* ??? See h8300 and m68k, pushqi1.  */
3700       for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3701 	{
3702 	  set = XVECEXP (pat, 0, i);
3703 	  if (GET_CODE (set) != SET)
3704 	    continue;
3705 	  dest = SET_DEST (set);
3706 	  if (dest == stack_pointer_rtx)
3707 	    break;
3708 
3709 	  /* We do not expect an auto-inc of the sp in the parallel.  */
3710 	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3711 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3712 			       != stack_pointer_rtx);
3713 	}
3714       if (i < 0)
3715 	return 0;
3716     }
3717   else
3718     return 0;
3719 
3720   dest = SET_DEST (set);
3721 
3722   /* Look for direct modifications of the stack pointer.  */
3723   if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3724     {
3725       /* Look for a trivial adjustment, otherwise assume nothing.  */
3726       /* Note that the SPU restore_stack_block pattern refers to
3727 	 the stack pointer in V4SImode.  Consider that non-trivial.  */
3728       if (SCALAR_INT_MODE_P (GET_MODE (dest))
3729 	  && GET_CODE (SET_SRC (set)) == PLUS
3730 	  && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3731 	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3732 	return INTVAL (XEXP (SET_SRC (set), 1));
3733       /* ??? Reload can generate no-op moves, which will be cleaned
3734 	 up later.  Recognize it and continue searching.  */
3735       else if (rtx_equal_p (dest, SET_SRC (set)))
3736 	return 0;
3737       else
3738 	return HOST_WIDE_INT_MIN;
3739     }
3740   else
3741     {
3742       rtx mem, addr;
3743 
3744       /* Otherwise only think about autoinc patterns.  */
3745       if (mem_autoinc_base (dest) == stack_pointer_rtx)
3746 	{
3747 	  mem = dest;
3748 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3749 			       != stack_pointer_rtx);
3750 	}
3751       else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3752 	mem = SET_SRC (set);
3753       else
3754 	return 0;
3755 
3756       addr = XEXP (mem, 0);
3757       switch (GET_CODE (addr))
3758 	{
3759 	case PRE_INC:
3760 	case POST_INC:
3761 	  return GET_MODE_SIZE (GET_MODE (mem));
3762 	case PRE_DEC:
3763 	case POST_DEC:
3764 	  return -GET_MODE_SIZE (GET_MODE (mem));
3765 	case PRE_MODIFY:
3766 	case POST_MODIFY:
3767 	  addr = XEXP (addr, 1);
3768 	  gcc_assert (GET_CODE (addr) == PLUS);
3769 	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3770 	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3771 	  return INTVAL (XEXP (addr, 1));
3772 	default:
3773 	  gcc_unreachable ();
3774 	}
3775     }
3776 }
3777 
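/* Walk the non-debug insns between PREV (exclusive) and LAST (inclusive),
   which are known to leave the args size at END_ARGS_SIZE after LAST, and
   attach REG_ARGS_SIZE notes to the insns that adjust the stack (and to
   noreturn calls when !ACCUMULATE_OUTGOING_ARGS).  Returns the args size
   in effect before the sequence, or INT_MIN if some adjustment could not
   be trivially extracted by find_args_size_adjust above.  */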
3778 int
3779 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3780 {
3781   int args_size = end_args_size;
3782   bool saw_unknown = false;
3783   rtx insn;
3784 
3785   for (insn = last; insn != prev; insn = PREV_INSN (insn))
3786     {
3787       HOST_WIDE_INT this_delta;
3788 
3789       if (!NONDEBUG_INSN_P (insn))
3790 	continue;
3791 
3792       this_delta = find_args_size_adjust (insn);
3793       if (this_delta == 0)
3794 	{
3795 	  if (!CALL_P (insn)
3796 	      || ACCUMULATE_OUTGOING_ARGS
3797 	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3798 	    continue;
3799 	}
3800 
3801       gcc_assert (!saw_unknown);
3802       if (this_delta == HOST_WIDE_INT_MIN)
3803 	saw_unknown = true;
3804 
3805       add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3806 #ifdef STACK_GROWS_DOWNWARD
3807       this_delta = -this_delta;
3808 #endif
3809       args_size -= this_delta;
3810     }
3811 
3812   return saw_unknown ? INT_MIN : args_size;
3813 }
3814 
3815 #ifdef PUSH_ROUNDING
3816 /* Emit single push insn.  */
3817 
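/* For example, on a hypothetical target where PUSH_ROUNDING rounds every
   push up to 4 bytes, pushing a QImode value uses a rounded_size of 4;
   if the argument is padded downward, the stack pointer is adjusted first
   and the value is then stored at the padded offset.  */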
3818 static void
3819 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3820 {
3821   rtx dest_addr;
3822   unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3823   rtx dest;
3824   enum insn_code icode;
3825 
3826   stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3827   /* If there is a push pattern, use it.  Otherwise try the old way of
3828      handing a MEM representing the push operation to the move expander.  */
3829   icode = optab_handler (push_optab, mode);
3830   if (icode != CODE_FOR_nothing)
3831     {
3832       struct expand_operand ops[1];
3833 
3834       create_input_operand (&ops[0], x, mode);
3835       if (maybe_expand_insn (icode, 1, ops))
3836 	return;
3837     }
3838   if (GET_MODE_SIZE (mode) == rounded_size)
3839     dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3840   /* If we are to pad downward, adjust the stack pointer first and
3841      then store X into the stack location using an offset.  This is
3842      because emit_move_insn does not know how to pad; it does not have
3843      access to the type.  */
3844   else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3845     {
3846       unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3847       HOST_WIDE_INT offset;
3848 
3849       emit_move_insn (stack_pointer_rtx,
3850 		      expand_binop (Pmode,
3851 #ifdef STACK_GROWS_DOWNWARD
3852 				    sub_optab,
3853 #else
3854 				    add_optab,
3855 #endif
3856 				    stack_pointer_rtx,
3857 				    GEN_INT (rounded_size),
3858 				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
3859 
3860       offset = (HOST_WIDE_INT) padding_size;
3861 #ifdef STACK_GROWS_DOWNWARD
3862       if (STACK_PUSH_CODE == POST_DEC)
3863 	/* We have already decremented the stack pointer, so get the
3864 	   previous value.  */
3865 	offset += (HOST_WIDE_INT) rounded_size;
3866 #else
3867       if (STACK_PUSH_CODE == POST_INC)
3868 	/* We have already incremented the stack pointer, so get the
3869 	   previous value.  */
3870 	offset -= (HOST_WIDE_INT) rounded_size;
3871 #endif
3872       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3873     }
3874   else
3875     {
3876 #ifdef STACK_GROWS_DOWNWARD
3877       /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
3878       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3879 				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3880 #else
3881       /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
3882       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3883 				GEN_INT (rounded_size));
3884 #endif
3885       dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3886     }
3887 
3888   dest = gen_rtx_MEM (mode, dest_addr);
3889 
3890   if (type != 0)
3891     {
3892       set_mem_attributes (dest, type, 1);
3893 
3894       if (flag_optimize_sibling_calls)
3895 	/* Function incoming arguments may overlap with sibling call
3896 	   outgoing arguments and we cannot allow reordering of reads
3897 	   from function arguments with stores to outgoing arguments
3898 	   of sibling calls.  */
3899 	set_mem_alias_set (dest, 0);
3900     }
3901   emit_move_insn (dest, x);
3902 }
3903 
3904 /* Emit and annotate a single push insn.  */
3905 
3906 static void
3907 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3908 {
3909   int delta, old_delta = stack_pointer_delta;
3910   rtx prev = get_last_insn ();
3911   rtx last;
3912 
3913   emit_single_push_insn_1 (mode, x, type);
3914 
3915   last = get_last_insn ();
3916 
3917   /* Notice the common case where we emitted exactly one insn.  */
3918   if (PREV_INSN (last) == prev)
3919     {
3920       add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3921       return;
3922     }
3923 
3924   delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
3925   gcc_assert (delta == INT_MIN || delta == old_delta);
3926 }
3927 #endif
3928 
3929 /* Generate code to push X onto the stack, assuming it has mode MODE and
3930    type TYPE.
3931    MODE is redundant except when X is a CONST_INT (since they don't
3932    carry mode info).
3933    SIZE is an rtx for the size of data to be copied (in bytes),
3934    needed only if X is BLKmode.
3935 
3936    ALIGN (in bits) is maximum alignment we can assume.
3937 
3938    If PARTIAL and REG are both nonzero, then copy that many of the first
3939    bytes of X into registers starting with REG, and push the rest of X.
3940    The amount of space pushed is decreased by PARTIAL bytes.
3941    REG must be a hard register in this case.
3942    If REG is zero but PARTIAL is not, take all other actions for an
3943    argument partially in registers, but do not actually load any
3944    registers.
3945 
3946    EXTRA is the amount in bytes of extra space to leave next to this arg.
3947    This is ignored if an argument block has already been allocated.
3948 
3949    On a machine that lacks real push insns, ARGS_ADDR is the address of
3950    the bottom of the argument block for this call.  We use indexing off there
3951    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3952    argument block has not been preallocated.
3953 
3954    ARGS_SO_FAR is the size of args previously pushed for this call.
3955 
3956    REG_PARM_STACK_SPACE is nonzero if functions require stack space
3957    for arguments passed in registers.  If nonzero, it will be the number
3958    of bytes required.  */
3959 
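/* A minimal sketch of one way this might be called (hypothetical values,
   not taken from the actual callers):

     emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
                     GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
                     NULL_RTX, const0_rtx, 0, NULL_RTX);

   i.e. a whole SImode scalar, no partial registers, and no preallocated
   argument block (ARGS_ADDR == 0), so on a PUSH_ROUNDING target with
   PUSH_ARGS this reduces to a single emit_single_push_insn.  */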
3960 void
3961 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3962 		unsigned int align, int partial, rtx reg, int extra,
3963 		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3964 		rtx alignment_pad)
3965 {
3966   rtx xinner;
3967   enum direction stack_direction
3968 #ifdef STACK_GROWS_DOWNWARD
3969     = downward;
3970 #else
3971     = upward;
3972 #endif
3973 
3974   /* Decide where to pad the argument: `downward' for below,
3975      `upward' for above, or `none' for don't pad it.
3976      Default is below for small data on big-endian machines; else above.  */
3977   enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3978 
3979   /* Invert direction if stack is post-decrement.
3980      FIXME: why?  */
3981   if (STACK_PUSH_CODE == POST_DEC)
3982     if (where_pad != none)
3983       where_pad = (where_pad == downward ? upward : downward);
3984 
3985   xinner = x;
3986 
3987   if (mode == BLKmode
3988       || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3989     {
3990       /* Copy a block into the stack, entirely or partially.  */
3991 
3992       rtx temp;
3993       int used;
3994       int offset;
3995       int skip;
3996 
3997       offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3998       used = partial - offset;
3999 
4000       if (mode != BLKmode)
4001 	{
4002 	  /* A value is to be stored in an insufficiently aligned
4003 	     stack slot; copy via a suitably aligned slot if
4004 	     necessary.  */
4005 	  size = GEN_INT (GET_MODE_SIZE (mode));
4006 	  if (!MEM_P (xinner))
4007 	    {
4008 	      temp = assign_temp (type, 0, 1, 1);
4009 	      emit_move_insn (temp, xinner);
4010 	      xinner = temp;
4011 	    }
4012 	}
4013 
4014       gcc_assert (size);
4015 
4016       /* USED is now the # of bytes we need not copy to the stack
4017 	 because registers will take care of them.  */
4018 
4019       if (partial != 0)
4020 	xinner = adjust_address (xinner, BLKmode, used);
4021 
4022       /* If the partial register-part of the arg counts in its stack size,
4023 	 skip the part of stack space corresponding to the registers.
4024 	 Otherwise, start copying to the beginning of the stack space,
4025 	 by setting SKIP to 0.  */
4026       skip = (reg_parm_stack_space == 0) ? 0 : used;
4027 
4028 #ifdef PUSH_ROUNDING
4029       /* Do it with several push insns if that doesn't take lots of insns
4030 	 and if there is no difficulty with push insns that skip bytes
4031 	 on the stack for alignment purposes.  */
4032       if (args_addr == 0
4033 	  && PUSH_ARGS
4034 	  && CONST_INT_P (size)
4035 	  && skip == 0
4036 	  && MEM_ALIGN (xinner) >= align
4037 	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4038 	  /* Here we avoid the case of a structure whose weak alignment
4039 	     forces many pushes of a small amount of data,
4040 	     where such small pushes are rounded in a way that causes trouble.  */
4041 	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4042 	      || align >= BIGGEST_ALIGNMENT
4043 	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4044 		  == (align / BITS_PER_UNIT)))
4045 	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4046 	{
4047 	  /* Push padding now if padding above and stack grows down,
4048 	     or if padding below and stack grows up.
4049 	     But if space already allocated, this has already been done.  */
4050 	  if (extra && args_addr == 0
4051 	      && where_pad != none && where_pad != stack_direction)
4052 	    anti_adjust_stack (GEN_INT (extra));
4053 
4054 	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4055 	}
4056       else
4057 #endif /* PUSH_ROUNDING  */
4058 	{
4059 	  rtx target;
4060 
4061 	  /* Otherwise make space on the stack and copy the data
4062 	     to the address of that space.  */
4063 
4064 	  /* Deduct words put into registers from the size we must copy.  */
4065 	  if (partial != 0)
4066 	    {
4067 	      if (CONST_INT_P (size))
4068 		size = GEN_INT (INTVAL (size) - used);
4069 	      else
4070 		size = expand_binop (GET_MODE (size), sub_optab, size,
4071 				     GEN_INT (used), NULL_RTX, 0,
4072 				     OPTAB_LIB_WIDEN);
4073 	    }
4074 
4075 	  /* Get the address of the stack space.
4076 	     In this case, we do not deal with EXTRA separately.
4077 	     A single stack adjust will do.  */
4078 	  if (! args_addr)
4079 	    {
4080 	      temp = push_block (size, extra, where_pad == downward);
4081 	      extra = 0;
4082 	    }
4083 	  else if (CONST_INT_P (args_so_far))
4084 	    temp = memory_address (BLKmode,
4085 				   plus_constant (args_addr,
4086 						  skip + INTVAL (args_so_far)));
4087 	  else
4088 	    temp = memory_address (BLKmode,
4089 				   plus_constant (gen_rtx_PLUS (Pmode,
4090 								args_addr,
4091 								args_so_far),
4092 						  skip));
4093 
4094 	  if (!ACCUMULATE_OUTGOING_ARGS)
4095 	    {
4096 	      /* If the source is referenced relative to the stack pointer,
4097 		 copy it to another register to stabilize it.  We do not need
4098 		 to do this if we know that we won't be changing sp.  */
4099 
4100 	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4101 		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4102 		temp = copy_to_reg (temp);
4103 	    }
4104 
4105 	  target = gen_rtx_MEM (BLKmode, temp);
4106 
4107 	  /* We do *not* set_mem_attributes here, because incoming arguments
4108 	     may overlap with sibling call outgoing arguments and we cannot
4109 	     allow reordering of reads from function arguments with stores
4110 	     to outgoing arguments of sibling calls.  We do, however, want
4111 	     to record the alignment of the stack slot.  */
4112 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4113 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4114 	  set_mem_align (target, align);
4115 
4116 	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4117 	}
4118     }
4119   else if (partial > 0)
4120     {
4121       /* Scalar partly in registers.  */
4122 
4123       int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4124       int i;
4125       int not_stack;
4126       /* # bytes of start of argument
4127 	 that we must make space for but need not store.  */
4128       int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4129       int args_offset = INTVAL (args_so_far);
4130       int skip;
4131 
4132       /* Push padding now if padding above and stack grows down,
4133 	 or if padding below and stack grows up.
4134 	 But if space already allocated, this has already been done.  */
4135       if (extra && args_addr == 0
4136 	  && where_pad != none && where_pad != stack_direction)
4137 	anti_adjust_stack (GEN_INT (extra));
4138 
4139       /* If we make space by pushing it, we might as well push
4140 	 the real data.  Otherwise, we can leave OFFSET nonzero
4141 	 and leave the space uninitialized.  */
4142       if (args_addr == 0)
4143 	offset = 0;
4144 
4145       /* Now NOT_STACK gets the number of words that we don't need to
4146 	 allocate on the stack.  Convert OFFSET to words too.  */
4147       not_stack = (partial - offset) / UNITS_PER_WORD;
4148       offset /= UNITS_PER_WORD;
4149 
4150       /* If the partial register-part of the arg counts in its stack size,
4151 	 skip the part of stack space corresponding to the registers.
4152 	 Otherwise, start copying to the beginning of the stack space,
4153 	 by setting SKIP to 0.  */
4154       skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4155 
4156       if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4157 	x = validize_mem (force_const_mem (mode, x));
4158 
4159       /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4160 	 SUBREGs of such registers are not allowed.  */
4161       if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4162 	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4163 	x = copy_to_reg (x);
4164 
4165       /* Loop over all the words allocated on the stack for this arg.  */
4166       /* We can do it by words, because any scalar bigger than a word
4167 	 has a size a multiple of a word.  */
4168 #ifndef PUSH_ARGS_REVERSED
4169       for (i = not_stack; i < size; i++)
4170 #else
4171       for (i = size - 1; i >= not_stack; i--)
4172 #endif
4173 	if (i >= not_stack + offset)
4174 	  emit_push_insn (operand_subword_force (x, i, mode),
4175 			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4176 			  0, args_addr,
4177 			  GEN_INT (args_offset + ((i - not_stack + skip)
4178 						  * UNITS_PER_WORD)),
4179 			  reg_parm_stack_space, alignment_pad);
4180     }
4181   else
4182     {
4183       rtx addr;
4184       rtx dest;
4185 
4186       /* Push padding now if padding above and stack grows down,
4187 	 or if padding below and stack grows up.
4188 	 But if space already allocated, this has already been done.  */
4189       if (extra && args_addr == 0
4190 	  && where_pad != none && where_pad != stack_direction)
4191 	anti_adjust_stack (GEN_INT (extra));
4192 
4193 #ifdef PUSH_ROUNDING
4194       if (args_addr == 0 && PUSH_ARGS)
4195 	emit_single_push_insn (mode, x, type);
4196       else
4197 #endif
4198 	{
4199 	  if (CONST_INT_P (args_so_far))
4200 	    addr
4201 	      = memory_address (mode,
4202 				plus_constant (args_addr,
4203 					       INTVAL (args_so_far)));
4204 	  else
4205 	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4206 						       args_so_far));
4207 	  dest = gen_rtx_MEM (mode, addr);
4208 
4209 	  /* We do *not* set_mem_attributes here, because incoming arguments
4210 	     may overlap with sibling call outgoing arguments and we cannot
4211 	     allow reordering of reads from function arguments with stores
4212 	     to outgoing arguments of sibling calls.  We do, however, want
4213 	     to record the alignment of the stack slot.  */
4214 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4215 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4216 	  set_mem_align (dest, align);
4217 
4218 	  emit_move_insn (dest, x);
4219 	}
4220     }
4221 
4222   /* If part should go in registers, copy that part
4223      into the appropriate registers.  Do this now, at the end,
4224      since mem-to-mem copies above may do function calls.  */
4225   if (partial > 0 && reg != 0)
4226     {
4227       /* Handle calls that pass values in multiple non-contiguous locations.
4228 	 The Irix 6 ABI has examples of this.  */
4229       if (GET_CODE (reg) == PARALLEL)
4230 	emit_group_load (reg, x, type, -1);
4231       else
4232 	{
4233 	  gcc_assert (partial % UNITS_PER_WORD == 0);
4234 	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4235 	}
4236     }
4237 
4238   if (extra && args_addr == 0 && where_pad == stack_direction)
4239     anti_adjust_stack (GEN_INT (extra));
4240 
4241   if (alignment_pad && args_addr == 0)
4242     anti_adjust_stack (alignment_pad);
4243 }
4244 
4245 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4246    operations.  */
4247 
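/* That is, X is returned only when not optimizing and X is a pseudo
   register; in all other cases 0 is returned and the caller gets no
   suggested target.  */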
4248 static rtx
4249 get_subtarget (rtx x)
4250 {
4251   return (optimize
4252           || x == 0
4253 	   /* Only registers can be subtargets.  */
4254 	   || !REG_P (x)
4255 	   /* Don't use hard regs to avoid extending their life.  */
4256 	   || REGNO (x) < FIRST_PSEUDO_REGISTER
4257 	  ? 0 : x);
4258 }
4259 
4260 /* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
4261    FIELD is a bitfield.  Returns true if the optimization was successful,
4262    and there's nothing else to do.  */
4263 
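/* For instance, for a store such as S.f |= 3, where f is a bit field
   contained within a single word, the BIT_IOR_EXPR case below can emit
   one mask/shift/or into the word holding the field instead of a full
   extract-modify-insert sequence.  */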
4264 static bool
4265 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4266 				 unsigned HOST_WIDE_INT bitpos,
4267 				 unsigned HOST_WIDE_INT bitregion_start,
4268 				 unsigned HOST_WIDE_INT bitregion_end,
4269 				 enum machine_mode mode1, rtx str_rtx,
4270 				 tree to, tree src)
4271 {
4272   enum machine_mode str_mode = GET_MODE (str_rtx);
4273   unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4274   tree op0, op1;
4275   rtx value, result;
4276   optab binop;
4277   gimple srcstmt;
4278   enum tree_code code;
4279 
4280   if (mode1 != VOIDmode
4281       || bitsize >= BITS_PER_WORD
4282       || str_bitsize > BITS_PER_WORD
4283       || TREE_SIDE_EFFECTS (to)
4284       || TREE_THIS_VOLATILE (to))
4285     return false;
4286 
4287   STRIP_NOPS (src);
4288   if (TREE_CODE (src) != SSA_NAME)
4289     return false;
4290   if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4291     return false;
4292 
4293   srcstmt = get_gimple_for_ssa_name (src);
4294   if (!srcstmt
4295       || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4296     return false;
4297 
4298   code = gimple_assign_rhs_code (srcstmt);
4299 
4300   op0 = gimple_assign_rhs1 (srcstmt);
4301 
4302   /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4303      to find its initialization.  Hopefully the initialization will
4304      be from a bitfield load.  */
4305   if (TREE_CODE (op0) == SSA_NAME)
4306     {
4307       gimple op0stmt = get_gimple_for_ssa_name (op0);
4308 
4309       /* We want to eventually have OP0 be the same as TO, which
4310 	 should be a bitfield.  */
4311       if (!op0stmt
4312 	  || !is_gimple_assign (op0stmt)
4313 	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4314 	return false;
4315       op0 = gimple_assign_rhs1 (op0stmt);
4316     }
4317 
4318   op1 = gimple_assign_rhs2 (srcstmt);
4319 
4320   if (!operand_equal_p (to, op0, 0))
4321     return false;
4322 
4323   if (MEM_P (str_rtx))
4324     {
4325       unsigned HOST_WIDE_INT offset1;
4326 
4327       if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4328 	str_mode = word_mode;
4329       str_mode = get_best_mode (bitsize, bitpos,
4330 				bitregion_start, bitregion_end,
4331 				MEM_ALIGN (str_rtx), str_mode, 0);
4332       if (str_mode == VOIDmode)
4333 	return false;
4334       str_bitsize = GET_MODE_BITSIZE (str_mode);
4335 
4336       offset1 = bitpos;
4337       bitpos %= str_bitsize;
4338       offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4339       str_rtx = adjust_address (str_rtx, str_mode, offset1);
4340     }
4341   else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4342     return false;
4343 
4344   /* If the bit field covers the whole REG/MEM, store_field
4345      will likely generate better code.  */
4346   if (bitsize >= str_bitsize)
4347     return false;
4348 
4349   /* We can't handle fields split across multiple entities.  */
4350   if (bitpos + bitsize > str_bitsize)
4351     return false;
4352 
4353   if (BYTES_BIG_ENDIAN)
4354     bitpos = str_bitsize - bitpos - bitsize;
4355 
4356   switch (code)
4357     {
4358     case PLUS_EXPR:
4359     case MINUS_EXPR:
4360       /* For now, just optimize the case of the topmost bitfield
4361 	 where we don't need to do any masking and also
4362 	 1 bit bitfields where xor can be used.
4363 	 We might win by one instruction for the other bitfields
4364 	 too if insv/extv instructions aren't used, so that
4365 	 can be added later.  */
4366       if (bitpos + bitsize != str_bitsize
4367 	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4368 	break;
4369 
4370       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4371       value = convert_modes (str_mode,
4372 			     TYPE_MODE (TREE_TYPE (op1)), value,
4373 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4374 
4375       /* We may be accessing data outside the field, which means
4376 	 we can alias adjacent data.  */
4377       if (MEM_P (str_rtx))
4378 	{
4379 	  str_rtx = shallow_copy_rtx (str_rtx);
4380 	  set_mem_alias_set (str_rtx, 0);
4381 	  set_mem_expr (str_rtx, 0);
4382 	}
4383 
4384       binop = code == PLUS_EXPR ? add_optab : sub_optab;
4385       if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4386 	{
4387 	  value = expand_and (str_mode, value, const1_rtx, NULL);
4388 	  binop = xor_optab;
4389 	}
4390       value = expand_shift (LSHIFT_EXPR, str_mode, value,
4391 			    bitpos, NULL_RTX, 1);
4392       result = expand_binop (str_mode, binop, str_rtx,
4393 			     value, str_rtx, 1, OPTAB_WIDEN);
4394       if (result != str_rtx)
4395 	emit_move_insn (str_rtx, result);
4396       return true;
4397 
4398     case BIT_IOR_EXPR:
4399     case BIT_XOR_EXPR:
4400       if (TREE_CODE (op1) != INTEGER_CST)
4401 	break;
4402       value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4403       value = convert_modes (GET_MODE (str_rtx),
4404 			     TYPE_MODE (TREE_TYPE (op1)), value,
4405 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4406 
4407       /* We may be accessing data outside the field, which means
4408 	 we can alias adjacent data.  */
4409       if (MEM_P (str_rtx))
4410 	{
4411 	  str_rtx = shallow_copy_rtx (str_rtx);
4412 	  set_mem_alias_set (str_rtx, 0);
4413 	  set_mem_expr (str_rtx, 0);
4414 	}
4415 
4416       binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4417       if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4418 	{
4419 	  rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4420 			      - 1);
4421 	  value = expand_and (GET_MODE (str_rtx), value, mask,
4422 			      NULL_RTX);
4423 	}
4424       value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4425 			    bitpos, NULL_RTX, 1);
4426       result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4427 			     value, str_rtx, 1, OPTAB_WIDEN);
4428       if (result != str_rtx)
4429 	emit_move_insn (str_rtx, result);
4430       return true;
4431 
4432     default:
4433       break;
4434     }
4435 
4436   return false;
4437 }
4438 
4439 /* In the C++ memory model, consecutive bit fields in a structure are
4440    considered one memory location.
4441 
4442    Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4443    returns the bit range of consecutive bits in which this COMPONENT_REF
4444    belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
4445    and *OFFSET may be adjusted in the process.
4446 
4447    If the access does not need to be restricted, 0 is returned in both
4448    *BITSTART and *BITEND.  */
4449 
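/* For example, in struct S { int a : 4; int b : 4; char c; }, the bit
   fields a and b form a single memory location described by their
   DECL_BIT_FIELD_REPRESENTATIVE, while c is a separate location; the
   range returned for a store to a covers a and b but excludes c.  */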
4450 static void
4451 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4452 	       unsigned HOST_WIDE_INT *bitend,
4453 	       tree exp,
4454 	       HOST_WIDE_INT *bitpos,
4455 	       tree *offset)
4456 {
4457   HOST_WIDE_INT bitoffset;
4458   tree field, repr;
4459 
4460   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4461 
4462   field = TREE_OPERAND (exp, 1);
4463   repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4464   /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4465      need to limit the range we can access.  */
4466   if (!repr)
4467     {
4468       *bitstart = *bitend = 0;
4469       return;
4470     }
4471 
4472   /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4473      part of a larger bit field, then the representative does not serve any
4474      useful purpose.  This can occur in Ada.  */
4475   if (handled_component_p (TREE_OPERAND (exp, 0)))
4476     {
4477       enum machine_mode rmode;
4478       HOST_WIDE_INT rbitsize, rbitpos;
4479       tree roffset;
4480       int unsignedp;
4481       int volatilep = 0;
4482       get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4483 			   &roffset, &rmode, &unsignedp, &volatilep, false);
4484       if ((rbitpos % BITS_PER_UNIT) != 0)
4485 	{
4486 	  *bitstart = *bitend = 0;
4487 	  return;
4488 	}
4489     }
4490 
4491   /* Compute the adjustment to bitpos from the offset of the field
4492      relative to the representative.  DECL_FIELD_OFFSET of field and
4493      repr are the same by construction if they are not constants,
4494      see finish_bitfield_layout.  */
4495   if (host_integerp (DECL_FIELD_OFFSET (field), 1)
4496       && host_integerp (DECL_FIELD_OFFSET (repr), 1))
4497     bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
4498 		 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
4499   else
4500     bitoffset = 0;
4501   bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
4502 		- tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
4503 
4504   /* If the adjustment is larger than bitpos, we would have a negative bit
4505      position for the lower bound and this may wreak havoc later.  This can
4506      occur only if we have a non-null offset, so adjust offset and bitpos
4507      to make the lower bound non-negative.  */
4508   if (bitoffset > *bitpos)
4509     {
4510       HOST_WIDE_INT adjust = bitoffset - *bitpos;
4511 
4512       gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4513       gcc_assert (*offset != NULL_TREE);
4514 
4515       *bitpos += adjust;
4516       *offset
4517 	= size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4518       *bitstart = 0;
4519     }
4520   else
4521     *bitstart = *bitpos - bitoffset;
4522 
4523   *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
4524 }
4525 
4526 /* Returns true if the MEM_REF REF refers to an object that does not
4527    reside in memory and whose mode is not BLKmode.  */
4528 
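/* This is the case, for instance, for a MEM_REF whose base is the address
   of a non-addressable local variable whose DECL_RTL is a register.  */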
4529 static bool
4530 mem_ref_refers_to_non_mem_p (tree ref)
4531 {
4532   tree base = TREE_OPERAND (ref, 0);
4533   if (TREE_CODE (base) != ADDR_EXPR)
4534     return false;
4535   base = TREE_OPERAND (base, 0);
4536   return (DECL_P (base)
4537 	  && !TREE_ADDRESSABLE (base)
4538 	  && DECL_MODE (base) != BLKmode
4539 	  && DECL_RTL_SET_P (base)
4540 	  && !MEM_P (DECL_RTL (base)));
4541 }
4542 
4543 /* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
4544    is true, try generating a nontemporal store.  */
4545 
4546 void
4547 expand_assignment (tree to, tree from, bool nontemporal)
4548 {
4549   rtx to_rtx = 0;
4550   rtx result;
4551   enum machine_mode mode;
4552   unsigned int align;
4553   enum insn_code icode;
4554 
4555   /* Don't crash if the lhs of the assignment was erroneous.  */
4556   if (TREE_CODE (to) == ERROR_MARK)
4557     {
4558       expand_normal (from);
4559       return;
4560     }
4561 
4562   /* Optimize away no-op moves without side-effects.  */
4563   if (operand_equal_p (to, from, 0))
4564     return;
4565 
4566   /* Handle misaligned stores.  */
4567   mode = TYPE_MODE (TREE_TYPE (to));
4568   if ((TREE_CODE (to) == MEM_REF
4569        || TREE_CODE (to) == TARGET_MEM_REF)
4570       && mode != BLKmode
4571       && !mem_ref_refers_to_non_mem_p (to)
4572       && ((align = get_object_or_type_alignment (to))
4573 	  < GET_MODE_ALIGNMENT (mode))
4574       && ((icode = optab_handler (movmisalign_optab, mode))
4575 	  != CODE_FOR_nothing))
4576     {
4577       addr_space_t as
4578 	= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
4579       struct expand_operand ops[2];
4580       enum machine_mode address_mode;
4581       rtx reg, op0, mem;
4582 
4583       reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4584       reg = force_not_mem (reg);
4585 
4586       if (TREE_CODE (to) == MEM_REF)
4587 	{
4588 	  tree base = TREE_OPERAND (to, 0);
4589 	  address_mode = targetm.addr_space.address_mode (as);
4590 	  op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4591 	  op0 = convert_memory_address_addr_space (address_mode, op0, as);
4592 	  if (!integer_zerop (TREE_OPERAND (to, 1)))
4593 	    {
4594 	      rtx off
4595 		= immed_double_int_const (mem_ref_offset (to), address_mode);
4596 	      op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
4597 	    }
4598 	  op0 = memory_address_addr_space (mode, op0, as);
4599 	  mem = gen_rtx_MEM (mode, op0);
4600 	  set_mem_attributes (mem, to, 0);
4601 	  set_mem_addr_space (mem, as);
4602 	}
4603       else if (TREE_CODE (to) == TARGET_MEM_REF)
4604 	{
4605 	  struct mem_address addr;
4606 	  get_address_description (to, &addr);
4607 	  op0 = addr_for_mem_ref (&addr, as, true);
4608 	  op0 = memory_address_addr_space (mode, op0, as);
4609 	  mem = gen_rtx_MEM (mode, op0);
4610 	  set_mem_attributes (mem, to, 0);
4611 	  set_mem_addr_space (mem, as);
4612 	}
4613       else
4614 	gcc_unreachable ();
4615       if (TREE_THIS_VOLATILE (to))
4616 	MEM_VOLATILE_P (mem) = 1;
4617 
4618       create_fixed_operand (&ops[0], mem);
4619       create_input_operand (&ops[1], reg, mode);
4620       /* The movmisalign<mode> pattern cannot fail, else the assignment would
4621 	 silently be omitted.  */
4622       expand_insn (icode, 2, ops);
4623       return;
4624     }
4625 
4626   /* Assignment of a structure component needs special treatment
4627      if the structure component's rtx is not simply a MEM.
4628      Assignment of an array element at a constant index, and assignment of
4629      an array element in an unaligned packed structure field, has the same
4630      problem.  Same for (partially) storing into a non-memory object.  */
4631   if (handled_component_p (to)
4632       || (TREE_CODE (to) == MEM_REF
4633 	  && mem_ref_refers_to_non_mem_p (to))
4634       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4635     {
4636       enum machine_mode mode1;
4637       HOST_WIDE_INT bitsize, bitpos;
4638       unsigned HOST_WIDE_INT bitregion_start = 0;
4639       unsigned HOST_WIDE_INT bitregion_end = 0;
4640       tree offset;
4641       int unsignedp;
4642       int volatilep = 0;
4643       tree tem;
4644       bool misalignp;
4645       rtx mem = NULL_RTX;
4646 
4647       push_temp_slots ();
4648       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4649 				 &unsignedp, &volatilep, true);
4650 
4651       if (TREE_CODE (to) == COMPONENT_REF
4652 	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4653 	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4654 
4655       /* If we are going to use store_bit_field and extract_bit_field,
4656 	 make sure to_rtx will be safe for multiple use.  */
4657       mode = TYPE_MODE (TREE_TYPE (tem));
4658       if (TREE_CODE (tem) == MEM_REF
4659 	  && mode != BLKmode
4660 	  && ((align = get_object_or_type_alignment (tem))
4661 	      < GET_MODE_ALIGNMENT (mode))
4662 	  && ((icode = optab_handler (movmisalign_optab, mode))
4663 	      != CODE_FOR_nothing))
4664 	{
4665 	  enum machine_mode address_mode;
4666 	  rtx op0;
4667 	  struct expand_operand ops[2];
4668 	  addr_space_t as = TYPE_ADDR_SPACE
4669 	      (TREE_TYPE (TREE_TYPE (TREE_OPERAND (tem, 0))));
4670 	  tree base = TREE_OPERAND (tem, 0);
4671 
4672 	  misalignp = true;
4673 	  to_rtx = gen_reg_rtx (mode);
4674 
4675 	  address_mode = targetm.addr_space.address_mode (as);
4676 	  op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4677 	  op0 = convert_memory_address_addr_space (address_mode, op0, as);
4678 	  if (!integer_zerop (TREE_OPERAND (tem, 1)))
4679 	    {
4680 	      rtx off = immed_double_int_const (mem_ref_offset (tem),
4681 						address_mode);
4682 	      op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
4683 	    }
4684 	  op0 = memory_address_addr_space (mode, op0, as);
4685 	  mem = gen_rtx_MEM (mode, op0);
4686 	  set_mem_attributes (mem, tem, 0);
4687 	  set_mem_addr_space (mem, as);
4688 	  if (TREE_THIS_VOLATILE (tem))
4689 	    MEM_VOLATILE_P (mem) = 1;
4690 
4691 	  /* If the misaligned store doesn't overwrite all bits, perform
4692 	     rmw cycle on MEM.  */
4693 	  if (bitsize != GET_MODE_BITSIZE (mode))
4694 	    {
4695 	      create_input_operand (&ops[0], to_rtx, mode);
4696 	      create_fixed_operand (&ops[1], mem);
4697 	      /* The movmisalign<mode> pattern cannot fail, else the assignment
4698 		 would silently be omitted.  */
4699 	      expand_insn (icode, 2, ops);
4700 
4701 	      mem = copy_rtx (mem);
4702 	    }
4703 	}
4704       else
4705 	{
4706 	  misalignp = false;
4707 	  to_rtx = expand_normal (tem);
4708 	}
4709 
4710       /* If the bitfield is volatile, we want to access it in the
4711 	 field's mode, not the computed mode.
4712 	 If a MEM has VOIDmode (external with incomplete type),
4713 	 use BLKmode for it instead.  */
4714       if (MEM_P (to_rtx))
4715 	{
4716 	  if (volatilep && flag_strict_volatile_bitfields > 0)
4717 	    to_rtx = adjust_address (to_rtx, mode1, 0);
4718 	  else if (GET_MODE (to_rtx) == VOIDmode)
4719 	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
4720 	}
4721 
4722       if (offset != 0)
4723 	{
4724 	  enum machine_mode address_mode;
4725 	  rtx offset_rtx;
4726 
4727 	  if (!MEM_P (to_rtx))
4728 	    {
4729 	      /* We can get constant negative offsets into arrays with broken
4730 		 user code.  Translate this to a trap instead of ICEing.  */
4731 	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4732 	      expand_builtin_trap ();
4733 	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4734 	    }
4735 
4736 	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4737 	  address_mode
4738 	    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4739 	  if (GET_MODE (offset_rtx) != address_mode)
4740 	    offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4741 
4742 	  /* A constant address in TO_RTX can have VOIDmode, we must not try
4743 	     to call force_reg for that case.  Avoid that case.  */
4744 	  if (MEM_P (to_rtx)
4745 	      && GET_MODE (to_rtx) == BLKmode
4746 	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4747 	      && bitsize > 0
4748 	      && (bitpos % bitsize) == 0
4749 	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4750 	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4751 	    {
4752 	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4753 	      bitpos = 0;
4754 	    }
4755 
4756 	  to_rtx = offset_address (to_rtx, offset_rtx,
4757 				   highest_pow2_factor_for_target (to,
4758 				   				   offset));
4759 	}
4760 
4761       /* No action is needed if the target is not a memory and the field
4762 	 lies completely outside that target.  This can occur if the source
4763 	 code contains an out-of-bounds access to a small array.  */
4764       if (!MEM_P (to_rtx)
4765 	  && GET_MODE (to_rtx) != BLKmode
4766 	  && (unsigned HOST_WIDE_INT) bitpos
4767 	     >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4768 	{
4769 	  expand_normal (from);
4770 	  result = NULL;
4771 	}
4772       /* Handle expand_expr of a complex value returning a CONCAT.  */
4773       else if (GET_CODE (to_rtx) == CONCAT)
4774 	{
4775 	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4776 	  if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4777 	      && bitpos == 0
4778 	      && bitsize == mode_bitsize)
4779 	    result = store_expr (from, to_rtx, false, nontemporal);
4780 	  else if (bitsize == mode_bitsize / 2
4781 		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
4782 	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4783 				 nontemporal);
4784 	  else if (bitpos + bitsize <= mode_bitsize / 2)
4785 	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4786 				  bitregion_start, bitregion_end,
4787 				  mode1, from, TREE_TYPE (tem),
4788 				  get_alias_set (to), nontemporal);
4789 	  else if (bitpos >= mode_bitsize / 2)
4790 	    result = store_field (XEXP (to_rtx, 1), bitsize,
4791 				  bitpos - mode_bitsize / 2,
4792 				  bitregion_start, bitregion_end,
4793 				  mode1, from,
4794 				  TREE_TYPE (tem), get_alias_set (to),
4795 				  nontemporal);
4796 	  else if (bitpos == 0 && bitsize == mode_bitsize)
4797 	    {
4798 	      rtx from_rtx;
4799 	      result = expand_normal (from);
4800 	      from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4801 					      TYPE_MODE (TREE_TYPE (from)), 0);
4802 	      emit_move_insn (XEXP (to_rtx, 0),
4803 			      read_complex_part (from_rtx, false));
4804 	      emit_move_insn (XEXP (to_rtx, 1),
4805 			      read_complex_part (from_rtx, true));
4806 	    }
4807 	  else
4808 	    {
4809 	      rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4810 					    GET_MODE_SIZE (GET_MODE (to_rtx)),
4811 					    0);
4812 	      write_complex_part (temp, XEXP (to_rtx, 0), false);
4813 	      write_complex_part (temp, XEXP (to_rtx, 1), true);
4814 	      result = store_field (temp, bitsize, bitpos,
4815 				    bitregion_start, bitregion_end,
4816 				    mode1, from,
4817 				    TREE_TYPE (tem), get_alias_set (to),
4818 				    nontemporal);
4819 	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4820 	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4821 	    }
4822 	}
4823       else
4824 	{
4825 	  if (MEM_P (to_rtx))
4826 	    {
4827 	      /* If the field is at offset zero, we could have been given the
4828 		 DECL_RTL of the parent struct.  Don't munge it.  */
4829 	      to_rtx = shallow_copy_rtx (to_rtx);
4830 
4831 	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4832 
4833 	      /* Deal with volatile and readonly fields.  The former is only
4834 		 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
4835 	      if (volatilep)
4836 		MEM_VOLATILE_P (to_rtx) = 1;
4837 	      if (component_uses_parent_alias_set (to))
4838 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4839 	    }
4840 
4841 	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
4842 					       bitregion_start, bitregion_end,
4843 					       mode1,
4844 					       to_rtx, to, from))
4845 	    result = NULL;
4846 	  else
4847 	    result = store_field (to_rtx, bitsize, bitpos,
4848 				  bitregion_start, bitregion_end,
4849 				  mode1, from,
4850 				  TREE_TYPE (tem), get_alias_set (to),
4851 				  nontemporal);
4852 	}
4853 
4854       if (misalignp)
4855 	{
4856 	  struct expand_operand ops[2];
4857 
4858 	  create_fixed_operand (&ops[0], mem);
4859 	  create_input_operand (&ops[1], to_rtx, mode);
4860 	  /* The movmisalign<mode> pattern must not fail, otherwise the
4861 	     assignment would silently be omitted.  */
4862 	  expand_insn (icode, 2, ops);
4863 	}
4864 
4865       if (result)
4866 	preserve_temp_slots (result);
4867       free_temp_slots ();
4868       pop_temp_slots ();
4869       return;
4870     }
4871 
4872   /* If the rhs is a function call and its value is not an aggregate,
4873      call the function before we start to compute the lhs.
4874      This is needed for correct code for cases such as
4875      val = setjmp (buf) on machines where a reference to val
4876      requires loading up part of an address in a separate insn.
4877 
4878      Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4879      since it might be a promoted variable where the zero- or sign-extension
4880      needs to be done.  Handling this in the normal way is safe because no
4881      computation is done before the call.  The same is true for SSA names.  */
4882   if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4883       && COMPLETE_TYPE_P (TREE_TYPE (from))
4884       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4885       && ! (((TREE_CODE (to) == VAR_DECL
4886 	      || TREE_CODE (to) == PARM_DECL
4887 	      || TREE_CODE (to) == RESULT_DECL)
4888 	     && REG_P (DECL_RTL (to)))
4889 	    || TREE_CODE (to) == SSA_NAME))
4890     {
4891       rtx value;
4892 
4893       push_temp_slots ();
4894       value = expand_normal (from);
4895       if (to_rtx == 0)
4896 	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4897 
4898       /* Handle calls that return values in multiple non-contiguous locations.
4899 	 The Irix 6 ABI has examples of this.  */
4900       if (GET_CODE (to_rtx) == PARALLEL)
4901 	emit_group_load (to_rtx, value, TREE_TYPE (from),
4902 			 int_size_in_bytes (TREE_TYPE (from)));
4903       else if (GET_MODE (to_rtx) == BLKmode)
4904 	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4905       else
4906 	{
4907 	  if (POINTER_TYPE_P (TREE_TYPE (to)))
4908 	    value = convert_memory_address_addr_space
4909 		      (GET_MODE (to_rtx), value,
4910 		       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4911 
4912 	  emit_move_insn (to_rtx, value);
4913 	}
4914       preserve_temp_slots (to_rtx);
4915       free_temp_slots ();
4916       pop_temp_slots ();
4917       return;
4918     }
4919 
4920   /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
4921   to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4922 
4923   /* Don't move directly into a return register.  */
4924   if (TREE_CODE (to) == RESULT_DECL
4925       && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4926     {
4927       rtx temp;
4928 
4929       push_temp_slots ();
4930       if (REG_P (to_rtx) && TYPE_MODE (TREE_TYPE (from)) == BLKmode)
4931 	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4932       else
4933 	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4934 
4935       if (GET_CODE (to_rtx) == PARALLEL)
4936 	emit_group_load (to_rtx, temp, TREE_TYPE (from),
4937 			 int_size_in_bytes (TREE_TYPE (from)));
4938       else if (temp)
4939 	emit_move_insn (to_rtx, temp);
4940 
4941       preserve_temp_slots (to_rtx);
4942       free_temp_slots ();
4943       pop_temp_slots ();
4944       return;
4945     }
4946 
4947   /* In case we are returning the contents of an object which overlaps
4948      the place the value is being stored, use a safe function when copying
4949      a value through a pointer into a structure value return block.  */
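  /* A sketch of the situation being guarded against (names are only
     illustrative), for a function whose structure value is returned in
     memory via a hidden pointer:

	struct big f (struct big *p)
	{
	  return *p;
	}

     P may point into the structure-return block itself, so the copy below
     is done with a memmove library call rather than an ordinary block
     move.  */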
4950   if (TREE_CODE (to) == RESULT_DECL
4951       && TREE_CODE (from) == INDIRECT_REF
4952       && ADDR_SPACE_GENERIC_P
4953 	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4954       && refs_may_alias_p (to, from)
4955       && cfun->returns_struct
4956       && !cfun->returns_pcc_struct)
4957     {
4958       rtx from_rtx, size;
4959 
4960       push_temp_slots ();
4961       size = expr_size (from);
4962       from_rtx = expand_normal (from);
4963 
4964       emit_library_call (memmove_libfunc, LCT_NORMAL,
4965 			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4966 			 XEXP (from_rtx, 0), Pmode,
4967 			 convert_to_mode (TYPE_MODE (sizetype),
4968 					  size, TYPE_UNSIGNED (sizetype)),
4969 			 TYPE_MODE (sizetype));
4970 
4971       preserve_temp_slots (to_rtx);
4972       free_temp_slots ();
4973       pop_temp_slots ();
4974       return;
4975     }
4976 
4977   /* Compute FROM and store the value in the rtx we got.  */
4978 
4979   push_temp_slots ();
4980   result = store_expr (from, to_rtx, 0, nontemporal);
4981   preserve_temp_slots (result);
4982   free_temp_slots ();
4983   pop_temp_slots ();
4984   return;
4985 }
4986 
4987 /* Emit a nontemporal store insn that moves FROM to TO.  Return true if this
4988    succeeded, false otherwise.  */
4989 
4990 bool
4991 emit_storent_insn (rtx to, rtx from)
4992 {
4993   struct expand_operand ops[2];
4994   enum machine_mode mode = GET_MODE (to);
4995   enum insn_code code = optab_handler (storent_optab, mode);
4996 
4997   if (code == CODE_FOR_nothing)
4998     return false;
4999 
5000   create_fixed_operand (&ops[0], to);
5001   create_input_operand (&ops[1], from, mode);
5002   return maybe_expand_insn (code, 2, ops);
5003 }
5004 
5005 /* Generate code for computing expression EXP,
5006    and storing the value into TARGET.
5007 
5008    If the mode is BLKmode then we may return TARGET itself.
5009    It turns out that in BLKmode it doesn't cause a problem,
5010    because C has no operators that could combine two different
5011    assignments into the same BLKmode object with different values
5012    with no sequence point.  Will other languages need this to
5013    be more thorough?
5014 
5015    If CALL_PARAM_P is nonzero, this is a store into a call param on the
5016    stack, and block moves may need to be treated specially.
5017 
5018    If NONTEMPORAL is true, try using a nontemporal store instruction.  */
5019 
5020 rtx
5021 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5022 {
5023   rtx temp;
5024   rtx alt_rtl = NULL_RTX;
5025   location_t loc = EXPR_LOCATION (exp);
5026 
5027   if (VOID_TYPE_P (TREE_TYPE (exp)))
5028     {
5029       /* C++ can generate ?: expressions with a throw expression in one
5030 	 branch and an rvalue in the other. Here, we resolve attempts to
5031 	 store the throw expression's nonexistent result.  */
5032       gcc_assert (!call_param_p);
5033       expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5034       return NULL_RTX;
5035     }
5036   if (TREE_CODE (exp) == COMPOUND_EXPR)
5037     {
5038       /* Perform first part of compound expression, then assign from second
5039 	 part.  */
5040       expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5041 		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5042       return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5043 			 nontemporal);
5044     }
5045   else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5046     {
5047       /* For a conditional expression, get a safe form of the target.  Then
5048 	 test the condition, doing the appropriate assignment on either
5049 	 side.  This avoids the creation of unnecessary temporaries.
5050 	 For non-BLKmode, it is more efficient not to do this.  */
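      /* Schematically, for a BLKmode assignment such as

	   dst = cond ? a : b;

	 (names are illustrative) the code emitted below is

	   if (!cond) goto lab1;
	   dst = a;  goto lab2;
	 lab1:
	   dst = b;
	 lab2:

	 so that no temporary aggregate is ever materialized.  */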
5051 
5052       rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5053 
5054       do_pending_stack_adjust ();
5055       NO_DEFER_POP;
5056       jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5057       store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5058 		  nontemporal);
5059       emit_jump_insn (gen_jump (lab2));
5060       emit_barrier ();
5061       emit_label (lab1);
5062       store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5063 		  nontemporal);
5064       emit_label (lab2);
5065       OK_DEFER_POP;
5066 
5067       return NULL_RTX;
5068     }
5069   else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5070     /* If this is a scalar in a register that is stored in a wider mode
5071        than the declared mode, compute the result into its declared mode
5072        and then convert to the wider mode.  Our value is the computed
5073        expression.  */
5074     {
5075       rtx inner_target = 0;
5076 
5077       /* We can do the conversion inside EXP, which will often result
5078 	 in some optimizations.  Do the conversion in two steps: first
5079 	 change the signedness, if needed, then the extend.  But don't
5080 	 do this if the type of EXP is a subtype of something else
5081 	 since then the conversion might involve more than just
5082 	 converting modes.  */
5083       if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5084 	  && TREE_TYPE (TREE_TYPE (exp)) == 0
5085 	  && GET_MODE_PRECISION (GET_MODE (target))
5086 	     == TYPE_PRECISION (TREE_TYPE (exp)))
5087 	{
5088 	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
5089 	      != SUBREG_PROMOTED_UNSIGNED_P (target))
5090 	    {
5091 	      /* Some types, e.g. Fortran's logical*4, won't have a signed
5092 		 version, so use the mode instead.  */
5093 	      tree ntype
5094 		= (signed_or_unsigned_type_for
5095 		   (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5096 	      if (ntype == NULL)
5097 		ntype = lang_hooks.types.type_for_mode
5098 		  (TYPE_MODE (TREE_TYPE (exp)),
5099 		   SUBREG_PROMOTED_UNSIGNED_P (target));
5100 
5101 	      exp = fold_convert_loc (loc, ntype, exp);
5102 	    }
5103 
5104 	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5105 				  (GET_MODE (SUBREG_REG (target)),
5106 				   SUBREG_PROMOTED_UNSIGNED_P (target)),
5107 				  exp);
5108 
5109 	  inner_target = SUBREG_REG (target);
5110 	}
5111 
5112       temp = expand_expr (exp, inner_target, VOIDmode,
5113 			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5114 
5115       /* If TEMP is a VOIDmode constant, use convert_modes to make
5116 	 sure that we properly convert it.  */
5117       if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5118 	{
5119 	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5120 				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5121 	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5122 			        GET_MODE (target), temp,
5123 			        SUBREG_PROMOTED_UNSIGNED_P (target));
5124 	}
5125 
5126       convert_move (SUBREG_REG (target), temp,
5127 		    SUBREG_PROMOTED_UNSIGNED_P (target));
5128 
5129       return NULL_RTX;
5130     }
5131   else if ((TREE_CODE (exp) == STRING_CST
5132 	    || (TREE_CODE (exp) == MEM_REF
5133 		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5134 		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5135 		   == STRING_CST
5136 		&& integer_zerop (TREE_OPERAND (exp, 1))))
5137 	   && !nontemporal && !call_param_p
5138 	   && MEM_P (target))
5139     {
5140       /* Optimize initialization of an array with a STRING_CST.  */
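      /* For instance (sizes are illustrative), for

	   char buf[16] = "abcd";

	 the string bytes are emitted with store_by_pieces and the remaining
	 bytes of BUF are cleared with clear_storage below, provided the
	 target can store that many bytes by pieces.  */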
5141       HOST_WIDE_INT exp_len, str_copy_len;
5142       rtx dest_mem;
5143       tree str = TREE_CODE (exp) == STRING_CST
5144 		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5145 
5146       exp_len = int_expr_size (exp);
5147       if (exp_len <= 0)
5148 	goto normal_expr;
5149 
5150       if (TREE_STRING_LENGTH (str) <= 0)
5151 	goto normal_expr;
5152 
5153       str_copy_len = strlen (TREE_STRING_POINTER (str));
5154       if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5155 	goto normal_expr;
5156 
5157       str_copy_len = TREE_STRING_LENGTH (str);
5158       if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5159 	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5160 	{
5161 	  str_copy_len += STORE_MAX_PIECES - 1;
5162 	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
5163 	}
5164       str_copy_len = MIN (str_copy_len, exp_len);
5165       if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5166 				CONST_CAST (char *, TREE_STRING_POINTER (str)),
5167 				MEM_ALIGN (target), false))
5168 	goto normal_expr;
5169 
5170       dest_mem = target;
5171 
5172       dest_mem = store_by_pieces (dest_mem,
5173 				  str_copy_len, builtin_strncpy_read_str,
5174 				  CONST_CAST (char *,
5175 					      TREE_STRING_POINTER (str)),
5176 				  MEM_ALIGN (target), false,
5177 				  exp_len > str_copy_len ? 1 : 0);
5178       if (exp_len > str_copy_len)
5179 	clear_storage (adjust_address (dest_mem, BLKmode, 0),
5180 		       GEN_INT (exp_len - str_copy_len),
5181 		       BLOCK_OP_NORMAL);
5182       return NULL_RTX;
5183     }
5184   else
5185     {
5186       rtx tmp_target;
5187 
5188   normal_expr:
5189       /* If we want to use a nontemporal store, force the value into a
5190 	 register first.  */
5191       tmp_target = nontemporal ? NULL_RTX : target;
5192       temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5193 			       (call_param_p
5194 				? EXPAND_STACK_PARM : EXPAND_NORMAL),
5195 			       &alt_rtl);
5196     }
5197 
5198   /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5199      the same as that of TARGET, adjust the constant.  This is needed, for
5200      example, in case it is a CONST_DOUBLE and we want only a word-sized
5201      value.  */
5202   if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5203       && TREE_CODE (exp) != ERROR_MARK
5204       && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5205     temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5206 			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5207 
5208   /* If value was not generated in the target, store it there.
5209      Convert the value to TARGET's type first if necessary and emit the
5210      pending incrementations that have been queued when expanding EXP.
5211      Note that we cannot emit the whole queue blindly because this will
5212      effectively disable the POST_INC optimization later.
5213 
5214      If TEMP and TARGET compare equal according to rtx_equal_p, but
5215      one or both of them are volatile memory refs, we have to distinguish
5216      two cases:
5217      - expand_expr has used TARGET.  In this case, we must not generate
5218        another copy.  This can be detected by TARGET being equal according
5219        to == .
5220      - expand_expr has not used TARGET - that means that the source just
5221        happens to have the same RTX form.  Since temp will have been created
5222        by expand_expr, it will compare unequal according to == .
5223        We must generate a copy in this case, to reach the correct number
5224        of volatile memory references.  */
5225 
5226   if ((! rtx_equal_p (temp, target)
5227        || (temp != target && (side_effects_p (temp)
5228 			      || side_effects_p (target))))
5229       && TREE_CODE (exp) != ERROR_MARK
5230       /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5231 	 but TARGET is not valid memory reference, TEMP will differ
5232 	 from TARGET although it is really the same location.  */
5233       && !(alt_rtl
5234 	   && rtx_equal_p (alt_rtl, target)
5235 	   && !side_effects_p (alt_rtl)
5236 	   && !side_effects_p (target))
5237       /* If there's nothing to copy, don't bother.  Don't call
5238 	 expr_size unless necessary, because some front ends' (e.g. C++)
5239 	 expr_size hook must not be given objects that are not
5240 	 supposed to be bit-copied or bit-initialized.  */
5241       && expr_size (exp) != const0_rtx)
5242     {
5243       if (GET_MODE (temp) != GET_MODE (target)
5244 	  && GET_MODE (temp) != VOIDmode)
5245 	{
5246 	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5247 	  if (GET_MODE (target) == BLKmode
5248 	      && GET_MODE (temp) == BLKmode)
5249 	    emit_block_move (target, temp, expr_size (exp),
5250 			     (call_param_p
5251 			      ? BLOCK_OP_CALL_PARM
5252 			      : BLOCK_OP_NORMAL));
5253 	  else if (GET_MODE (target) == BLKmode)
5254 	    store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5255 			     0, 0, 0, GET_MODE (temp), temp);
5256 	  else
5257 	    convert_move (target, temp, unsignedp);
5258 	}
5259 
5260       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5261 	{
5262 	  /* Handle copying a string constant into an array.  The string
5263 	     constant may be shorter than the array.  So copy just the string's
5264 	     actual length, and clear the rest.  First get the size of the data
5265 	     type of the string, which is actually the size of the target.  */
5266 	  rtx size = expr_size (exp);
5267 
5268 	  if (CONST_INT_P (size)
5269 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
5270 	    emit_block_move (target, temp, size,
5271 			     (call_param_p
5272 			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5273 	  else
5274 	    {
5275 	      enum machine_mode pointer_mode
5276 		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5277 	      enum machine_mode address_mode
5278 		= targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
5279 
5280 	      /* Compute the size of the data to copy from the string.  */
5281 	      tree copy_size
5282 		= size_binop_loc (loc, MIN_EXPR,
5283 				  make_tree (sizetype, size),
5284 				  size_int (TREE_STRING_LENGTH (exp)));
5285 	      rtx copy_size_rtx
5286 		= expand_expr (copy_size, NULL_RTX, VOIDmode,
5287 			       (call_param_p
5288 				? EXPAND_STACK_PARM : EXPAND_NORMAL));
5289 	      rtx label = 0;
5290 
5291 	      /* Copy that much.  */
5292 	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5293 					       TYPE_UNSIGNED (sizetype));
5294 	      emit_block_move (target, temp, copy_size_rtx,
5295 			       (call_param_p
5296 				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5297 
5298 	      /* Figure out how much is left in TARGET that we have to clear.
5299 		 Do all calculations in pointer_mode.  */
5300 	      if (CONST_INT_P (copy_size_rtx))
5301 		{
5302 		  size = plus_constant (size, -INTVAL (copy_size_rtx));
5303 		  target = adjust_address (target, BLKmode,
5304 					   INTVAL (copy_size_rtx));
5305 		}
5306 	      else
5307 		{
5308 		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5309 				       copy_size_rtx, NULL_RTX, 0,
5310 				       OPTAB_LIB_WIDEN);
5311 
5312 		  if (GET_MODE (copy_size_rtx) != address_mode)
5313 		    copy_size_rtx = convert_to_mode (address_mode,
5314 						     copy_size_rtx,
5315 						     TYPE_UNSIGNED (sizetype));
5316 
5317 		  target = offset_address (target, copy_size_rtx,
5318 					   highest_pow2_factor (copy_size));
5319 		  label = gen_label_rtx ();
5320 		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5321 					   GET_MODE (size), 0, label);
5322 		}
5323 
5324 	      if (size != const0_rtx)
5325 		clear_storage (target, size, BLOCK_OP_NORMAL);
5326 
5327 	      if (label)
5328 		emit_label (label);
5329 	    }
5330 	}
5331       /* Handle calls that return values in multiple non-contiguous locations.
5332 	 The Irix 6 ABI has examples of this.  */
5333       else if (GET_CODE (target) == PARALLEL)
5334 	emit_group_load (target, temp, TREE_TYPE (exp),
5335 			 int_size_in_bytes (TREE_TYPE (exp)));
5336       else if (GET_MODE (temp) == BLKmode)
5337 	emit_block_move (target, temp, expr_size (exp),
5338 			 (call_param_p
5339 			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5340       else if (nontemporal
5341 	       && emit_storent_insn (target, temp))
5342 	/* If we managed to emit a nontemporal store, there is nothing else to
5343 	   do.  */
5344 	;
5345       else
5346 	{
5347 	  temp = force_operand (temp, target);
5348 	  if (temp != target)
5349 	    emit_move_insn (target, temp);
5350 	}
5351     }
5352 
5353   return NULL_RTX;
5354 }
5355 
5356 /* Return true if field F of structure TYPE is a flexible array.  */
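/* For example, DATA in

     struct s { int n; char data[]; };

   is a flexible array member, while a trailing member declared as
   char data[1] is not, since its domain has a maximum value.  */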
5357 
5358 static bool
5359 flexible_array_member_p (const_tree f, const_tree type)
5360 {
5361   const_tree tf;
5362 
5363   tf = TREE_TYPE (f);
5364   return (DECL_CHAIN (f) == NULL
5365 	  && TREE_CODE (tf) == ARRAY_TYPE
5366 	  && TYPE_DOMAIN (tf)
5367 	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5368 	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5369 	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5370 	  && int_size_in_bytes (type) >= 0);
5371 }
5372 
5373 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5374    must have in order for it to completely initialize a value of type TYPE.
5375    Return -1 if the number isn't known.
5376 
5377    If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
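/* For instance, for

     struct s { int a; int b[3]; };

   this returns 2 when FOR_CTOR_P (the two top-level fields) and 4
   otherwise (A plus the three elements of B).  */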
5378 
5379 static HOST_WIDE_INT
5380 count_type_elements (const_tree type, bool for_ctor_p)
5381 {
5382   switch (TREE_CODE (type))
5383     {
5384     case ARRAY_TYPE:
5385       {
5386 	tree nelts;
5387 
5388 	nelts = array_type_nelts (type);
5389 	if (nelts && host_integerp (nelts, 1))
5390 	  {
5391 	    unsigned HOST_WIDE_INT n;
5392 
5393 	    n = tree_low_cst (nelts, 1) + 1;
5394 	    if (n == 0 || for_ctor_p)
5395 	      return n;
5396 	    else
5397 	      return n * count_type_elements (TREE_TYPE (type), false);
5398 	  }
5399 	return for_ctor_p ? -1 : 1;
5400       }
5401 
5402     case RECORD_TYPE:
5403       {
5404 	unsigned HOST_WIDE_INT n;
5405 	tree f;
5406 
5407 	n = 0;
5408 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5409 	  if (TREE_CODE (f) == FIELD_DECL)
5410 	    {
5411 	      if (!for_ctor_p)
5412 		n += count_type_elements (TREE_TYPE (f), false);
5413 	      else if (!flexible_array_member_p (f, type))
5414 		/* Don't count flexible arrays, which are not supposed
5415 		   to be initialized.  */
5416 		n += 1;
5417 	    }
5418 
5419 	return n;
5420       }
5421 
5422     case UNION_TYPE:
5423     case QUAL_UNION_TYPE:
5424       {
5425 	tree f;
5426 	HOST_WIDE_INT n, m;
5427 
5428 	gcc_assert (!for_ctor_p);
5429 	/* Estimate the number of scalars in each field and pick the
5430 	   maximum.  Other estimates would do instead; the idea is simply
5431 	   to make sure that the estimate is not sensitive to the ordering
5432 	   of the fields.  */
5433 	n = 1;
5434 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5435 	  if (TREE_CODE (f) == FIELD_DECL)
5436 	    {
5437 	      m = count_type_elements (TREE_TYPE (f), false);
5438 	      /* If the field doesn't span the whole union, add an extra
5439 		 scalar for the rest.  */
5440 	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5441 				    TYPE_SIZE (type)) != 1)
5442 		m++;
5443 	      if (n < m)
5444 		n = m;
5445 	    }
5446 	return n;
5447       }
5448 
5449     case COMPLEX_TYPE:
5450       return 2;
5451 
5452     case VECTOR_TYPE:
5453       return TYPE_VECTOR_SUBPARTS (type);
5454 
5455     case INTEGER_TYPE:
5456     case REAL_TYPE:
5457     case FIXED_POINT_TYPE:
5458     case ENUMERAL_TYPE:
5459     case BOOLEAN_TYPE:
5460     case POINTER_TYPE:
5461     case OFFSET_TYPE:
5462     case REFERENCE_TYPE:
5463     case NULLPTR_TYPE:
5464       return 1;
5465 
5466     case ERROR_MARK:
5467       return 0;
5468 
5469     case VOID_TYPE:
5470     case METHOD_TYPE:
5471     case FUNCTION_TYPE:
5472     case LANG_TYPE:
5473     default:
5474       gcc_unreachable ();
5475     }
5476 }
5477 
5478 /* Helper for categorize_ctor_elements.  Identical interface.  */
5479 
5480 static bool
5481 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5482 			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
5483 {
5484   unsigned HOST_WIDE_INT idx;
5485   HOST_WIDE_INT nz_elts, init_elts, num_fields;
5486   tree value, purpose, elt_type;
5487 
5488   /* Whether CTOR is a valid constant initializer, in accordance with what
5489      initializer_constant_valid_p does.  If inferred from the constructor
5490      elements, true until proven otherwise.  */
5491   bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5492   bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5493 
5494   nz_elts = 0;
5495   init_elts = 0;
5496   num_fields = 0;
5497   elt_type = NULL_TREE;
5498 
5499   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5500     {
5501       HOST_WIDE_INT mult = 1;
5502 
5503       if (TREE_CODE (purpose) == RANGE_EXPR)
5504 	{
5505 	  tree lo_index = TREE_OPERAND (purpose, 0);
5506 	  tree hi_index = TREE_OPERAND (purpose, 1);
5507 
5508 	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5509 	    mult = (tree_low_cst (hi_index, 1)
5510 		    - tree_low_cst (lo_index, 1) + 1);
5511 	}
5512       num_fields += mult;
5513       elt_type = TREE_TYPE (value);
5514 
5515       switch (TREE_CODE (value))
5516 	{
5517 	case CONSTRUCTOR:
5518 	  {
5519 	    HOST_WIDE_INT nz = 0, ic = 0;
5520 
5521 	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5522 							   p_complete);
5523 
5524 	    nz_elts += mult * nz;
5525  	    init_elts += mult * ic;
5526 
5527 	    if (const_from_elts_p && const_p)
5528 	      const_p = const_elt_p;
5529 	  }
5530 	  break;
5531 
5532 	case INTEGER_CST:
5533 	case REAL_CST:
5534 	case FIXED_CST:
5535 	  if (!initializer_zerop (value))
5536 	    nz_elts += mult;
5537 	  init_elts += mult;
5538 	  break;
5539 
5540 	case STRING_CST:
5541 	  nz_elts += mult * TREE_STRING_LENGTH (value);
5542 	  init_elts += mult * TREE_STRING_LENGTH (value);
5543 	  break;
5544 
5545 	case COMPLEX_CST:
5546 	  if (!initializer_zerop (TREE_REALPART (value)))
5547 	    nz_elts += mult;
5548 	  if (!initializer_zerop (TREE_IMAGPART (value)))
5549 	    nz_elts += mult;
5550 	  init_elts += mult;
5551 	  break;
5552 
5553 	case VECTOR_CST:
5554 	  {
5555 	    tree v;
5556 	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
5557 	      {
5558 		if (!initializer_zerop (TREE_VALUE (v)))
5559 		  nz_elts += mult;
5560 		init_elts += mult;
5561 	      }
5562 	  }
5563 	  break;
5564 
5565 	default:
5566 	  {
5567 	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5568 	    nz_elts += mult * tc;
5569 	    init_elts += mult * tc;
5570 
5571 	    if (const_from_elts_p && const_p)
5572 	      const_p = initializer_constant_valid_p (value, elt_type)
5573 			!= NULL_TREE;
5574 	  }
5575 	  break;
5576 	}
5577     }
5578 
5579   if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5580 						num_fields, elt_type))
5581     *p_complete = false;
5582 
5583   *p_nz_elts += nz_elts;
5584   *p_init_elts += init_elts;
5585 
5586   return const_p;
5587 }
5588 
5589 /* Examine CTOR to discover:
5590    * how many scalar fields are set to nonzero values,
5591      and place it in *P_NZ_ELTS;
5592    * how many scalar fields in total are initialized in CTOR,
5593      and place it in *P_INIT_ELTS;
5594    * whether the constructor is complete -- in the sense that every
5595      meaningful byte is explicitly given a value --
5596      and place it in *P_COMPLETE.
5597 
5598    Return whether or not CTOR is a valid static constant initializer, the same
5599    as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
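/* As an illustration, for a CONSTRUCTOR of an int[4] whose recorded
   elements are 0, 5, 0 and 9, *P_NZ_ELTS becomes 2, *P_INIT_ELTS becomes 4,
   *P_COMPLETE stays true, and the function returns true since every element
   is a valid constant initializer.  */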
5600 
5601 bool
5602 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5603 			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
5604 {
5605   *p_nz_elts = 0;
5606   *p_init_elts = 0;
5607   *p_complete = true;
5608 
5609   return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5610 }
5611 
5612 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5613    of which had type LAST_TYPE.  Each element was itself a complete
5614    initializer, in the sense that every meaningful byte was explicitly
5615    given a value.  Return true if the same is true for the constructor
5616    as a whole.  */
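/* For a record or array type this amounts to comparing NUM_ELTS against
   count_type_elements; e.g. an int[4] initialized by four complete elements
   is complete.  For a union, the single initialized member must be as large
   as the union itself.  */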
5617 
5618 bool
5619 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5620 			  const_tree last_type)
5621 {
5622   if (TREE_CODE (type) == UNION_TYPE
5623       || TREE_CODE (type) == QUAL_UNION_TYPE)
5624     {
5625       if (num_elts == 0)
5626 	return false;
5627 
5628       gcc_assert (num_elts == 1 && last_type);
5629 
5630       /* ??? We could look at each element of the union, and find the
5631 	 largest element, which would avoid comparing the size of the
5632 	 initialized element against any tail padding in the union.
5633 	 Doesn't seem worth the effort...  */
5634       return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5635     }
5636 
5637   return count_type_elements (type, true) == num_elts;
5638 }
5639 
5640 /* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
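/* For example, a complete 16-element constructor with three nonzero
   entries qualifies (3 < 16 / 4), while one with four nonzero entries
   does not.  */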
5641 
5642 static int
5643 mostly_zeros_p (const_tree exp)
5644 {
5645   if (TREE_CODE (exp) == CONSTRUCTOR)
5646     {
5647       HOST_WIDE_INT nz_elts, init_elts;
5648       bool complete_p;
5649 
5650       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5651       return !complete_p || nz_elts < init_elts / 4;
5652     }
5653 
5654   return initializer_zerop (exp);
5655 }
5656 
5657 /* Return 1 if EXP contains all zeros.  */
5658 
5659 static int
5660 all_zeros_p (const_tree exp)
5661 {
5662   if (TREE_CODE (exp) == CONSTRUCTOR)
5663     {
5664       HOST_WIDE_INT nz_elts, init_elts;
5665       bool complete_p;
5666 
5667       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5668       return nz_elts == 0;
5669     }
5670 
5671   return initializer_zerop (exp);
5672 }
5673 
5674 /* Helper function for store_constructor.
5675    TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5676    TYPE is the type of the CONSTRUCTOR, not the element type.
5677    CLEARED is as for store_constructor.
5678    ALIAS_SET is the alias set to use for any stores.
5679 
5680    This provides a recursive shortcut back to store_constructor when it isn't
5681    necessary to go through store_field.  This is so that we can pass through
5682    the cleared field to let store_constructor know that we may not have to
5683    clear a substructure if the outer structure has already been cleared.  */
5684 
5685 static void
5686 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5687 			 HOST_WIDE_INT bitpos, enum machine_mode mode,
5688 			 tree exp, tree type, int cleared,
5689 			 alias_set_type alias_set)
5690 {
5691   if (TREE_CODE (exp) == CONSTRUCTOR
5692       /* We can only call store_constructor recursively if the size and
5693 	 bit position are on a byte boundary.  */
5694       && bitpos % BITS_PER_UNIT == 0
5695       && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5696       /* If we have a nonzero bitpos for a register target, then we just
5697 	 let store_field do the bitfield handling.  This is unlikely to
5698 	 generate unnecessary clear instructions anyway.  */
5699       && (bitpos == 0 || MEM_P (target)))
5700     {
5701       if (MEM_P (target))
5702 	target
5703 	  = adjust_address (target,
5704 			    GET_MODE (target) == BLKmode
5705 			    || 0 != (bitpos
5706 				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
5707 			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5708 
5709 
5710       /* Update the alias set, if required.  */
5711       if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5712 	  && MEM_ALIAS_SET (target) != 0)
5713 	{
5714 	  target = copy_rtx (target);
5715 	  set_mem_alias_set (target, alias_set);
5716 	}
5717 
5718       store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5719     }
5720   else
5721     store_field (target, bitsize, bitpos, 0, 0, mode, exp, type, alias_set,
5722 		 false);
5723 }
5724 
5725 /* Store the value of constructor EXP into the rtx TARGET.
5726    TARGET is either a REG or a MEM; we know it cannot conflict, since
5727    safe_from_p has been called.
5728    CLEARED is true if TARGET is known to have been zero'd.
5729    SIZE is the number of bytes of TARGET we are allowed to modify: this
5730    may not be the same as the size of EXP if we are assigning to a field
5731    which has been packed to exclude padding bits.  */
5732 
5733 static void
5734 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5735 {
5736   tree type = TREE_TYPE (exp);
5737 #ifdef WORD_REGISTER_OPERATIONS
5738   HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5739 #endif
5740 
5741   switch (TREE_CODE (type))
5742     {
5743     case RECORD_TYPE:
5744     case UNION_TYPE:
5745     case QUAL_UNION_TYPE:
5746       {
5747 	unsigned HOST_WIDE_INT idx;
5748 	tree field, value;
5749 
5750 	/* If size is zero or the target is already cleared, do nothing.  */
5751 	if (size == 0 || cleared)
5752 	  cleared = 1;
5753 	/* We either clear the aggregate or indicate the value is dead.  */
5754 	else if ((TREE_CODE (type) == UNION_TYPE
5755 		  || TREE_CODE (type) == QUAL_UNION_TYPE)
5756 		 && ! CONSTRUCTOR_ELTS (exp))
5757 	  /* If the constructor is empty, clear the union.  */
5758 	  {
5759 	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5760 	    cleared = 1;
5761 	  }
5762 
5763 	/* If we are building a static constructor into a register,
5764 	   set the initial value as zero so we can fold the value into
5765 	   a constant.  But if more than one register is involved,
5766 	   this probably loses.  */
5767 	else if (REG_P (target) && TREE_STATIC (exp)
5768 		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5769 	  {
5770 	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5771 	    cleared = 1;
5772 	  }
5773 
5774         /* If the constructor has fewer fields than the structure or
5775 	   if we are initializing the structure to mostly zeros, clear
5776 	   the whole structure first.  Don't do this if TARGET is a
5777 	   register whose mode size isn't equal to SIZE since
5778 	   clear_storage can't handle this case.  */
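	/* For instance (names are illustrative), a constructor such as the
	   one built for

	     struct { int a[8]; int b; } x = { .b = 1 };

	   supplies fewer initializers than there are fields, so the whole
	   of X is cleared first and only B is stored explicitly.  */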
5779 	else if (size > 0
5780 		 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5781 		      != fields_length (type))
5782 		     || mostly_zeros_p (exp))
5783 		 && (!REG_P (target)
5784 		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5785 			 == size)))
5786 	  {
5787 	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5788 	    cleared = 1;
5789 	  }
5790 
5791 	if (REG_P (target) && !cleared)
5792 	  emit_clobber (target);
5793 
5794 	/* Store each element of the constructor into the
5795 	   corresponding field of TARGET.  */
5796 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5797 	  {
5798 	    enum machine_mode mode;
5799 	    HOST_WIDE_INT bitsize;
5800 	    HOST_WIDE_INT bitpos = 0;
5801 	    tree offset;
5802 	    rtx to_rtx = target;
5803 
5804 	    /* Just ignore missing fields.  We cleared the whole
5805 	       structure, above, if any fields are missing.  */
5806 	    if (field == 0)
5807 	      continue;
5808 
5809 	    if (cleared && initializer_zerop (value))
5810 	      continue;
5811 
5812 	    if (host_integerp (DECL_SIZE (field), 1))
5813 	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
5814 	    else
5815 	      bitsize = -1;
5816 
5817 	    mode = DECL_MODE (field);
5818 	    if (DECL_BIT_FIELD (field))
5819 	      mode = VOIDmode;
5820 
5821 	    offset = DECL_FIELD_OFFSET (field);
5822 	    if (host_integerp (offset, 0)
5823 		&& host_integerp (bit_position (field), 0))
5824 	      {
5825 		bitpos = int_bit_position (field);
5826 		offset = 0;
5827 	      }
5828 	    else
5829 	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5830 
5831 	    if (offset)
5832 	      {
5833 	        enum machine_mode address_mode;
5834 		rtx offset_rtx;
5835 
5836 		offset
5837 		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5838 						    make_tree (TREE_TYPE (exp),
5839 							       target));
5840 
5841 		offset_rtx = expand_normal (offset);
5842 		gcc_assert (MEM_P (to_rtx));
5843 
5844 		address_mode
5845 		  = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5846 		if (GET_MODE (offset_rtx) != address_mode)
5847 		  offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5848 
5849 		to_rtx = offset_address (to_rtx, offset_rtx,
5850 					 highest_pow2_factor (offset));
5851 	      }
5852 
5853 #ifdef WORD_REGISTER_OPERATIONS
5854 	    /* If this initializes a field that is smaller than a
5855 	       word, at the start of a word, try to widen it to a full
5856 	       word.  This special case allows us to output C++ member
5857 	       function initializations in a form that the optimizers
5858 	       can understand.  */
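	    /* E.g. with 32-bit words, storing the constant 0x12 into an
	       8-bit field at bit position 0 of a REG target is widened to
	       a full-word store of 0x12, or of 0x12 << 24 on a big-endian
	       target.  */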
5859 	    if (REG_P (target)
5860 		&& bitsize < BITS_PER_WORD
5861 		&& bitpos % BITS_PER_WORD == 0
5862 		&& GET_MODE_CLASS (mode) == MODE_INT
5863 		&& TREE_CODE (value) == INTEGER_CST
5864 		&& exp_size >= 0
5865 		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5866 	      {
5867 		tree type = TREE_TYPE (value);
5868 
5869 		if (TYPE_PRECISION (type) < BITS_PER_WORD)
5870 		  {
5871 		    type = lang_hooks.types.type_for_size
5872 		      (BITS_PER_WORD, TYPE_UNSIGNED (type));
5873 		    value = fold_convert (type, value);
5874 		  }
5875 
5876 		if (BYTES_BIG_ENDIAN)
5877 		  value
5878 		   = fold_build2 (LSHIFT_EXPR, type, value,
5879 				   build_int_cst (type,
5880 						  BITS_PER_WORD - bitsize));
5881 		bitsize = BITS_PER_WORD;
5882 		mode = word_mode;
5883 	      }
5884 #endif
5885 
5886 	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5887 		&& DECL_NONADDRESSABLE_P (field))
5888 	      {
5889 		to_rtx = copy_rtx (to_rtx);
5890 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5891 	      }
5892 
5893 	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
5894 				     value, type, cleared,
5895 				     get_alias_set (TREE_TYPE (field)));
5896 	  }
5897 	break;
5898       }
5899     case ARRAY_TYPE:
5900       {
5901 	tree value, index;
5902 	unsigned HOST_WIDE_INT i;
5903 	int need_to_clear;
5904 	tree domain;
5905 	tree elttype = TREE_TYPE (type);
5906 	int const_bounds_p;
5907 	HOST_WIDE_INT minelt = 0;
5908 	HOST_WIDE_INT maxelt = 0;
5909 
5910 	domain = TYPE_DOMAIN (type);
5911 	const_bounds_p = (TYPE_MIN_VALUE (domain)
5912 			  && TYPE_MAX_VALUE (domain)
5913 			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
5914 			  && host_integerp (TYPE_MAX_VALUE (domain), 0));
5915 
5916 	/* If we have constant bounds for the range of the type, get them.  */
5917 	if (const_bounds_p)
5918 	  {
5919 	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5920 	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5921 	  }
5922 
5923 	/* If the constructor has fewer elements than the array, clear
5924            the whole array first.  Similarly if this is a static
5925            constructor of a non-BLKmode object.  */
5926 	if (cleared)
5927 	  need_to_clear = 0;
5928 	else if (REG_P (target) && TREE_STATIC (exp))
5929 	  need_to_clear = 1;
5930 	else
5931 	  {
5932 	    unsigned HOST_WIDE_INT idx;
5933 	    tree index, value;
5934 	    HOST_WIDE_INT count = 0, zero_count = 0;
5935 	    need_to_clear = ! const_bounds_p;
5936 
5937 	    /* This loop is a more accurate version of the loop in
5938 	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
5939 	       is also needed to check for missing elements.  */
5940 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5941 	      {
5942 		HOST_WIDE_INT this_node_count;
5943 
5944 		if (need_to_clear)
5945 		  break;
5946 
5947 		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5948 		  {
5949 		    tree lo_index = TREE_OPERAND (index, 0);
5950 		    tree hi_index = TREE_OPERAND (index, 1);
5951 
5952 		    if (! host_integerp (lo_index, 1)
5953 			|| ! host_integerp (hi_index, 1))
5954 		      {
5955 			need_to_clear = 1;
5956 			break;
5957 		      }
5958 
5959 		    this_node_count = (tree_low_cst (hi_index, 1)
5960 				       - tree_low_cst (lo_index, 1) + 1);
5961 		  }
5962 		else
5963 		  this_node_count = 1;
5964 
5965 		count += this_node_count;
5966 		if (mostly_zeros_p (value))
5967 		  zero_count += this_node_count;
5968 	      }
5969 
5970 	    /* Clear the entire array first if there are any missing
5971 	       elements, or if the incidence of zero elements is >=
5972 	       75%.  */
5973 	    if (! need_to_clear
5974 		&& (count < maxelt - minelt + 1
5975 		    || 4 * zero_count >= 3 * count))
5976 	      need_to_clear = 1;
5977 	  }
5978 
5979 	if (need_to_clear && size > 0)
5980 	  {
5981 	    if (REG_P (target))
5982 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5983 	    else
5984 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5985 	    cleared = 1;
5986 	  }
5987 
5988 	if (!cleared && REG_P (target))
5989 	  /* Inform later passes that the old value is dead.  */
5990 	  emit_clobber (target);
5991 
5992 	/* Store each element of the constructor into the
5993 	   corresponding element of TARGET, determined by counting the
5994 	   elements.  */
5995 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5996 	  {
5997 	    enum machine_mode mode;
5998 	    HOST_WIDE_INT bitsize;
5999 	    HOST_WIDE_INT bitpos;
6000 	    rtx xtarget = target;
6001 
6002 	    if (cleared && initializer_zerop (value))
6003 	      continue;
6004 
6005 	    mode = TYPE_MODE (elttype);
6006 	    if (mode == BLKmode)
6007 	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
6008 			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
6009 			 : -1);
6010 	    else
6011 	      bitsize = GET_MODE_BITSIZE (mode);
6012 
6013 	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6014 	      {
6015 		tree lo_index = TREE_OPERAND (index, 0);
6016 		tree hi_index = TREE_OPERAND (index, 1);
6017 		rtx index_r, pos_rtx;
6018 		HOST_WIDE_INT lo, hi, count;
6019 		tree position;
6020 
6021 		/* If the range is constant and "small", unroll the loop.  */
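		/* E.g. a GNU C range initializer such as

		     int a[100] = { [2 ... 5] = 7 };

		   covers only four int-sized elements and is unrolled into
		   individual stores, whereas a much larger range falls
		   through to the run-time loop emitted below.  */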
6022 		if (const_bounds_p
6023 		    && host_integerp (lo_index, 0)
6024 		    && host_integerp (hi_index, 0)
6025 		    && (lo = tree_low_cst (lo_index, 0),
6026 			hi = tree_low_cst (hi_index, 0),
6027 			count = hi - lo + 1,
6028 			(!MEM_P (target)
6029 			 || count <= 2
6030 			 || (host_integerp (TYPE_SIZE (elttype), 1)
6031 			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
6032 				 <= 40 * 8)))))
6033 		  {
6034 		    lo -= minelt;  hi -= minelt;
6035 		    for (; lo <= hi; lo++)
6036 		      {
6037 			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6038 
6039 			if (MEM_P (target)
6040 			    && !MEM_KEEP_ALIAS_SET_P (target)
6041 			    && TREE_CODE (type) == ARRAY_TYPE
6042 			    && TYPE_NONALIASED_COMPONENT (type))
6043 			  {
6044 			    target = copy_rtx (target);
6045 			    MEM_KEEP_ALIAS_SET_P (target) = 1;
6046 			  }
6047 
6048 			store_constructor_field
6049 			  (target, bitsize, bitpos, mode, value, type, cleared,
6050 			   get_alias_set (elttype));
6051 		      }
6052 		  }
6053 		else
6054 		  {
6055 		    rtx loop_start = gen_label_rtx ();
6056 		    rtx loop_end = gen_label_rtx ();
6057 		    tree exit_cond;
6058 
6059 		    expand_normal (hi_index);
6060 
6061 		    index = build_decl (EXPR_LOCATION (exp),
6062 					VAR_DECL, NULL_TREE, domain);
6063 		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6064 		    SET_DECL_RTL (index, index_r);
6065 		    store_expr (lo_index, index_r, 0, false);
6066 
6067 		    /* Build the head of the loop.  */
6068 		    do_pending_stack_adjust ();
6069 		    emit_label (loop_start);
6070 
6071 		    /* Assign value to element index.  */
6072 		    position =
6073 		      fold_convert (ssizetype,
6074 				    fold_build2 (MINUS_EXPR,
6075 						 TREE_TYPE (index),
6076 						 index,
6077 						 TYPE_MIN_VALUE (domain)));
6078 
6079 		    position =
6080 			size_binop (MULT_EXPR, position,
6081 				    fold_convert (ssizetype,
6082 						  TYPE_SIZE_UNIT (elttype)));
6083 
6084 		    pos_rtx = expand_normal (position);
6085 		    xtarget = offset_address (target, pos_rtx,
6086 					      highest_pow2_factor (position));
6087 		    xtarget = adjust_address (xtarget, mode, 0);
6088 		    if (TREE_CODE (value) == CONSTRUCTOR)
6089 		      store_constructor (value, xtarget, cleared,
6090 					 bitsize / BITS_PER_UNIT);
6091 		    else
6092 		      store_expr (value, xtarget, 0, false);
6093 
6094 		    /* Generate a conditional jump to exit the loop.  */
6095 		    exit_cond = build2 (LT_EXPR, integer_type_node,
6096 					index, hi_index);
6097 		    jumpif (exit_cond, loop_end, -1);
6098 
6099 		    /* Update the loop counter, and jump to the head of
6100 		       the loop.  */
6101 		    expand_assignment (index,
6102 				       build2 (PLUS_EXPR, TREE_TYPE (index),
6103 					       index, integer_one_node),
6104 				       false);
6105 
6106 		    emit_jump (loop_start);
6107 
6108 		    /* Build the end of the loop.  */
6109 		    emit_label (loop_end);
6110 		  }
6111 	      }
6112 	    else if ((index != 0 && ! host_integerp (index, 0))
6113 		     || ! host_integerp (TYPE_SIZE (elttype), 1))
6114 	      {
6115 		tree position;
6116 
6117 		if (index == 0)
6118 		  index = ssize_int (1);
6119 
6120 		if (minelt)
6121 		  index = fold_convert (ssizetype,
6122 					fold_build2 (MINUS_EXPR,
6123 						     TREE_TYPE (index),
6124 						     index,
6125 						     TYPE_MIN_VALUE (domain)));
6126 
6127 		position =
6128 		  size_binop (MULT_EXPR, index,
6129 			      fold_convert (ssizetype,
6130 					    TYPE_SIZE_UNIT (elttype)));
6131 		xtarget = offset_address (target,
6132 					  expand_normal (position),
6133 					  highest_pow2_factor (position));
6134 		xtarget = adjust_address (xtarget, mode, 0);
6135 		store_expr (value, xtarget, 0, false);
6136 	      }
6137 	    else
6138 	      {
6139 		if (index != 0)
6140 		  bitpos = ((tree_low_cst (index, 0) - minelt)
6141 			    * tree_low_cst (TYPE_SIZE (elttype), 1));
6142 		else
6143 		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6144 
6145 		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6146 		    && TREE_CODE (type) == ARRAY_TYPE
6147 		    && TYPE_NONALIASED_COMPONENT (type))
6148 		  {
6149 		    target = copy_rtx (target);
6150 		    MEM_KEEP_ALIAS_SET_P (target) = 1;
6151 		  }
6152 		store_constructor_field (target, bitsize, bitpos, mode, value,
6153 					 type, cleared, get_alias_set (elttype));
6154 	      }
6155 	  }
6156 	break;
6157       }
6158 
6159     case VECTOR_TYPE:
6160       {
6161 	unsigned HOST_WIDE_INT idx;
6162 	constructor_elt *ce;
6163 	int i;
6164 	int need_to_clear;
6165 	int icode = 0;
6166 	tree elttype = TREE_TYPE (type);
6167 	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6168 	enum machine_mode eltmode = TYPE_MODE (elttype);
6169 	HOST_WIDE_INT bitsize;
6170 	HOST_WIDE_INT bitpos;
6171 	rtvec vector = NULL;
6172 	unsigned n_elts;
6173 	alias_set_type alias;
6174 
6175 	gcc_assert (eltmode != BLKmode);
6176 
6177 	n_elts = TYPE_VECTOR_SUBPARTS (type);
6178 	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6179 	  {
6180 	    enum machine_mode mode = GET_MODE (target);
6181 
6182 	    icode = (int) optab_handler (vec_init_optab, mode);
6183 	    if (icode != CODE_FOR_nothing)
6184 	      {
6185 		unsigned int i;
6186 
6187 		vector = rtvec_alloc (n_elts);
6188 		for (i = 0; i < n_elts; i++)
6189 		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6190 	      }
6191 	  }
6192 
6193 	/* If the constructor has fewer elements than the vector,
6194 	   clear the whole vector first.  Similarly if this is a static
6195 	   constructor of a non-BLKmode object.  */
6196 	if (cleared)
6197 	  need_to_clear = 0;
6198 	else if (REG_P (target) && TREE_STATIC (exp))
6199 	  need_to_clear = 1;
6200 	else
6201 	  {
6202 	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6203 	    tree value;
6204 
6205 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6206 	      {
6207 		int n_elts_here = tree_low_cst
6208 		  (int_const_binop (TRUNC_DIV_EXPR,
6209 				    TYPE_SIZE (TREE_TYPE (value)),
6210 				    TYPE_SIZE (elttype)), 1);
6211 
6212 		count += n_elts_here;
6213 		if (mostly_zeros_p (value))
6214 		  zero_count += n_elts_here;
6215 	      }
6216 
6217 	    /* Clear the entire vector first if there are any missing elements,
6218 	       or if the incidence of zero elements is >= 75%.  */
6219 	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6220 	  }
6221 
6222 	if (need_to_clear && size > 0 && !vector)
6223 	  {
6224 	    if (REG_P (target))
6225 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6226 	    else
6227 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6228 	    cleared = 1;
6229 	  }
6230 
6231 	/* Inform later passes that the old value is dead.  */
6232 	if (!cleared && !vector && REG_P (target))
6233 	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6234 
6235         if (MEM_P (target))
6236 	  alias = MEM_ALIAS_SET (target);
6237 	else
6238 	  alias = get_alias_set (elttype);
6239 
6240         /* Store each element of the constructor into the corresponding
6241 	   element of TARGET, determined by counting the elements.  */
6242 	for (idx = 0, i = 0;
6243 	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6244 	     idx++, i += bitsize / elt_size)
6245 	  {
6246 	    HOST_WIDE_INT eltpos;
6247 	    tree value = ce->value;
6248 
6249 	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6250 	    if (cleared && initializer_zerop (value))
6251 	      continue;
6252 
6253 	    if (ce->index)
6254 	      eltpos = tree_low_cst (ce->index, 1);
6255 	    else
6256 	      eltpos = i;
6257 
6258 	    if (vector)
6259 	      {
6260 	        /* Vector CONSTRUCTORs should only be built from smaller
6261 		   vectors in the case of BLKmode vectors.  */
6262 		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6263 		RTVEC_ELT (vector, eltpos)
6264 		  = expand_normal (value);
6265 	      }
6266 	    else
6267 	      {
6268 		enum machine_mode value_mode =
6269 		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6270 		  ? TYPE_MODE (TREE_TYPE (value))
6271 		  : eltmode;
6272 		bitpos = eltpos * elt_size;
6273 		store_constructor_field (target, bitsize, bitpos,
6274 					 value_mode, value, type,
6275 					 cleared, alias);
6276 	      }
6277 	  }
6278 
6279 	if (vector)
6280 	  emit_insn (GEN_FCN (icode)
6281 		     (target,
6282 		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
6283 	break;
6284       }
6285 
6286     default:
6287       gcc_unreachable ();
6288     }
6289 }
6290 
6291 /* Store the value of EXP (an expression tree)
6292    into a subfield of TARGET which has mode MODE and occupies
6293    BITSIZE bits, starting BITPOS bits from the start of TARGET.
6294    If MODE is VOIDmode, it means that we are storing into a bit-field.
6295 
6296    BITREGION_START is the bit position of the first bitfield in this region.
6297    BITREGION_END is the bit position of the last bitfield in this region.
6298    These two fields are 0, if the C++ memory model does not apply,
6299    or we are not interested in keeping track of bitfield regions.
6300 
6301    Always return const0_rtx unless we have something particular to
6302    return.
6303 
6304    TYPE is the type of the underlying object.
6305 
6306    ALIAS_SET is the alias set for the destination.  This value will
6307    (in general) be different from that for TARGET, since TARGET is a
6308    reference to the containing structure.
6309 
6310    If NONTEMPORAL is true, try generating a nontemporal store.  */
6311 
6312 static rtx
6313 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6314 	     unsigned HOST_WIDE_INT bitregion_start,
6315 	     unsigned HOST_WIDE_INT bitregion_end,
6316 	     enum machine_mode mode, tree exp, tree type,
6317 	     alias_set_type alias_set, bool nontemporal)
6318 {
6319   if (TREE_CODE (exp) == ERROR_MARK)
6320     return const0_rtx;
6321 
6322   /* If we have nothing to store, do nothing unless the expression has
6323      side-effects.  */
6324   if (bitsize == 0)
6325     return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6326 
6327   /* If we are storing into an unaligned field of an aligned union that is
6328      in a register, we may have the mode of TARGET being an integer mode but
6329      MODE == BLKmode.  In that case, get an aligned object whose size and
6330      alignment are the same as TARGET and store TARGET into it (we can avoid
6331      the store if the field being stored is the entire width of TARGET).  Then
6332      call ourselves recursively to store the field into a BLKmode version of
6333      that object.  Finally, load from the object into TARGET.  This is not
6334      very efficient in general, but should only be slightly more expensive
6335      than the otherwise-required unaligned accesses.  Perhaps this can be
6336      cleaned up later.  It's tempting to make OBJECT readonly, but it's set
6337      twice, once with emit_move_insn and once via store_field.  */
6338 
6339   if (mode == BLKmode
6340       && (REG_P (target) || GET_CODE (target) == SUBREG))
6341     {
6342       rtx object = assign_temp (type, 0, 1, 1);
6343       rtx blk_object = adjust_address (object, BLKmode, 0);
6344 
6345       if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
6346 	emit_move_insn (object, target);
6347 
6348       store_field (blk_object, bitsize, bitpos,
6349 		   bitregion_start, bitregion_end,
6350 		   mode, exp, type, MEM_ALIAS_SET (blk_object), nontemporal);
6351 
6352       emit_move_insn (target, object);
6353 
6354       /* We want to return the BLKmode version of the data.  */
6355       return blk_object;
6356     }
6357 
6358   if (GET_CODE (target) == CONCAT)
6359     {
6360       /* We're storing into a struct containing a single __complex.  */
6361 
6362       gcc_assert (!bitpos);
6363       return store_expr (exp, target, 0, nontemporal);
6364     }
6365 
6366   /* If the structure is in a register or if the component
6367      is a bit field, we cannot use addressing to access it.
6368      Use bit-field techniques or SUBREG to store in it.  */
6369 
6370   if (mode == VOIDmode
6371       || (mode != BLKmode && ! direct_store[(int) mode]
6372 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6373 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6374       || REG_P (target)
6375       || GET_CODE (target) == SUBREG
6376       /* If the field isn't aligned enough to store as an ordinary memref,
6377 	 store it as a bit field.  */
6378       || (mode != BLKmode
6379 	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6380 		|| bitpos % GET_MODE_ALIGNMENT (mode))
6381 	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6382 	      || (bitpos % BITS_PER_UNIT != 0)))
6383       || (bitsize >= 0 && mode != BLKmode
6384 	  && GET_MODE_BITSIZE (mode) > bitsize)
6385       /* If the RHS and field are a constant size and the size of the
6386 	 RHS isn't the same size as the bitfield, we must use bitfield
6387 	 operations.  */
6388       || (bitsize >= 0
6389 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6390 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6391       /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6392          decl we must use bitfield operations.  */
6393       || (bitsize >= 0
6394 	  && TREE_CODE (exp) == MEM_REF
6395 	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6396 	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6397 	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6398 	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6399     {
6400       rtx temp;
6401       gimple nop_def;
6402 
6403       /* If EXP is a NOP_EXPR of precision less than its mode, then that
6404 	 implies a mask operation.  If the precision is the same size as
6405 	 the field we're storing into, that mask is redundant.  This is
6406 	 particularly common with bit field assignments generated by the
6407 	 C front end.  */
6408       nop_def = get_def_for_expr (exp, NOP_EXPR);
6409       if (nop_def)
6410 	{
6411 	  tree type = TREE_TYPE (exp);
6412 	  if (INTEGRAL_TYPE_P (type)
6413 	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6414 	      && bitsize == TYPE_PRECISION (type))
6415 	    {
6416 	      tree op = gimple_assign_rhs1 (nop_def);
6417 	      type = TREE_TYPE (op);
6418 	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6419 		exp = op;
6420 	    }
6421 	}
6422 
6423       temp = expand_normal (exp);
6424 
6425       /* If BITSIZE is narrower than the size of the type of EXP
6426 	 we will be narrowing TEMP.  Normally, what's wanted are the
6427 	 low-order bits.  However, if EXP's type is a record and this is
6428 	 a big-endian machine, we want the upper BITSIZE bits.  */
6429       if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6430 	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6431 	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6432 	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6433 			     GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6434 			     NULL_RTX, 1);
6435 
6436       /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
6437 	 MODE.  */
6438       if (mode != VOIDmode && mode != BLKmode
6439 	  && mode != TYPE_MODE (TREE_TYPE (exp)))
6440 	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6441 
6442       /* If the modes of TEMP and TARGET are both BLKmode, both
6443 	 must be in memory and BITPOS must be aligned on a byte
6444 	 boundary.  If so, we simply do a block copy.  Likewise
6445 	 for a BLKmode-like TARGET.  */
6446       if (GET_MODE (temp) == BLKmode
6447 	  && (GET_MODE (target) == BLKmode
6448 	      || (MEM_P (target)
6449 		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6450 		  && (bitpos % BITS_PER_UNIT) == 0
6451 		  && (bitsize % BITS_PER_UNIT) == 0)))
6452 	{
6453 	  gcc_assert (MEM_P (target) && MEM_P (temp)
6454 		      && (bitpos % BITS_PER_UNIT) == 0);
6455 
6456 	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6457 	  emit_block_move (target, temp,
6458 			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6459 				    / BITS_PER_UNIT),
6460 			   BLOCK_OP_NORMAL);
6461 
6462 	  return const0_rtx;
6463 	}
6464 
6465       /* Store the value in the bitfield.  */
6466       store_bit_field (target, bitsize, bitpos,
6467 		       bitregion_start, bitregion_end,
6468 		       mode, temp);
6469 
6470       return const0_rtx;
6471     }
6472   else
6473     {
6474       /* Now build a reference to just the desired component.  */
6475       rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6476 
6477       if (to_rtx == target)
6478 	to_rtx = copy_rtx (to_rtx);
6479 
6480       if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6481 	set_mem_alias_set (to_rtx, alias_set);
6482 
6483       return store_expr (exp, to_rtx, 0, nontemporal);
6484     }
6485 }
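/* An informal summary of the dispatch above, for a concrete case: a
   store into a component that is neither byte aligned nor a whole
   number of bytes wide cannot use an ordinary memory reference, so the
   right-hand side is expanded, shifted on big-endian targets so that
   the upper BITSIZE bits are used when it comes from a record type,
   converted to MODE if necessary, and handed to store_bit_field.  Only
   components that start and end on byte boundaries with a directly
   usable MODE reach the plain adjust_address/store_expr path in the
   else branch.  */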
6486 
6487 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6488    an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6489    codes and find the ultimate containing object, which we return.
6490 
6491    We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6492    bit position, and *PUNSIGNEDP to the signedness of the field.
6493    If the position of the field is variable, we store a tree
6494    giving the variable offset (in units) in *POFFSET.
6495    This offset is in addition to the bit position.
6496    If the position is not variable, we store 0 in *POFFSET.
6497 
6498    If any of the extraction expressions is volatile,
6499    we store 1 in *PVOLATILEP.  Otherwise we don't change that.
6500 
6501    If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6502    Otherwise, it is a mode that can be used to access the field.
6503 
6504    If the field describes a variable-sized object, *PMODE is set to
6505    BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
6506    this case, but the address of the object can be found.
6507 
6508    If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6509    look through nodes that serve as markers of a greater alignment than
6510    the one that can be deduced from the expression.  These nodes make it
6511    possible for front-ends to prevent temporaries from being created by
6512    the middle-end on alignment considerations.  For that purpose, the
6513    normal operating mode at high-level is to always pass FALSE so that
6514    the ultimate containing object is really returned; moreover, the
6515    associated predicate handled_component_p will always return TRUE
6516    on these nodes, thus indicating that they are essentially handled
6517    by get_inner_reference.  TRUE should only be passed when the caller
6518    is scanning the expression in order to build another representation
6519    and specifically knows how to handle these nodes; as such, this is
6520    the normal operating mode in the RTL expanders.  */
6521 
6522 tree
6523 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6524 		     HOST_WIDE_INT *pbitpos, tree *poffset,
6525 		     enum machine_mode *pmode, int *punsignedp,
6526 		     int *pvolatilep, bool keep_aligning)
6527 {
6528   tree size_tree = 0;
6529   enum machine_mode mode = VOIDmode;
6530   bool blkmode_bitfield = false;
6531   tree offset = size_zero_node;
6532   double_int bit_offset = double_int_zero;
6533 
6534   /* First get the mode, signedness, and size.  We do this from just the
6535      outermost expression.  */
6536   *pbitsize = -1;
6537   if (TREE_CODE (exp) == COMPONENT_REF)
6538     {
6539       tree field = TREE_OPERAND (exp, 1);
6540       size_tree = DECL_SIZE (field);
6541       if (!DECL_BIT_FIELD (field))
6542 	mode = DECL_MODE (field);
6543       else if (DECL_MODE (field) == BLKmode)
6544 	blkmode_bitfield = true;
6545       else if (TREE_THIS_VOLATILE (exp)
6546 	       && flag_strict_volatile_bitfields > 0)
6547 	/* Volatile bitfields should be accessed in the mode of the
6548 	     field's type, not the mode computed based on the bit
6549 	     size.  */
6550 	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6551 
6552       *punsignedp = DECL_UNSIGNED (field);
6553     }
6554   else if (TREE_CODE (exp) == BIT_FIELD_REF)
6555     {
6556       size_tree = TREE_OPERAND (exp, 1);
6557       *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6558 		     || TYPE_UNSIGNED (TREE_TYPE (exp)));
6559 
6560       /* For vector types, with the correct size of access, use the mode of
6561 	 inner type.  */
6562       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6563 	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6564 	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6565         mode = TYPE_MODE (TREE_TYPE (exp));
6566     }
6567   else
6568     {
6569       mode = TYPE_MODE (TREE_TYPE (exp));
6570       *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6571 
6572       if (mode == BLKmode)
6573 	size_tree = TYPE_SIZE (TREE_TYPE (exp));
6574       else
6575 	*pbitsize = GET_MODE_BITSIZE (mode);
6576     }
6577 
6578   if (size_tree != 0)
6579     {
6580       if (! host_integerp (size_tree, 1))
6581 	mode = BLKmode, *pbitsize = -1;
6582       else
6583 	*pbitsize = tree_low_cst (size_tree, 1);
6584     }
6585 
6586   /* Compute cumulative bit-offset for nested component-refs and array-refs,
6587      and find the ultimate containing object.  */
6588   while (1)
6589     {
6590       switch (TREE_CODE (exp))
6591 	{
6592 	case BIT_FIELD_REF:
6593 	  bit_offset
6594 	    = double_int_add (bit_offset,
6595 			      tree_to_double_int (TREE_OPERAND (exp, 2)));
6596 	  break;
6597 
6598 	case COMPONENT_REF:
6599 	  {
6600 	    tree field = TREE_OPERAND (exp, 1);
6601 	    tree this_offset = component_ref_field_offset (exp);
6602 
6603 	    /* If this field hasn't been filled in yet, don't go past it.
6604 	       This should only happen when folding expressions made during
6605 	       type construction.  */
6606 	    if (this_offset == 0)
6607 	      break;
6608 
6609 	    offset = size_binop (PLUS_EXPR, offset, this_offset);
6610 	    bit_offset = double_int_add (bit_offset,
6611 					 tree_to_double_int
6612 					   (DECL_FIELD_BIT_OFFSET (field)));
6613 
6614 	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
6615 	  }
6616 	  break;
6617 
6618 	case ARRAY_REF:
6619 	case ARRAY_RANGE_REF:
6620 	  {
6621 	    tree index = TREE_OPERAND (exp, 1);
6622 	    tree low_bound = array_ref_low_bound (exp);
6623 	    tree unit_size = array_ref_element_size (exp);
6624 
6625 	    /* We assume all arrays have sizes that are a multiple of a byte.
6626 	       First subtract the lower bound, if any, in the type of the
6627 	       index, then convert to sizetype and multiply by the size of
6628 	       the array element.  */
6629 	    if (! integer_zerop (low_bound))
6630 	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6631 				   index, low_bound);
6632 
6633 	    offset = size_binop (PLUS_EXPR, offset,
6634 			         size_binop (MULT_EXPR,
6635 					     fold_convert (sizetype, index),
6636 					     unit_size));
6637 	  }
6638 	  break;
6639 
6640 	case REALPART_EXPR:
6641 	  break;
6642 
6643 	case IMAGPART_EXPR:
6644 	  bit_offset = double_int_add (bit_offset,
6645 				       uhwi_to_double_int (*pbitsize));
6646 	  break;
6647 
6648 	case VIEW_CONVERT_EXPR:
6649 	  if (keep_aligning && STRICT_ALIGNMENT
6650 	      && (TYPE_ALIGN (TREE_TYPE (exp))
6651 	       > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6652 	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6653 		  < BIGGEST_ALIGNMENT)
6654 	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6655 		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6656 	    goto done;
6657 	  break;
6658 
6659 	case MEM_REF:
6660 	  /* Hand back the decl for MEM[&decl, off].  */
6661 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6662 	    {
6663 	      tree off = TREE_OPERAND (exp, 1);
6664 	      if (!integer_zerop (off))
6665 		{
6666 		  double_int boff, coff = mem_ref_offset (exp);
6667 		  boff = double_int_lshift (coff,
6668 					    BITS_PER_UNIT == 8
6669 					    ? 3 : exact_log2 (BITS_PER_UNIT),
6670 					    HOST_BITS_PER_DOUBLE_INT, true);
6671 		  bit_offset = double_int_add (bit_offset, boff);
6672 		}
6673 	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6674 	    }
6675 	  goto done;
6676 
6677 	default:
6678 	  goto done;
6679 	}
6680 
6681       /* If any reference in the chain is volatile, the effect is volatile.  */
6682       if (TREE_THIS_VOLATILE (exp))
6683 	*pvolatilep = 1;
6684 
6685       exp = TREE_OPERAND (exp, 0);
6686     }
6687  done:
6688 
6689   /* If OFFSET is constant, see if we can return the whole thing as a
6690      constant bit position.  Make sure to handle overflow during
6691      this conversion.  */
6692   if (TREE_CODE (offset) == INTEGER_CST)
6693     {
6694       double_int tem = tree_to_double_int (offset);
6695       tem = double_int_sext (tem, TYPE_PRECISION (sizetype));
6696       tem = double_int_lshift (tem,
6697 			       BITS_PER_UNIT == 8
6698 			       ? 3 : exact_log2 (BITS_PER_UNIT),
6699 			       HOST_BITS_PER_DOUBLE_INT, true);
6700       tem = double_int_add (tem, bit_offset);
6701       if (double_int_fits_in_shwi_p (tem))
6702 	{
6703 	  *pbitpos = double_int_to_shwi (tem);
6704 	  *poffset = offset = NULL_TREE;
6705 	}
6706     }
6707 
6708   /* Otherwise, split it up.  */
6709   if (offset)
6710     {
6711       /* Avoid returning a negative bitpos as this may wreak havoc later.  */
6712       if (double_int_negative_p (bit_offset))
6713         {
6714 	  double_int mask
6715 	    = double_int_mask (BITS_PER_UNIT == 8
6716 			       ? 3 : exact_log2 (BITS_PER_UNIT));
6717 	  double_int tem = double_int_and_not (bit_offset, mask);
6718 	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6719 	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
6720 	     Subtract it from BIT_OFFSET and add it (scaled) to OFFSET.  */
6721 	  tem = double_int_rshift (tem,
6722 				   BITS_PER_UNIT == 8
6723 				   ? 3 : exact_log2 (BITS_PER_UNIT),
6724 				   HOST_BITS_PER_DOUBLE_INT, true);
6725 	  offset = size_binop (PLUS_EXPR, offset,
6726 			       double_int_to_tree (sizetype, tem));
6727 	}
6728 
6729       *pbitpos = double_int_to_shwi (bit_offset);
6730       *poffset = offset;
6731     }
6732 
6733   /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
6734   if (mode == VOIDmode
6735       && blkmode_bitfield
6736       && (*pbitpos % BITS_PER_UNIT) == 0
6737       && (*pbitsize % BITS_PER_UNIT) == 0)
6738     *pmode = BLKmode;
6739   else
6740     *pmode = mode;
6741 
6742   return exp;
6743 }
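/* A minimal usage sketch for get_inner_reference, assuming a target
   with 32-bit int and REF being a COMPONENT_REF for the field F of
   "struct { int pad; int f : 3; } s":

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);

   Under those assumptions the call should leave BASE as the expression
   for S, BITSIZE == 3 and BITPOS == 32, OFFSET == NULL_TREE because the
   position is constant, and MODE == VOIDmode because F is a non-BLKmode
   bit-field.  */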
6744 
6745 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6746    ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6747    EXP are marked as PACKED.  */
6748 
6749 bool
6750 contains_packed_reference (const_tree exp)
6751 {
6752   bool packed_p = false;
6753 
6754   while (1)
6755     {
6756       switch (TREE_CODE (exp))
6757 	{
6758 	case COMPONENT_REF:
6759 	  {
6760 	    tree field = TREE_OPERAND (exp, 1);
6761 	    packed_p = DECL_PACKED (field)
6762 		       || TYPE_PACKED (TREE_TYPE (field))
6763 		       || TYPE_PACKED (TREE_TYPE (exp));
6764 	    if (packed_p)
6765 	      goto done;
6766 	  }
6767 	  break;
6768 
6769 	case BIT_FIELD_REF:
6770 	case ARRAY_REF:
6771 	case ARRAY_RANGE_REF:
6772 	case REALPART_EXPR:
6773 	case IMAGPART_EXPR:
6774 	case VIEW_CONVERT_EXPR:
6775 	  break;
6776 
6777 	default:
6778 	  goto done;
6779 	}
6780       exp = TREE_OPERAND (exp, 0);
6781     }
6782  done:
6783   return packed_p;
6784 }
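/* For illustration (an assumed example rather than a specific testcase):
   with "struct __attribute__((packed)) p { char c; int i; } *q", the
   reference q->i is a COMPONENT_REF whose FIELD_DECL is packed, so
   contains_packed_reference would be expected to return true; for the
   same reference into an ordinary struct the loop keeps walking the
   component chain and returns false once it reaches the base object.  */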
6785 
6786 /* Return a tree of sizetype representing the size, in bytes, of the element
6787    of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6788 
6789 tree
6790 array_ref_element_size (tree exp)
6791 {
6792   tree aligned_size = TREE_OPERAND (exp, 3);
6793   tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6794   location_t loc = EXPR_LOCATION (exp);
6795 
6796   /* If a size was specified in the ARRAY_REF, it's the size measured
6797      in alignment units of the element type.  So multiply by that value.  */
6798   if (aligned_size)
6799     {
6800       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6801 	 sizetype from another type of the same width and signedness.  */
6802       if (TREE_TYPE (aligned_size) != sizetype)
6803 	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6804       return size_binop_loc (loc, MULT_EXPR, aligned_size,
6805 			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
6806     }
6807 
6808   /* Otherwise, take the size from that of the element type.  Substitute
6809      any PLACEHOLDER_EXPR that we have.  */
6810   else
6811     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6812 }
6813 
6814 /* Return a tree representing the lower bound of the array mentioned in
6815    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6816 
6817 tree
6818 array_ref_low_bound (tree exp)
6819 {
6820   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6821 
6822   /* If a lower bound is specified in EXP, use it.  */
6823   if (TREE_OPERAND (exp, 2))
6824     return TREE_OPERAND (exp, 2);
6825 
6826   /* Otherwise, if there is a domain type and it has a lower bound, use it,
6827      substituting for a PLACEHOLDER_EXPR as needed.  */
6828   if (domain_type && TYPE_MIN_VALUE (domain_type))
6829     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6830 
6831   /* Otherwise, return a zero of the appropriate type.  */
6832   return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6833 }
6834 
6835 /* Return a tree representing the upper bound of the array mentioned in
6836    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6837 
6838 tree
6839 array_ref_up_bound (tree exp)
6840 {
6841   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6842 
6843   /* If there is a domain type and it has an upper bound, use it, substituting
6844      for a PLACEHOLDER_EXPR as needed.  */
6845   if (domain_type && TYPE_MAX_VALUE (domain_type))
6846     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6847 
6848   /* Otherwise fail.  */
6849   return NULL_TREE;
6850 }
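/* A sketch of how the two bound helpers behave for a C array (assuming
   nothing was stored in operand 2 of the ARRAY_REF): for an ARRAY_REF
   into "int a[10]" the domain type is [0, 9], so array_ref_low_bound
   yields 0 and array_ref_up_bound yields 9.  When a front end does fill
   in operand 2, for instance for a non-zero-based array, that operand
   is returned directly as the lower bound.  */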
6851 
6852 /* Return a tree representing the offset, in bytes, of the field referenced
6853    by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
6854 
6855 tree
6856 component_ref_field_offset (tree exp)
6857 {
6858   tree aligned_offset = TREE_OPERAND (exp, 2);
6859   tree field = TREE_OPERAND (exp, 1);
6860   location_t loc = EXPR_LOCATION (exp);
6861 
6862   /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6863      in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
6864      value.  */
6865   if (aligned_offset)
6866     {
6867       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6868 	 sizetype from another type of the same width and signedness.  */
6869       if (TREE_TYPE (aligned_offset) != sizetype)
6870 	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6871       return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6872 			     size_int (DECL_OFFSET_ALIGN (field)
6873 				       / BITS_PER_UNIT));
6874     }
6875 
6876   /* Otherwise, take the offset from that of the field.  Substitute
6877      any PLACEHOLDER_EXPR that we have.  */
6878   else
6879     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6880 }
6881 
6882 /* Alignment in bits the TARGET of an assignment may be assumed to have.  */
6883 
6884 static unsigned HOST_WIDE_INT
6885 target_align (const_tree target)
6886 {
6887   /* We might have a chain of nested references with intermediate misaligning
6888      bit-field components, so we need to recurse to find out.  */
6889 
6890   unsigned HOST_WIDE_INT this_align, outer_align;
6891 
6892   switch (TREE_CODE (target))
6893     {
6894     case BIT_FIELD_REF:
6895       return 1;
6896 
6897     case COMPONENT_REF:
6898       this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6899       outer_align = target_align (TREE_OPERAND (target, 0));
6900       return MIN (this_align, outer_align);
6901 
6902     case ARRAY_REF:
6903     case ARRAY_RANGE_REF:
6904       this_align = TYPE_ALIGN (TREE_TYPE (target));
6905       outer_align = target_align (TREE_OPERAND (target, 0));
6906       return MIN (this_align, outer_align);
6907 
6908     CASE_CONVERT:
6909     case NON_LVALUE_EXPR:
6910     case VIEW_CONVERT_EXPR:
6911       this_align = TYPE_ALIGN (TREE_TYPE (target));
6912       outer_align = target_align (TREE_OPERAND (target, 0));
6913       return MAX (this_align, outer_align);
6914 
6915     default:
6916       return TYPE_ALIGN (TREE_TYPE (target));
6917     }
6918 }
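/* A worked example for target_align, assuming the layout described: for
   an assignment to "s.arr[i]" with "struct { int x; char arr[4]; } s",
   the recursion sees

     ARRAY_REF:      TYPE_ALIGN (char)       ==  8
     COMPONENT_REF:  DECL_ALIGN (arr)        ==  8
     base VAR_DECL:  TYPE_ALIGN (the struct) == 32

   and the MIN chain yields 8, so only byte alignment may be assumed for
   the target of the assignment.  */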
6919 
6920 
6921 /* Given an rtx VALUE that may contain additions and multiplications, return
6922    an equivalent value that just refers to a register, memory, or constant.
6923    This is done by generating instructions to perform the arithmetic and
6924    returning a pseudo-register containing the value.
6925 
6926    The returned value may be a REG, SUBREG, MEM or constant.  */
6927 
6928 rtx
6929 force_operand (rtx value, rtx target)
6930 {
6931   rtx op1, op2;
6932   /* Use subtarget as the target for operand 0 of a binary operation.  */
6933   rtx subtarget = get_subtarget (target);
6934   enum rtx_code code = GET_CODE (value);
6935 
6936   /* Check for subreg applied to an expression produced by loop optimizer.  */
6937   if (code == SUBREG
6938       && !REG_P (SUBREG_REG (value))
6939       && !MEM_P (SUBREG_REG (value)))
6940     {
6941       value
6942 	= simplify_gen_subreg (GET_MODE (value),
6943 			       force_reg (GET_MODE (SUBREG_REG (value)),
6944 					  force_operand (SUBREG_REG (value),
6945 							 NULL_RTX)),
6946 			       GET_MODE (SUBREG_REG (value)),
6947 			       SUBREG_BYTE (value));
6948       code = GET_CODE (value);
6949     }
6950 
6951   /* Check for a PIC address load.  */
6952   if ((code == PLUS || code == MINUS)
6953       && XEXP (value, 0) == pic_offset_table_rtx
6954       && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6955 	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
6956 	  || GET_CODE (XEXP (value, 1)) == CONST))
6957     {
6958       if (!subtarget)
6959 	subtarget = gen_reg_rtx (GET_MODE (value));
6960       emit_move_insn (subtarget, value);
6961       return subtarget;
6962     }
6963 
6964   if (ARITHMETIC_P (value))
6965     {
6966       op2 = XEXP (value, 1);
6967       if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6968 	subtarget = 0;
6969       if (code == MINUS && CONST_INT_P (op2))
6970 	{
6971 	  code = PLUS;
6972 	  op2 = negate_rtx (GET_MODE (value), op2);
6973 	}
6974 
6975       /* Check for an addition with OP2 a constant integer and our first
6976          operand a PLUS of a virtual register and something else.  In that
6977          case, we want to emit the sum of the virtual register and the
6978          constant first and then add the other value.  This allows virtual
6979          register instantiation to simply modify the constant rather than
6980          creating another one around this addition.  */
6981       if (code == PLUS && CONST_INT_P (op2)
6982 	  && GET_CODE (XEXP (value, 0)) == PLUS
6983 	  && REG_P (XEXP (XEXP (value, 0), 0))
6984 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6985 	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6986 	{
6987 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
6988 					  XEXP (XEXP (value, 0), 0), op2,
6989 					  subtarget, 0, OPTAB_LIB_WIDEN);
6990 	  return expand_simple_binop (GET_MODE (value), code, temp,
6991 				      force_operand (XEXP (XEXP (value,
6992 								 0), 1), 0),
6993 				      target, 0, OPTAB_LIB_WIDEN);
6994 	}
6995 
6996       op1 = force_operand (XEXP (value, 0), subtarget);
6997       op2 = force_operand (op2, NULL_RTX);
6998       switch (code)
6999 	{
7000 	case MULT:
7001 	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
7002 	case DIV:
7003 	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
7004 	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
7005 					target, 1, OPTAB_LIB_WIDEN);
7006 	  else
7007 	    return expand_divmod (0,
7008 				  FLOAT_MODE_P (GET_MODE (value))
7009 				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
7010 				  GET_MODE (value), op1, op2, target, 0);
7011 	case MOD:
7012 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7013 				target, 0);
7014 	case UDIV:
7015 	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7016 				target, 1);
7017 	case UMOD:
7018 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7019 				target, 1);
7020 	case ASHIFTRT:
7021 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7022 				      target, 0, OPTAB_LIB_WIDEN);
7023 	default:
7024 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7025 				      target, 1, OPTAB_LIB_WIDEN);
7026 	}
7027     }
7028   if (UNARY_P (value))
7029     {
7030       if (!target)
7031 	target = gen_reg_rtx (GET_MODE (value));
7032       op1 = force_operand (XEXP (value, 0), NULL_RTX);
7033       switch (code)
7034 	{
7035 	case ZERO_EXTEND:
7036 	case SIGN_EXTEND:
7037 	case TRUNCATE:
7038 	case FLOAT_EXTEND:
7039 	case FLOAT_TRUNCATE:
7040 	  convert_move (target, op1, code == ZERO_EXTEND);
7041 	  return target;
7042 
7043 	case FIX:
7044 	case UNSIGNED_FIX:
7045 	  expand_fix (target, op1, code == UNSIGNED_FIX);
7046 	  return target;
7047 
7048 	case FLOAT:
7049 	case UNSIGNED_FLOAT:
7050 	  expand_float (target, op1, code == UNSIGNED_FLOAT);
7051 	  return target;
7052 
7053 	default:
7054 	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7055 	}
7056     }
7057 
7058 #ifdef INSN_SCHEDULING
7059   /* On machines that have insn scheduling, we want all memory references to be
7060      explicit, so we need to deal with such paradoxical SUBREGs.  */
7061   if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7062     value
7063       = simplify_gen_subreg (GET_MODE (value),
7064 			     force_reg (GET_MODE (SUBREG_REG (value)),
7065 					force_operand (SUBREG_REG (value),
7066 						       NULL_RTX)),
7067 			     GET_MODE (SUBREG_REG (value)),
7068 			     SUBREG_BYTE (value));
7069 #endif
7070 
7071   return value;
7072 }
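/* A minimal usage sketch for force_operand; INDEX_REG and BASE_REG are
   hypothetical pseudo registers obtained elsewhere, e.g. from
   gen_reg_rtx (Pmode):

     rtx value = gen_rtx_PLUS (Pmode,
			       gen_rtx_MULT (Pmode, index_reg,
					     GEN_INT (4)),
			       base_reg);
     rtx addr = force_operand (value, NULL_RTX);

   The MULT and the PLUS are expanded into real instructions and ADDR
   ends up as a pseudo register (for simpler inputs it may instead be a
   MEM or a constant) that can be used directly as an operand.  */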
7073 
7074 /* Subroutine of expand_expr: return nonzero iff there is no way that
7075    EXP can reference X, which is being modified.  TOP_P is nonzero if this
7076    call is going to be used to determine whether we need a temporary
7077    for EXP, as opposed to a recursive call to this function.
7078 
7079    It is always safe for this routine to return zero since it merely
7080    searches for optimization opportunities.  */
7081 
7082 int
7083 safe_from_p (const_rtx x, tree exp, int top_p)
7084 {
7085   rtx exp_rtl = 0;
7086   int i, nops;
7087 
7088   if (x == 0
7089       /* If EXP has varying size, we MUST use a target since we currently
7090 	 have no way of allocating temporaries of variable size
7091 	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7092 	 So we assume here that something at a higher level has prevented a
7093 	 clash.  This is somewhat bogus, but the best we can do.  Only
7094 	 do this when X is BLKmode and when we are at the top level.  */
7095       || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7096 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7097 	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7098 	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7099 	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7100 	      != INTEGER_CST)
7101 	  && GET_MODE (x) == BLKmode)
7102       /* If X is in the outgoing argument area, it is always safe.  */
7103       || (MEM_P (x)
7104 	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
7105 	      || (GET_CODE (XEXP (x, 0)) == PLUS
7106 		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7107     return 1;
7108 
7109   /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7110      find the underlying pseudo.  */
7111   if (GET_CODE (x) == SUBREG)
7112     {
7113       x = SUBREG_REG (x);
7114       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7115 	return 0;
7116     }
7117 
7118   /* Now look at our tree code and possibly recurse.  */
7119   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7120     {
7121     case tcc_declaration:
7122       exp_rtl = DECL_RTL_IF_SET (exp);
7123       break;
7124 
7125     case tcc_constant:
7126       return 1;
7127 
7128     case tcc_exceptional:
7129       if (TREE_CODE (exp) == TREE_LIST)
7130 	{
7131 	  while (1)
7132 	    {
7133 	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7134 		return 0;
7135 	      exp = TREE_CHAIN (exp);
7136 	      if (!exp)
7137 		return 1;
7138 	      if (TREE_CODE (exp) != TREE_LIST)
7139 		return safe_from_p (x, exp, 0);
7140 	    }
7141 	}
7142       else if (TREE_CODE (exp) == CONSTRUCTOR)
7143 	{
7144 	  constructor_elt *ce;
7145 	  unsigned HOST_WIDE_INT idx;
7146 
7147 	  FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
7148 	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7149 		|| !safe_from_p (x, ce->value, 0))
7150 	      return 0;
7151 	  return 1;
7152 	}
7153       else if (TREE_CODE (exp) == ERROR_MARK)
7154 	return 1;	/* An already-visited SAVE_EXPR? */
7155       else
7156 	return 0;
7157 
7158     case tcc_statement:
7159       /* The only case we look at here is the DECL_INITIAL inside a
7160 	 DECL_EXPR.  */
7161       return (TREE_CODE (exp) != DECL_EXPR
7162 	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7163 	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7164 	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7165 
7166     case tcc_binary:
7167     case tcc_comparison:
7168       if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7169 	return 0;
7170       /* Fall through.  */
7171 
7172     case tcc_unary:
7173       return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7174 
7175     case tcc_expression:
7176     case tcc_reference:
7177     case tcc_vl_exp:
7178       /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
7179 	 the expression.  If it is set, we conflict iff we are that rtx or
7180 	 both are in memory.  Otherwise, we check all operands of the
7181 	 expression recursively.  */
7182 
7183       switch (TREE_CODE (exp))
7184 	{
7185 	case ADDR_EXPR:
7186 	  /* If the operand is static or we are static, we can't conflict.
7187 	     Likewise if we don't conflict with the operand at all.  */
7188 	  if (staticp (TREE_OPERAND (exp, 0))
7189 	      || TREE_STATIC (exp)
7190 	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7191 	    return 1;
7192 
7193 	  /* Otherwise, the only way this can conflict is if we are taking
7194 	     the address of a DECL whose address is part of X, which is
7195 	     very rare.  */
7196 	  exp = TREE_OPERAND (exp, 0);
7197 	  if (DECL_P (exp))
7198 	    {
7199 	      if (!DECL_RTL_SET_P (exp)
7200 		  || !MEM_P (DECL_RTL (exp)))
7201 		return 0;
7202 	      else
7203 		exp_rtl = XEXP (DECL_RTL (exp), 0);
7204 	    }
7205 	  break;
7206 
7207 	case MEM_REF:
7208 	  if (MEM_P (x)
7209 	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7210 					get_alias_set (exp)))
7211 	    return 0;
7212 	  break;
7213 
7214 	case CALL_EXPR:
7215 	  /* Assume that the call will clobber all hard registers and
7216 	     all of memory.  */
7217 	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7218 	      || MEM_P (x))
7219 	    return 0;
7220 	  break;
7221 
7222 	case WITH_CLEANUP_EXPR:
7223 	case CLEANUP_POINT_EXPR:
7224 	  /* Lowered by gimplify.c.  */
7225 	  gcc_unreachable ();
7226 
7227 	case SAVE_EXPR:
7228 	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7229 
7230 	default:
7231 	  break;
7232 	}
7233 
7234       /* If we have an rtx, we do not need to scan our operands.  */
7235       if (exp_rtl)
7236 	break;
7237 
7238       nops = TREE_OPERAND_LENGTH (exp);
7239       for (i = 0; i < nops; i++)
7240 	if (TREE_OPERAND (exp, i) != 0
7241 	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7242 	  return 0;
7243 
7244       break;
7245 
7246     case tcc_type:
7247       /* Should never get a type here.  */
7248       gcc_unreachable ();
7249     }
7250 
7251   /* If we have an rtl, find any enclosed object.  Then see if we conflict
7252      with it.  */
7253   if (exp_rtl)
7254     {
7255       if (GET_CODE (exp_rtl) == SUBREG)
7256 	{
7257 	  exp_rtl = SUBREG_REG (exp_rtl);
7258 	  if (REG_P (exp_rtl)
7259 	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7260 	    return 0;
7261 	}
7262 
7263       /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
7264 	 are memory and they conflict.  */
7265       return ! (rtx_equal_p (x, exp_rtl)
7266 		|| (MEM_P (x) && MEM_P (exp_rtl)
7267 		    && true_dependence (exp_rtl, VOIDmode, x)));
7268     }
7269 
7270   /* If we reach here, it is safe.  */
7271   return 1;
7272 }
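/* For instance, expand_operands below calls safe_from_p (target, exp1, 1)
   and drops TARGET, forcing a fresh temporary, whenever the second
   operand might still read it.  A zero answer from this routine
   therefore only costs an extra temporary, which is why it may be
   conservative whenever it cannot prove independence.  */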
7273 
7274 
7275 /* Return the highest power of two that EXP is known to be a multiple of.
7276    This is used in updating alignment of MEMs in array references.  */
7277 
7278 unsigned HOST_WIDE_INT
7279 highest_pow2_factor (const_tree exp)
7280 {
7281   unsigned HOST_WIDE_INT c0, c1;
7282 
7283   switch (TREE_CODE (exp))
7284     {
7285     case INTEGER_CST:
7286       /* We can find the lowest bit that's a one.  If the low
7287 	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
7288 	 We need to handle this case since we can find it in a COND_EXPR,
7289 	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
7290 	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
7291 	 later ICE.  */
7292       if (TREE_OVERFLOW (exp))
7293 	return BIGGEST_ALIGNMENT;
7294       else
7295 	{
7296 	  /* Note: tree_low_cst is intentionally not used here,
7297 	     we don't care about the upper bits.  */
7298 	  c0 = TREE_INT_CST_LOW (exp);
7299 	  c0 &= -c0;
7300 	  return c0 ? c0 : BIGGEST_ALIGNMENT;
7301 	}
7302       break;
7303 
7304     case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
7305       c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7306       c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7307       return MIN (c0, c1);
7308 
7309     case MULT_EXPR:
7310       c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7311       c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7312       return c0 * c1;
7313 
7314     case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
7315     case CEIL_DIV_EXPR:
7316       if (integer_pow2p (TREE_OPERAND (exp, 1))
7317 	  && host_integerp (TREE_OPERAND (exp, 1), 1))
7318 	{
7319 	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7320 	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
7321 	  return MAX (1, c0 / c1);
7322 	}
7323       break;
7324 
7325     case BIT_AND_EXPR:
7326       /* The highest power of two of a bit-and expression is the maximum of
7327 	 that of its operands.  We typically get here for a complex LHS and
7328 	 a constant negative power of two on the RHS to force an explicit
7329 	 alignment, so don't bother looking at the LHS.  */
7330       return highest_pow2_factor (TREE_OPERAND (exp, 1));
7331 
7332     CASE_CONVERT:
7333     case SAVE_EXPR:
7334       return highest_pow2_factor (TREE_OPERAND (exp, 0));
7335 
7336     case COMPOUND_EXPR:
7337       return highest_pow2_factor (TREE_OPERAND (exp, 1));
7338 
7339     case COND_EXPR:
7340       c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7341       c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
7342       return MIN (c0, c1);
7343 
7344     default:
7345       break;
7346     }
7347 
7348   return 1;
7349 }
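/* A short worked example for highest_pow2_factor: for the tree form of
   "i * 4 + 8", with I an arbitrary index expression, the MULT_EXPR
   contributes highest_pow2_factor (i) * 4 == 1 * 4 == 4, the constant 8
   contributes 8, and the PLUS_EXPR takes the minimum of the two, so the
   result is 4: the sum is known to be a multiple of 4 but not
   necessarily of 8.  */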
7350 
7351 /* Similar, except that the alignment requirements of TARGET are
7352    taken into account.  Assume it is at least as aligned as its
7353    type, unless it is a COMPONENT_REF in which case the layout of
7354    the structure gives the alignment.  */
7355 
7356 static unsigned HOST_WIDE_INT
7357 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7358 {
7359   unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7360   unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7361 
7362   return MAX (factor, talign);
7363 }
7364 
7365 /* Subroutine of expand_expr.  Expand the two operands of a binary
7366    expression EXP0 and EXP1 placing the results in OP0 and OP1.
7367    The value may be stored in TARGET if TARGET is nonzero.  The
7368    MODIFIER argument is as documented by expand_expr.  */
7369 
7370 static void
7371 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7372 		 enum expand_modifier modifier)
7373 {
7374   if (! safe_from_p (target, exp1, 1))
7375     target = 0;
7376   if (operand_equal_p (exp0, exp1, 0))
7377     {
7378       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7379       *op1 = copy_rtx (*op0);
7380     }
7381   else
7382     {
7383       /* If we need to preserve evaluation order, copy exp0 into its own
7384 	 temporary variable so that it can't be clobbered by exp1.  */
7385       if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7386 	exp0 = save_expr (exp0);
7387       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7388       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7389     }
7390 }
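/* A typical call (sketch only; TREEOP0, TREEOP1 and SUBTARGET stand for
   whatever the caller has at hand):

     rtx op0, op1;
     expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
		      EXPAND_NORMAL);

   On return OP0 and OP1 hold rtl for the two operands.  When the trees
   are operand_equal_p, the expression is expanded once and OP1 is just
   a copy_rtx of OP0; when the front end requires a fixed evaluation
   order (flag_evaluation_order) and the second operand has side
   effects, the first is wrapped in a SAVE_EXPR so it cannot be
   clobbered.  */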
7391 
7392 
7393 /* Return a MEM that contains constant EXP.  DEFER is as for
7394    output_constant_def and MODIFIER is as for expand_expr.  */
7395 
7396 static rtx
7397 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7398 {
7399   rtx mem;
7400 
7401   mem = output_constant_def (exp, defer);
7402   if (modifier != EXPAND_INITIALIZER)
7403     mem = use_anchored_address (mem);
7404   return mem;
7405 }
7406 
7407 /* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
7408    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7409 
7410 static rtx
7411 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7412 		         enum expand_modifier modifier, addr_space_t as)
7413 {
7414   rtx result, subtarget;
7415   tree inner, offset;
7416   HOST_WIDE_INT bitsize, bitpos;
7417   int volatilep, unsignedp;
7418   enum machine_mode mode1;
7419 
7420   /* If we are taking the address of a constant and are at the top level,
7421      we have to use output_constant_def since we can't call force_const_mem
7422      at top level.  */
7423   /* ??? This should be considered a front-end bug.  We should not be
7424      generating ADDR_EXPR of something that isn't an LVALUE.  The only
7425      exception here is STRING_CST.  */
7426   if (CONSTANT_CLASS_P (exp))
7427     {
7428       result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7429       if (modifier < EXPAND_SUM)
7430 	result = force_operand (result, target);
7431       return result;
7432     }
7433 
7434   /* Everything must be something allowed by is_gimple_addressable.  */
7435   switch (TREE_CODE (exp))
7436     {
7437     case INDIRECT_REF:
7438       /* This case will happen via recursion for &a->b.  */
7439       return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7440 
7441     case MEM_REF:
7442       {
7443 	tree tem = TREE_OPERAND (exp, 0);
7444 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
7445 	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7446 	return expand_expr (tem, target, tmode, modifier);
7447       }
7448 
7449     case CONST_DECL:
7450       /* Expand the initializer like constants above.  */
7451       result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7452 					   0, modifier), 0);
7453       if (modifier < EXPAND_SUM)
7454 	result = force_operand (result, target);
7455       return result;
7456 
7457     case REALPART_EXPR:
7458       /* The real part of the complex number is always first, therefore
7459 	 the address is the same as the address of the parent object.  */
7460       offset = 0;
7461       bitpos = 0;
7462       inner = TREE_OPERAND (exp, 0);
7463       break;
7464 
7465     case IMAGPART_EXPR:
7466       /* The imaginary part of the complex number is always second.
7467 	 The expression is therefore always offset by the size of the
7468 	 scalar type.  */
7469       offset = 0;
7470       bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7471       inner = TREE_OPERAND (exp, 0);
7472       break;
7473 
7474     default:
7475       /* If the object is a DECL, then expand it for its rtl.  Don't bypass
7476 	 expand_expr, as that can have various side effects; LABEL_DECLs for
7477 	 example, may not have their DECL_RTL set yet.  Expand the rtl of
7478 	 CONSTRUCTORs too, which should yield a memory reference for the
7479 	 constructor's contents.  Assume language specific tree nodes can
7480 	 be expanded in some interesting way.  */
7481       gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7482       if (DECL_P (exp)
7483 	  || TREE_CODE (exp) == CONSTRUCTOR
7484 	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7485 	{
7486 	  result = expand_expr (exp, target, tmode,
7487 				modifier == EXPAND_INITIALIZER
7488 				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7489 
7490 	  /* If the DECL isn't in memory, then the DECL wasn't properly
7491 	     marked TREE_ADDRESSABLE, which will be either a front-end
7492 	     or a tree optimizer bug.  */
7493 
7494 	  if (TREE_ADDRESSABLE (exp)
7495 	      && ! MEM_P (result)
7496 	      && ! targetm.calls.allocate_stack_slots_for_args())
7497 	    {
7498 	      error ("local frame unavailable (naked function?)");
7499 	      return result;
7500 	    }
7501 	  else
7502 	    gcc_assert (MEM_P (result));
7503 	  result = XEXP (result, 0);
7504 
7505 	  /* ??? Is this needed anymore?  */
7506 	  if (DECL_P (exp) && !TREE_USED (exp) == 0)
7507 	    {
7508 	      assemble_external (exp);
7509 	      TREE_USED (exp) = 1;
7510 	    }
7511 
7512 	  if (modifier != EXPAND_INITIALIZER
7513 	      && modifier != EXPAND_CONST_ADDRESS
7514 	      && modifier != EXPAND_SUM)
7515 	    result = force_operand (result, target);
7516 	  return result;
7517 	}
7518 
7519       /* Pass FALSE as the last argument to get_inner_reference although
7520 	 we are expanding to RTL.  The rationale is that we know how to
7521 	 handle "aligning nodes" here: we can just bypass them because
7522 	 they won't change the final object whose address will be returned
7523 	 (they actually exist only for that purpose).  */
7524       inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7525 				   &mode1, &unsignedp, &volatilep, false);
7526       break;
7527     }
7528 
7529   /* We must have made progress.  */
7530   gcc_assert (inner != exp);
7531 
7532   subtarget = offset || bitpos ? NULL_RTX : target;
7533   /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7534      inner alignment, force the inner to be sufficiently aligned.  */
7535   if (CONSTANT_CLASS_P (inner)
7536       && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7537     {
7538       inner = copy_node (inner);
7539       TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7540       TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7541       TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7542     }
7543   result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7544 
7545   if (offset)
7546     {
7547       rtx tmp;
7548 
7549       if (modifier != EXPAND_NORMAL)
7550 	result = force_operand (result, NULL);
7551       tmp = expand_expr (offset, NULL_RTX, tmode,
7552 			 modifier == EXPAND_INITIALIZER
7553 			  ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7554 
7555       result = convert_memory_address_addr_space (tmode, result, as);
7556       tmp = convert_memory_address_addr_space (tmode, tmp, as);
7557 
7558       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7559 	result = simplify_gen_binary (PLUS, tmode, result, tmp);
7560       else
7561 	{
7562 	  subtarget = bitpos ? NULL_RTX : target;
7563 	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7564 					1, OPTAB_LIB_WIDEN);
7565 	}
7566     }
7567 
7568   if (bitpos)
7569     {
7570       /* Someone beforehand should have rejected taking the address
7571 	 of such an object.  */
7572       gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7573 
7574       result = plus_constant (result, bitpos / BITS_PER_UNIT);
7575       if (modifier < EXPAND_SUM)
7576 	result = force_operand (result, target);
7577     }
7578 
7579   return result;
7580 }
7581 
7582 /* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
7583    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7584 
7585 static rtx
7586 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7587 		       enum expand_modifier modifier)
7588 {
7589   addr_space_t as = ADDR_SPACE_GENERIC;
7590   enum machine_mode address_mode = Pmode;
7591   enum machine_mode pointer_mode = ptr_mode;
7592   enum machine_mode rmode;
7593   rtx result;
7594 
7595   /* Target mode of VOIDmode says "whatever's natural".  */
7596   if (tmode == VOIDmode)
7597     tmode = TYPE_MODE (TREE_TYPE (exp));
7598 
7599   if (POINTER_TYPE_P (TREE_TYPE (exp)))
7600     {
7601       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7602       address_mode = targetm.addr_space.address_mode (as);
7603       pointer_mode = targetm.addr_space.pointer_mode (as);
7604     }
7605 
7606   /* We can get called with some Weird Things if the user does silliness
7607      like "(short) &a".  In that case, convert_memory_address won't do
7608      the right thing, so ignore the given target mode.  */
7609   if (tmode != address_mode && tmode != pointer_mode)
7610     tmode = address_mode;
7611 
7612   result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7613 				    tmode, modifier, as);
7614 
7615   /* Despite expand_expr's claims about ignoring TMODE when it is not
7616      strictly convenient, stuff breaks if we don't honor it.  Note
7617      that combined with the above, we only do this for pointer modes.  */
7618   rmode = GET_MODE (result);
7619   if (rmode == VOIDmode)
7620     rmode = tmode;
7621   if (rmode != tmode)
7622     result = convert_memory_address_addr_space (tmode, result, as);
7623 
7624   return result;
7625 }
7626 
7627 /* Generate code for computing CONSTRUCTOR EXP.
7628    An rtx for the computed value is returned.  If AVOID_TEMP_MEM
7629    is TRUE, instead of creating a temporary variable in memory
7630    NULL is returned and the caller needs to handle it differently.  */
7631 
7632 static rtx
7633 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7634 		    bool avoid_temp_mem)
7635 {
7636   tree type = TREE_TYPE (exp);
7637   enum machine_mode mode = TYPE_MODE (type);
7638 
7639   /* Try to avoid creating a temporary at all.  This is possible
7640      if all of the initializer is zero.
7641      FIXME: try to handle all [0..255] initializers we can handle
7642      with memset.  */
7643   if (TREE_STATIC (exp)
7644       && !TREE_ADDRESSABLE (exp)
7645       && target != 0 && mode == BLKmode
7646       && all_zeros_p (exp))
7647     {
7648       clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7649       return target;
7650     }
7651 
7652   /* All elts simple constants => refer to a constant in memory.  But
7653      if this is a non-BLKmode mode, let it store a field at a time
7654      since that should make a CONST_INT or CONST_DOUBLE when we
7655      fold.  Likewise, if we have a target we can use, it is best to
7656      store directly into the target unless the type is large enough
7657      that memcpy will be used.  If we are making an initializer and
7658      all operands are constant, put it in memory as well.
7659 
7660      FIXME: Avoid trying to fill vector constructors piece-meal.
7661      Output them with output_constant_def below unless we're sure
7662      they're zeros.  This should go away when vector initializers
7663      are treated like VECTOR_CST instead of arrays.  */
7664   if ((TREE_STATIC (exp)
7665        && ((mode == BLKmode
7666 	    && ! (target != 0 && safe_from_p (target, exp, 1)))
7667 		  || TREE_ADDRESSABLE (exp)
7668 		  || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7669 		      && (! MOVE_BY_PIECES_P
7670 				     (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7671 				      TYPE_ALIGN (type)))
7672 		      && ! mostly_zeros_p (exp))))
7673       || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7674 	  && TREE_CONSTANT (exp)))
7675     {
7676       rtx constructor;
7677 
7678       if (avoid_temp_mem)
7679 	return NULL_RTX;
7680 
7681       constructor = expand_expr_constant (exp, 1, modifier);
7682 
7683       if (modifier != EXPAND_CONST_ADDRESS
7684 	  && modifier != EXPAND_INITIALIZER
7685 	  && modifier != EXPAND_SUM)
7686 	constructor = validize_mem (constructor);
7687 
7688       return constructor;
7689     }
7690 
7691   /* Handle calls that pass values in multiple non-contiguous
7692      locations.  The Irix 6 ABI has examples of this.  */
7693   if (target == 0 || ! safe_from_p (target, exp, 1)
7694       || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7695     {
7696       if (avoid_temp_mem)
7697 	return NULL_RTX;
7698 
7699       target
7700 	= assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7701 						    | (TREE_READONLY (exp)
7702 						       * TYPE_QUAL_CONST))),
7703 		       0, TREE_ADDRESSABLE (exp), 1);
7704     }
7705 
7706   store_constructor (exp, target, 0, int_expr_size (exp));
7707   return target;
7708 }
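/* Two illustrative cases for the code above, using assumed aggregates
   rather than specific testcases: an all-zero constant initializer for
   a BLKmode aggregate with a known target can take the all_zeros_p fast
   path and be expanded as a single clear_storage call, while a large
   constant initializer that is not mostly zeros is laid out once in
   static memory by expand_expr_constant and the resulting MEM is handed
   back for the caller to copy from.  */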
7709 
7710 
7711 /* expand_expr: generate code for computing expression EXP.
7712    An rtx for the computed value is returned.  The value is never null.
7713    In the case of a void EXP, const0_rtx is returned.
7714 
7715    The value may be stored in TARGET if TARGET is nonzero.
7716    TARGET is just a suggestion; callers must assume that
7717    the rtx returned may not be the same as TARGET.
7718 
7719    If TARGET is CONST0_RTX, it means that the value will be ignored.
7720 
7721    If TMODE is not VOIDmode, it suggests generating the
7722    result in mode TMODE.  But this is done only when convenient.
7723    Otherwise, TMODE is ignored and the value generated in its natural mode.
7724    Otherwise, TMODE is ignored and the value is generated in its natural mode.
7725    the rtx returned may not have mode TMODE.
7726 
7727    Note that TARGET may have neither TMODE nor MODE.  In that case, it
7728    probably will not be used.
7729 
7730    If MODIFIER is EXPAND_SUM then when EXP is an addition
7731    we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7732    or a nest of (PLUS ...) and (MINUS ...) where the terms are
7733    products as above, or REG or MEM, or constant.
7734    Ordinarily in such cases we would output mul or add instructions
7735    and then return a pseudo reg containing the sum.
7736 
7737    EXPAND_INITIALIZER is much like EXPAND_SUM except that
7738    it also marks a label as absolutely required (it can't be dead).
7739    It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7740    This is used for outputting expressions used in initializers.
7741 
7742    EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7743    with a constant address even if that address is not normally legitimate.
7744    EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7745 
7746    EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7747    a call parameter.  Such targets require special care as we haven't yet
7748    marked TARGET so that it's safe from being trashed by libcalls.  We
7749    don't want to use TARGET for anything but the final result;
7750    Intermediate values must go elsewhere.   Additionally, calls to
7751    intermediate values must go elsewhere.  Additionally, calls to
7752 
7753    If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7754    address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7755    DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
7756    COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7757    recursively.  */
7758 
7759 rtx
7760 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7761 		  enum expand_modifier modifier, rtx *alt_rtl)
7762 {
7763   rtx ret;
7764 
7765   /* Handle ERROR_MARK before anybody tries to access its type.  */
7766   if (TREE_CODE (exp) == ERROR_MARK
7767       || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7768     {
7769       ret = CONST0_RTX (tmode);
7770       return ret ? ret : const0_rtx;
7771     }
7772 
7773   /* If this is an expression of some kind and it has an associated line
7774      number, then emit the line number before expanding the expression.
7775 
7776      We need to save and restore the file and line information so that
7777      errors discovered during expansion are emitted with the right
7778      information.  It would be better if the diagnostic routines
7779      used the file/line information embedded in the tree nodes rather
7780      than globals.  */
7781   if (cfun && EXPR_HAS_LOCATION (exp))
7782     {
7783       location_t saved_location = input_location;
7784       location_t saved_curr_loc = get_curr_insn_source_location ();
7785       tree saved_block = get_curr_insn_block ();
7786       input_location = EXPR_LOCATION (exp);
7787       set_curr_insn_source_location (input_location);
7788 
7789       /* Record where the insns produced belong.  */
7790       set_curr_insn_block (TREE_BLOCK (exp));
7791 
7792       ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7793 
7794       input_location = saved_location;
7795       set_curr_insn_block (saved_block);
7796       set_curr_insn_source_location (saved_curr_loc);
7797     }
7798   else
7799     {
7800       ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7801     }
7802 
7803   return ret;
7804 }
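/* To make the MODIFIER contract above concrete (an informal sketch):
   expanding an address computation such as "p + i*4" with EXPAND_NORMAL
   yields a pseudo register holding the finished sum, whereas expanding
   it with EXPAND_SUM may instead return an un-emitted form such as
   (plus (reg p) (mult (reg i) (const_int 4))), leaving it to callers
   such as memory_address or force_operand to decide how much of the
   arithmetic really needs instructions.  */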
7805 
7806 rtx
7807 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7808 		    enum expand_modifier modifier)
7809 {
7810   rtx op0, op1, op2, temp;
7811   tree type;
7812   int unsignedp;
7813   enum machine_mode mode;
7814   enum tree_code code = ops->code;
7815   optab this_optab;
7816   rtx subtarget, original_target;
7817   int ignore;
7818   bool reduce_bit_field;
7819   location_t loc = ops->location;
7820   tree treeop0, treeop1, treeop2;
7821 #define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
7822 				 ? reduce_to_bit_field_precision ((expr), \
7823 								  target, \
7824 								  type)	  \
7825 				 : (expr))
7826 
7827   type = ops->type;
7828   mode = TYPE_MODE (type);
7829   unsignedp = TYPE_UNSIGNED (type);
7830 
7831   treeop0 = ops->op0;
7832   treeop1 = ops->op1;
7833   treeop2 = ops->op2;
7834 
7835   /* We should be called only on simple (binary or unary) expressions,
7836      exactly those that are valid in gimple expressions that aren't
7837      GIMPLE_SINGLE_RHS (or invalid).  */
7838   gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7839 	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7840 	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7841 
7842   ignore = (target == const0_rtx
7843 	    || ((CONVERT_EXPR_CODE_P (code)
7844 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7845 		&& TREE_CODE (type) == VOID_TYPE));
7846 
7847   /* We should be called only if we need the result.  */
7848   gcc_assert (!ignore);
7849 
7850   /* An operation in what may be a bit-field type needs the
7851      result to be reduced to the precision of the bit-field type,
7852      which is narrower than that of the type's mode.  */
7853   reduce_bit_field = (INTEGRAL_TYPE_P (type)
7854 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7855 
7856   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7857     target = 0;
7858 
7859   /* Use subtarget as the target for operand 0 of a binary operation.  */
7860   subtarget = get_subtarget (target);
7861   original_target = target;
7862 
7863   switch (code)
7864     {
7865     case NON_LVALUE_EXPR:
7866     case PAREN_EXPR:
7867     CASE_CONVERT:
7868       if (treeop0 == error_mark_node)
7869 	return const0_rtx;
7870 
7871       if (TREE_CODE (type) == UNION_TYPE)
7872 	{
7873 	  tree valtype = TREE_TYPE (treeop0);
7874 
7875 	  /* If both input and output are BLKmode, this conversion isn't doing
7876 	     anything except possibly changing memory attribute.  */
7877 	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7878 	    {
7879 	      rtx result = expand_expr (treeop0, target, tmode,
7880 					modifier);
7881 
7882 	      result = copy_rtx (result);
7883 	      set_mem_attributes (result, type, 0);
7884 	      return result;
7885 	    }
7886 
7887 	  if (target == 0)
7888 	    {
7889 	      if (TYPE_MODE (type) != BLKmode)
7890 		target = gen_reg_rtx (TYPE_MODE (type));
7891 	      else
7892 		target = assign_temp (type, 0, 1, 1);
7893 	    }
7894 
7895 	  if (MEM_P (target))
7896 	    /* Store data into beginning of memory target.  */
7897 	    store_expr (treeop0,
7898 			adjust_address (target, TYPE_MODE (valtype), 0),
7899 			modifier == EXPAND_STACK_PARM,
7900 			false);
7901 
7902 	  else
7903 	    {
7904 	      gcc_assert (REG_P (target));
7905 
7906 	      /* Store this field into a union of the proper type.  */
7907 	      store_field (target,
7908 			   MIN ((int_size_in_bytes (TREE_TYPE
7909 						    (treeop0))
7910 				 * BITS_PER_UNIT),
7911 				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7912 			   0, 0, 0, TYPE_MODE (valtype), treeop0,
7913 			   type, 0, false);
7914 	    }
7915 
7916 	  /* Return the entire union.  */
7917 	  return target;
7918 	}
7919 
7920       if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7921 	{
7922 	  op0 = expand_expr (treeop0, target, VOIDmode,
7923 			     modifier);
7924 
7925 	  /* If the signedness of the conversion differs and OP0 is
7926 	     a promoted SUBREG, clear that indication since we now
7927 	     have to do the proper extension.  */
7928 	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7929 	      && GET_CODE (op0) == SUBREG)
7930 	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7931 
7932 	  return REDUCE_BIT_FIELD (op0);
7933 	}
7934 
7935       op0 = expand_expr (treeop0, NULL_RTX, mode,
7936 			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7937       if (GET_MODE (op0) == mode)
7938 	;
7939 
7940       /* If OP0 is a constant, just convert it into the proper mode.  */
7941       else if (CONSTANT_P (op0))
7942 	{
7943 	  tree inner_type = TREE_TYPE (treeop0);
7944 	  enum machine_mode inner_mode = GET_MODE (op0);
7945 
7946 	  if (inner_mode == VOIDmode)
7947 	    inner_mode = TYPE_MODE (inner_type);
7948 
7949 	  if (modifier == EXPAND_INITIALIZER)
7950 	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
7951 				       subreg_lowpart_offset (mode,
7952 							      inner_mode));
7953 	  else
7954 	    op0 = convert_modes (mode, inner_mode, op0,
7955 				 TYPE_UNSIGNED (inner_type));
7956 	}
7957 
7958       else if (modifier == EXPAND_INITIALIZER)
7959 	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7960 
7961       else if (target == 0)
7962 	op0 = convert_to_mode (mode, op0,
7963 			       TYPE_UNSIGNED (TREE_TYPE
7964 					      (treeop0)));
7965       else
7966 	{
7967 	  convert_move (target, op0,
7968 			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7969 	  op0 = target;
7970 	}
7971 
7972       return REDUCE_BIT_FIELD (op0);
7973 
7974     case ADDR_SPACE_CONVERT_EXPR:
7975       {
7976 	tree treeop0_type = TREE_TYPE (treeop0);
7977 	addr_space_t as_to;
7978 	addr_space_t as_from;
7979 
7980 	gcc_assert (POINTER_TYPE_P (type));
7981 	gcc_assert (POINTER_TYPE_P (treeop0_type));
7982 
7983 	as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7984 	as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7985 
7986         /* Conversions between pointers to the same address space should
7987 	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
7988 	gcc_assert (as_to != as_from);
7989 
7990         /* Ask target code to handle conversion between pointers
7991 	   to overlapping address spaces.  */
7992 	if (targetm.addr_space.subset_p (as_to, as_from)
7993 	    || targetm.addr_space.subset_p (as_from, as_to))
7994 	  {
7995 	    op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7996 	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7997 	    gcc_assert (op0);
7998 	    return op0;
7999 	  }
8000 
8001 	/* For disjoint address spaces, converting anything but
8002 	   a null pointer invokes undefined behaviour.  We simply
8003 	   always return a null pointer here.  */
8004 	return CONST0_RTX (mode);
8005       }
8006 
8007     case POINTER_PLUS_EXPR:
8008       /* Even though the sizetype mode and the pointer's mode can be different,
8009          expand is able to handle this correctly and get the correct result out
8010          of the PLUS_EXPR code.  */
8011       /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8012          if sizetype precision is smaller than pointer precision.  */
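      /* E.g. with a 32-bit sizetype and 64-bit pointers, an offset of
	 (sizetype) -4 must be extended to -4 rather than 0xfffffffc,
	 which the detour through the signed ssizetype below achieves.  */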
8013       if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8014 	treeop1 = fold_convert_loc (loc, type,
8015 				    fold_convert_loc (loc, ssizetype,
8016 						      treeop1));
8017     case PLUS_EXPR:
8018       /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8019 	 something else, make sure we add the register to the constant and
8020 	 then to the other thing.  This case can occur during strength
8021 	 reduction and doing it this way will produce better code if the
8022 	 frame pointer or argument pointer is eliminated.
8023 
8024 	 fold-const.c will ensure that the constant is always in the inner
8025 	 PLUS_EXPR, so the only case we need to do anything about is if
8026 	 sp, ap, or fp is our second argument, in which case we must swap
8027 	 the innermost first argument and our second argument.  */
8028 
8029       if (TREE_CODE (treeop0) == PLUS_EXPR
8030 	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8031 	  && TREE_CODE (treeop1) == VAR_DECL
8032 	  && (DECL_RTL (treeop1) == frame_pointer_rtx
8033 	      || DECL_RTL (treeop1) == stack_pointer_rtx
8034 	      || DECL_RTL (treeop1) == arg_pointer_rtx))
8035 	{
8036 	  tree t = treeop1;
8037 
8038 	  treeop1 = TREE_OPERAND (treeop0, 0);
8039 	  TREE_OPERAND (treeop0, 0) = t;
8040 	}
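      /* For instance, if fold-const.c produced (x + 4) + fp, the swap above
	 turns it into (fp + 4) + x, keeping the constant next to the frame
	 pointer as described above.  */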
8041 
8042       /* If the result is to be ptr_mode and we are adding an integer to
8043 	 something, we might be forming a constant.  So try to use
8044 	 plus_constant.  If it produces a sum and we can't accept it,
8045 	 use force_operand.  This allows P = &ARR[const] to generate
8046 	 efficient code on machines where a SYMBOL_REF is not a valid
8047 	 address.
8048 
8049 	 If this is an EXPAND_SUM call, always return the sum.  */
8050       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8051 	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8052 	{
8053 	  if (modifier == EXPAND_STACK_PARM)
8054 	    target = 0;
8055 	  if (TREE_CODE (treeop0) == INTEGER_CST
8056 	      && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8057 	      && TREE_CONSTANT (treeop1))
8058 	    {
8059 	      rtx constant_part;
8060 
8061 	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
8062 				 EXPAND_SUM);
8063 	      /* Use immed_double_const to ensure that the constant is
8064 		 truncated according to the mode of OP1, then sign extended
8065 		 to a HOST_WIDE_INT.  Using the constant directly can result
8066 		 in non-canonical RTL in a 64x32 cross compile.  */
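	      /* E.g. on a 64-bit host an SImode constant 0x80000000 must be
		 represented as the sign-extended CONST_INT -0x80000000;
		 using the low word directly would leave the upper bits
		 zero and produce a non-canonical CONST_INT.  */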
8067 	      constant_part
8068 		= immed_double_const (TREE_INT_CST_LOW (treeop0),
8069 				      (HOST_WIDE_INT) 0,
8070 				      TYPE_MODE (TREE_TYPE (treeop1)));
8071 	      op1 = plus_constant (op1, INTVAL (constant_part));
8072 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8073 		op1 = force_operand (op1, target);
8074 	      return REDUCE_BIT_FIELD (op1);
8075 	    }
8076 
8077 	  else if (TREE_CODE (treeop1) == INTEGER_CST
8078 		   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8079 		   && TREE_CONSTANT (treeop0))
8080 	    {
8081 	      rtx constant_part;
8082 
8083 	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
8084 				 (modifier == EXPAND_INITIALIZER
8085 				 ? EXPAND_INITIALIZER : EXPAND_SUM));
8086 	      if (! CONSTANT_P (op0))
8087 		{
8088 		  op1 = expand_expr (treeop1, NULL_RTX,
8089 				     VOIDmode, modifier);
8090 		  /* Return a PLUS if modifier says it's OK.  */
8091 		  if (modifier == EXPAND_SUM
8092 		      || modifier == EXPAND_INITIALIZER)
8093 		    return simplify_gen_binary (PLUS, mode, op0, op1);
8094 		  goto binop2;
8095 		}
8096 	      /* Use immed_double_const to ensure that the constant is
8097 		 truncated according to the mode of OP1, then sign extended
8098 		 to a HOST_WIDE_INT.  Using the constant directly can result
8099 		 in non-canonical RTL in a 64x32 cross compile.  */
8100 	      constant_part
8101 		= immed_double_const (TREE_INT_CST_LOW (treeop1),
8102 				      (HOST_WIDE_INT) 0,
8103 				      TYPE_MODE (TREE_TYPE (treeop0)));
8104 	      op0 = plus_constant (op0, INTVAL (constant_part));
8105 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8106 		op0 = force_operand (op0, target);
8107 	      return REDUCE_BIT_FIELD (op0);
8108 	    }
8109 	}
8110 
8111       /* Use TER to expand pointer addition of a negated value
8112 	 as pointer subtraction.  */
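      /* That is, if treeop1 is an SSA name defined by _1 = -n, the sum
	 p + _1 is expanded below as the subtraction p - n.  */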
8113       if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8114 	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8115 	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8116 	  && TREE_CODE (treeop1) == SSA_NAME
8117 	  && TYPE_MODE (TREE_TYPE (treeop0))
8118 	     == TYPE_MODE (TREE_TYPE (treeop1)))
8119 	{
8120 	  gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8121 	  if (def)
8122 	    {
8123 	      treeop1 = gimple_assign_rhs1 (def);
8124 	      code = MINUS_EXPR;
8125 	      goto do_minus;
8126 	    }
8127 	}
8128 
8129       /* No sense saving up arithmetic to be done
8130 	 if it's all in the wrong mode to form part of an address.
8131 	 And force_operand won't know whether to sign-extend or
8132 	 zero-extend.  */
8133       if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8134 	  || mode != ptr_mode)
8135 	{
8136 	  expand_operands (treeop0, treeop1,
8137 			   subtarget, &op0, &op1, EXPAND_NORMAL);
8138 	  if (op0 == const0_rtx)
8139 	    return op1;
8140 	  if (op1 == const0_rtx)
8141 	    return op0;
8142 	  goto binop2;
8143 	}
8144 
8145       expand_operands (treeop0, treeop1,
8146 		       subtarget, &op0, &op1, modifier);
8147       return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8148 
8149     case MINUS_EXPR:
8150     do_minus:
8151       /* For initializers, we are allowed to return a MINUS of two
8152 	 symbolic constants.  Here we handle all cases when both operands
8153 	 are constant.  */
8154       /* Handle difference of two symbolic constants,
8155 	 for the sake of an initializer.  */
8156       if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8157 	  && really_constant_p (treeop0)
8158 	  && really_constant_p (treeop1))
8159 	{
8160 	  expand_operands (treeop0, treeop1,
8161 			   NULL_RTX, &op0, &op1, modifier);
8162 
8163 	  /* If the last operand is a CONST_INT, use plus_constant of
8164 	     the negated constant.  Else make the MINUS.  */
8165 	  if (CONST_INT_P (op1))
8166 	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8167 	  else
8168 	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8169 	}
8170 
8171       /* No sense saving up arithmetic to be done
8172 	 if it's all in the wrong mode to form part of an address.
8173 	 And force_operand won't know whether to sign-extend or
8174 	 zero-extend.  */
8175       if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8176 	  || mode != ptr_mode)
8177 	goto binop;
8178 
8179       expand_operands (treeop0, treeop1,
8180 		       subtarget, &op0, &op1, modifier);
8181 
8182       /* Convert A - const to A + (-const).  */
8183       if (CONST_INT_P (op1))
8184 	{
8185 	  op1 = negate_rtx (mode, op1);
8186 	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8187 	}
8188 
8189       goto binop2;
8190 
8191     case WIDEN_MULT_PLUS_EXPR:
8192     case WIDEN_MULT_MINUS_EXPR:
8193       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8194       op2 = expand_normal (treeop2);
8195       target = expand_widen_pattern_expr (ops, op0, op1, op2,
8196 					  target, unsignedp);
8197       return target;
8198 
8199     case WIDEN_MULT_EXPR:
8200       /* If first operand is constant, swap them.
8201 	 Thus the following special case checks need only
8202 	 check the second operand.  */
8203       if (TREE_CODE (treeop0) == INTEGER_CST)
8204 	{
8205 	  tree t1 = treeop0;
8206 	  treeop0 = treeop1;
8207 	  treeop1 = t1;
8208 	}
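      /* WIDEN_MULT_EXPR typically arises from source like
	 (long long) a * b with int operands: the operands keep their
	 narrow type and only the product is formed in the wide mode,
	 ideally via a widening-multiply pattern such as mulsidi3.  */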
8209 
8210       /* First, check if we have a multiplication of one signed and one
8211 	 unsigned operand.  */
8212       if (TREE_CODE (treeop1) != INTEGER_CST
8213 	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8214 	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8215 	{
8216 	  enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8217 	  this_optab = usmul_widen_optab;
8218 	  if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8219 		!= CODE_FOR_nothing)
8220 	    {
8221 	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8222 		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8223 				 EXPAND_NORMAL);
8224 	      else
8225 		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8226 				 EXPAND_NORMAL);
8227 	      goto binop3;
8228 	    }
8229 	}
8230       /* Check for a multiplication with matching signedness.  */
8231       else if ((TREE_CODE (treeop1) == INTEGER_CST
8232 		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8233 	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8234 		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8235 	{
8236 	  tree op0type = TREE_TYPE (treeop0);
8237 	  enum machine_mode innermode = TYPE_MODE (op0type);
8238 	  bool zextend_p = TYPE_UNSIGNED (op0type);
8239 	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8240 	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8241 
8242 	  if (TREE_CODE (treeop0) != INTEGER_CST)
8243 	    {
8244 	      if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8245 		    != CODE_FOR_nothing)
8246 		{
8247 		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8248 				   EXPAND_NORMAL);
8249 		  temp = expand_widening_mult (mode, op0, op1, target,
8250 					       unsignedp, this_optab);
8251 		  return REDUCE_BIT_FIELD (temp);
8252 		}
8253 	      if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8254 		    != CODE_FOR_nothing
8255 		  && innermode == word_mode)
8256 		{
8257 		  rtx htem, hipart;
8258 		  op0 = expand_normal (treeop0);
8259 		  if (TREE_CODE (treeop1) == INTEGER_CST)
8260 		    op1 = convert_modes (innermode, mode,
8261 					 expand_normal (treeop1), unsignedp);
8262 		  else
8263 		    op1 = expand_normal (treeop1);
8264 		  temp = expand_binop (mode, other_optab, op0, op1, target,
8265 				       unsignedp, OPTAB_LIB_WIDEN);
8266 		  hipart = gen_highpart (innermode, temp);
8267 		  htem = expand_mult_highpart_adjust (innermode, hipart,
8268 						      op0, op1, hipart,
8269 						      zextend_p);
8270 		  if (htem != hipart)
8271 		    emit_move_insn (hipart, htem);
8272 		  return REDUCE_BIT_FIELD (temp);
8273 		}
8274 	    }
8275 	}
8276       treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8277       treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8278       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8279       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8280 
8281     case FMA_EXPR:
8282       {
8283 	optab opt = fma_optab;
8284 	gimple def0, def2;
8285 
8286 	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8287 	   call.  */
8288 	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8289 	  {
8290 	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8291 	    tree call_expr;
8292 
8293 	    gcc_assert (fn != NULL_TREE);
8294 	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8295 	    return expand_builtin (call_expr, target, subtarget, mode, false);
8296 	  }
8297 
8298 	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8299 	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
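	/* A negated multiplicand or addend selects a fused variant below:
	   fma (-a, b, c) uses fnma, fma (a, b, -c) uses fms, and
	   fma (-a, b, -c) uses fnms, folding the negations into the
	   machine instruction when the corresponding optab exists.  */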
8300 
8301 	op0 = op2 = NULL;
8302 
8303 	if (def0 && def2
8304 	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8305 	  {
8306 	    opt = fnms_optab;
8307 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8308 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8309 	  }
8310 	else if (def0
8311 		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8312 	  {
8313 	    opt = fnma_optab;
8314 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8315 	  }
8316 	else if (def2
8317 		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8318 	  {
8319 	    opt = fms_optab;
8320 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8321 	  }
8322 
8323 	if (op0 == NULL)
8324 	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8325 	if (op2 == NULL)
8326 	  op2 = expand_normal (treeop2);
8327 	op1 = expand_normal (treeop1);
8328 
8329 	return expand_ternary_op (TYPE_MODE (type), opt,
8330 				  op0, op1, op2, target, 0);
8331       }
8332 
8333     case MULT_EXPR:
8334       /* If this is a fixed-point operation, then we cannot use the code
8335 	 below because "expand_mult" doesn't support sat/no-sat fixed-point
8336          multiplications.   */
8337       if (ALL_FIXED_POINT_MODE_P (mode))
8338 	goto binop;
8339 
8340       /* If first operand is constant, swap them.
8341 	 Thus the following special case checks need only
8342 	 check the second operand.  */
8343       if (TREE_CODE (treeop0) == INTEGER_CST)
8344 	{
8345 	  tree t1 = treeop0;
8346 	  treeop0 = treeop1;
8347 	  treeop1 = t1;
8348 	}
8349 
8350       /* Attempt to return something suitable for generating an
8351 	 indexed address, for machines that support that.  */
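      /* E.g. when the caller is building an address for a[i], returning the
	 bare (mult (reg) (const_int 4)) lets it be folded into an indexed
	 address like (plus (reg) (mult (reg) (const_int 4))) instead of
	 being computed into a separate register first.  */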
8352 
8353       if (modifier == EXPAND_SUM && mode == ptr_mode
8354 	  && host_integerp (treeop1, 0))
8355 	{
8356 	  tree exp1 = treeop1;
8357 
8358 	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
8359 			     EXPAND_SUM);
8360 
8361 	  if (!REG_P (op0))
8362 	    op0 = force_operand (op0, NULL_RTX);
8363 	  if (!REG_P (op0))
8364 	    op0 = copy_to_mode_reg (mode, op0);
8365 
8366 	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8367 			       gen_int_mode (tree_low_cst (exp1, 0),
8368 					     TYPE_MODE (TREE_TYPE (exp1)))));
8369 	}
8370 
8371       if (modifier == EXPAND_STACK_PARM)
8372 	target = 0;
8373 
8374       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8375       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8376 
8377     case TRUNC_DIV_EXPR:
8378     case FLOOR_DIV_EXPR:
8379     case CEIL_DIV_EXPR:
8380     case ROUND_DIV_EXPR:
8381     case EXACT_DIV_EXPR:
8382       /* If this is a fixed-point operation, then we cannot use the code
8383 	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8384          divisions.   */
8385       if (ALL_FIXED_POINT_MODE_P (mode))
8386 	goto binop;
8387 
8388       if (modifier == EXPAND_STACK_PARM)
8389 	target = 0;
8390       /* Possible optimization: compute the dividend with EXPAND_SUM
8391 	 then if the divisor is constant can optimize the case
8392 	 where some terms of the dividend have coeffs divisible by it.  */
8393       expand_operands (treeop0, treeop1,
8394 		       subtarget, &op0, &op1, EXPAND_NORMAL);
8395       return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8396 
8397     case RDIV_EXPR:
8398       goto binop;
8399 
8400     case TRUNC_MOD_EXPR:
8401     case FLOOR_MOD_EXPR:
8402     case CEIL_MOD_EXPR:
8403     case ROUND_MOD_EXPR:
8404       if (modifier == EXPAND_STACK_PARM)
8405 	target = 0;
8406       expand_operands (treeop0, treeop1,
8407 		       subtarget, &op0, &op1, EXPAND_NORMAL);
8408       return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8409 
8410     case FIXED_CONVERT_EXPR:
8411       op0 = expand_normal (treeop0);
8412       if (target == 0 || modifier == EXPAND_STACK_PARM)
8413 	target = gen_reg_rtx (mode);
8414 
8415       if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8416 	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8417           || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8418 	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8419       else
8420 	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8421       return target;
8422 
8423     case FIX_TRUNC_EXPR:
8424       op0 = expand_normal (treeop0);
8425       if (target == 0 || modifier == EXPAND_STACK_PARM)
8426 	target = gen_reg_rtx (mode);
8427       expand_fix (target, op0, unsignedp);
8428       return target;
8429 
8430     case FLOAT_EXPR:
8431       op0 = expand_normal (treeop0);
8432       if (target == 0 || modifier == EXPAND_STACK_PARM)
8433 	target = gen_reg_rtx (mode);
8434       /* expand_float can't figure out what to do if FROM has VOIDmode.
8435 	 So give it the correct mode.  With -O, cse will optimize this.  */
8436       if (GET_MODE (op0) == VOIDmode)
8437 	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8438 				op0);
8439       expand_float (target, op0,
8440 		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8441       return target;
8442 
8443     case NEGATE_EXPR:
8444       op0 = expand_expr (treeop0, subtarget,
8445 			 VOIDmode, EXPAND_NORMAL);
8446       if (modifier == EXPAND_STACK_PARM)
8447 	target = 0;
8448       temp = expand_unop (mode,
8449       			  optab_for_tree_code (NEGATE_EXPR, type,
8450 					       optab_default),
8451 			  op0, target, 0);
8452       gcc_assert (temp);
8453       return REDUCE_BIT_FIELD (temp);
8454 
8455     case ABS_EXPR:
8456       op0 = expand_expr (treeop0, subtarget,
8457 			 VOIDmode, EXPAND_NORMAL);
8458       if (modifier == EXPAND_STACK_PARM)
8459 	target = 0;
8460 
8461       /* ABS_EXPR is not valid for complex arguments.  */
8462       gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8463 		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8464 
8465       /* Unsigned abs is simply the operand.  Testing here means we don't
8466 	 risk generating incorrect code below.  */
8467       if (TYPE_UNSIGNED (type))
8468 	return op0;
8469 
8470       return expand_abs (mode, op0, target, unsignedp,
8471 			 safe_from_p (target, treeop0, 1));
8472 
8473     case MAX_EXPR:
8474     case MIN_EXPR:
8475       target = original_target;
8476       if (target == 0
8477 	  || modifier == EXPAND_STACK_PARM
8478 	  || (MEM_P (target) && MEM_VOLATILE_P (target))
8479 	  || GET_MODE (target) != mode
8480 	  || (REG_P (target)
8481 	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8482 	target = gen_reg_rtx (mode);
8483       expand_operands (treeop0, treeop1,
8484 		       target, &op0, &op1, EXPAND_NORMAL);
8485 
8486       /* First try to do it with a special MIN or MAX instruction.
8487 	 If that does not win, use a conditional jump to select the proper
8488 	 value.  */
8489       this_optab = optab_for_tree_code (code, type, optab_default);
8490       temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8491 			   OPTAB_WIDEN);
8492       if (temp != 0)
8493 	return temp;
8494 
8495       /* At this point, a MEM target is no longer useful; we will get better
8496 	 code without it.  */
8497 
8498       if (! REG_P (target))
8499 	target = gen_reg_rtx (mode);
8500 
8501       /* If op1 was placed in target, swap op0 and op1.  */
8502       if (target != op0 && target == op1)
8503 	{
8504 	  temp = op0;
8505 	  op0 = op1;
8506 	  op1 = temp;
8507 	}
8508 
8509       /* We generate better code and avoid problems with op1 mentioning
8510 	 target by forcing op1 into a pseudo if it isn't a constant.  */
8511       if (! CONSTANT_P (op1))
8512 	op1 = force_reg (mode, op1);
8513 
8514       {
8515 	enum rtx_code comparison_code;
8516 	rtx cmpop1 = op1;
8517 
8518 	if (code == MAX_EXPR)
8519 	  comparison_code = unsignedp ? GEU : GE;
8520 	else
8521 	  comparison_code = unsignedp ? LEU : LE;
8522 
8523 	/* Canonicalize to comparisons against 0.  */
8524 	if (op1 == const1_rtx)
8525 	  {
8526 	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8527 	       or (a != 0 ? a : 1) for unsigned.
8528 	       For MIN we are safe converting (a <= 1 ? a : 1)
8529 	       into (a <= 0 ? a : 1)  */
8530 	    cmpop1 = const0_rtx;
8531 	    if (code == MAX_EXPR)
8532 	      comparison_code = unsignedp ? NE : GT;
8533 	  }
8534 	if (op1 == constm1_rtx && !unsignedp)
8535 	  {
8536 	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8537 	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8538 	    cmpop1 = const0_rtx;
8539 	    if (code == MIN_EXPR)
8540 	      comparison_code = LT;
8541 	  }
8542 #ifdef HAVE_conditional_move
8543 	/* Use a conditional move if possible.  */
8544 	if (can_conditionally_move_p (mode))
8545 	  {
8546 	    rtx insn;
8547 
8548 	    /* ??? Same problem as in expmed.c: emit_conditional_move
8549 	       forces a stack adjustment via compare_from_rtx, and we
8550 	       lose the stack adjustment if the sequence we are about
8551 	       to create is discarded.  */
8552 	    do_pending_stack_adjust ();
8553 
8554 	    start_sequence ();
8555 
8556 	    /* Try to emit the conditional move.  */
8557 	    insn = emit_conditional_move (target, comparison_code,
8558 					  op0, cmpop1, mode,
8559 					  op0, op1, mode,
8560 					  unsignedp);
8561 
8562 	    /* If we could do the conditional move, emit the sequence,
8563 	       and return.  */
8564 	    if (insn)
8565 	      {
8566 		rtx seq = get_insns ();
8567 		end_sequence ();
8568 		emit_insn (seq);
8569 		return target;
8570 	      }
8571 
8572 	    /* Otherwise discard the sequence and fall back to code with
8573 	       branches.  */
8574 	    end_sequence ();
8575 	  }
8576 #endif
8577 	if (target != op0)
8578 	  emit_move_insn (target, op0);
8579 
8580 	temp = gen_label_rtx ();
8581 	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8582 				 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8583 				 -1);
8584       }
8585       emit_move_insn (target, op1);
8586       emit_label (temp);
8587       return target;
8588 
8589     case BIT_NOT_EXPR:
8590       op0 = expand_expr (treeop0, subtarget,
8591 			 VOIDmode, EXPAND_NORMAL);
8592       if (modifier == EXPAND_STACK_PARM)
8593 	target = 0;
8594       /* In case we have to reduce the result to bitfield precision
8595 	 for an unsigned bitfield, expand this as XOR with a proper constant
8596 	 instead.  */
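      /* For a 3-bit unsigned field this computes op0 ^ 7, which equals the
	 one's complement within the field's precision and leaves the bits
	 above the field clear.  */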
8597       if (reduce_bit_field && TYPE_UNSIGNED (type))
8598 	temp = expand_binop (mode, xor_optab, op0,
8599 			     immed_double_int_const
8600 			       (double_int_mask (TYPE_PRECISION (type)), mode),
8601 			     target, 1, OPTAB_LIB_WIDEN);
8602       else
8603 	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8604       gcc_assert (temp);
8605       return temp;
8606 
8607       /* ??? Can optimize bitwise operations with one arg constant.
8608 	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8609 	 and (a bitwise1 b) bitwise2 b (etc)
8610 	 but that is probably not worth while.  */
8611 
8612     case BIT_AND_EXPR:
8613     case BIT_IOR_EXPR:
8614     case BIT_XOR_EXPR:
8615       goto binop;
8616 
8617     case LROTATE_EXPR:
8618     case RROTATE_EXPR:
8619       gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8620 		  || (GET_MODE_PRECISION (TYPE_MODE (type))
8621 		      == TYPE_PRECISION (type)));
8622       /* fall through */
8623 
8624     case LSHIFT_EXPR:
8625     case RSHIFT_EXPR:
8626       /* If this is a fixed-point operation, then we cannot use the code
8627 	 below because "expand_shift" doesn't support sat/no-sat fixed-point
8628          shifts.   */
8629       if (ALL_FIXED_POINT_MODE_P (mode))
8630 	goto binop;
8631 
8632       if (! safe_from_p (subtarget, treeop1, 1))
8633 	subtarget = 0;
8634       if (modifier == EXPAND_STACK_PARM)
8635 	target = 0;
8636       op0 = expand_expr (treeop0, subtarget,
8637 			 VOIDmode, EXPAND_NORMAL);
8638       temp = expand_variable_shift (code, mode, op0, treeop1, target,
8639 				    unsignedp);
8640       if (code == LSHIFT_EXPR)
8641 	temp = REDUCE_BIT_FIELD (temp);
8642       return temp;
8643 
8644       /* Could determine the answer when only additive constants differ.  Also,
8645 	 the addition of one can be handled by changing the condition.  */
8646     case LT_EXPR:
8647     case LE_EXPR:
8648     case GT_EXPR:
8649     case GE_EXPR:
8650     case EQ_EXPR:
8651     case NE_EXPR:
8652     case UNORDERED_EXPR:
8653     case ORDERED_EXPR:
8654     case UNLT_EXPR:
8655     case UNLE_EXPR:
8656     case UNGT_EXPR:
8657     case UNGE_EXPR:
8658     case UNEQ_EXPR:
8659     case LTGT_EXPR:
8660       temp = do_store_flag (ops,
8661 			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8662 			    tmode != VOIDmode ? tmode : mode);
8663       if (temp)
8664 	return temp;
8665 
8666       /* Use a compare and a jump for BLKmode comparisons, or for function
8667 	 type comparisons if HAVE_canonicalize_funcptr_for_compare is defined.  */
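      /* When do_store_flag cannot handle the comparison, the code below
	 materializes the result by hand, roughly:
	     target = 0;
	     if (!(op0 <cmp> op1)) goto L;
	     target = 1;   (or -1 for a signed 1-bit type)
	   L:
	 using a conditional jump instead of a store-flag insn.  */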
8668 
8669       if ((target == 0
8670 	   || modifier == EXPAND_STACK_PARM
8671 	   || ! safe_from_p (target, treeop0, 1)
8672 	   || ! safe_from_p (target, treeop1, 1)
8673 	   /* Make sure we don't have a hard reg (such as function's return
8674 	      value) live across basic blocks, if not optimizing.  */
8675 	   || (!optimize && REG_P (target)
8676 	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8677 	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8678 
8679       emit_move_insn (target, const0_rtx);
8680 
8681       op1 = gen_label_rtx ();
8682       jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8683 
8684       if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8685 	emit_move_insn (target, constm1_rtx);
8686       else
8687 	emit_move_insn (target, const1_rtx);
8688 
8689       emit_label (op1);
8690       return target;
8691 
8692     case COMPLEX_EXPR:
8693       /* Get the rtx code of the operands.  */
8694       op0 = expand_normal (treeop0);
8695       op1 = expand_normal (treeop1);
8696 
8697       if (!target)
8698 	target = gen_reg_rtx (TYPE_MODE (type));
8699       else
8700 	/* If target overlaps with op1, then either we need to force
8701 	   op1 into a pseudo (if target also overlaps with op0),
8702 	   or write the complex parts in reverse order.  */
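	/* E.g. for c = COMPLEX_EXPR <x, REALPART_EXPR <c>>, op1 lives in the
	   real half of TARGET, so the imaginary part must be written first;
	   if both halves would be clobbered, op1 is copied into a fresh
	   pseudo instead.  */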
8703 	switch (GET_CODE (target))
8704 	  {
8705 	  case CONCAT:
8706 	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8707 	      {
8708 		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8709 		  {
8710 		  complex_expr_force_op1:
8711 		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8712 		    emit_move_insn (temp, op1);
8713 		    op1 = temp;
8714 		    break;
8715 		  }
8716 	      complex_expr_swap_order:
8717 		/* Move the imaginary (op1) and real (op0) parts to their
8718 		   location.  */
8719 		write_complex_part (target, op1, true);
8720 		write_complex_part (target, op0, false);
8721 
8722 		return target;
8723 	      }
8724 	    break;
8725 	  case MEM:
8726 	    temp = adjust_address_nv (target,
8727 				      GET_MODE_INNER (GET_MODE (target)), 0);
8728 	    if (reg_overlap_mentioned_p (temp, op1))
8729 	      {
8730 		enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8731 		temp = adjust_address_nv (target, imode,
8732 					  GET_MODE_SIZE (imode));
8733 		if (reg_overlap_mentioned_p (temp, op0))
8734 		  goto complex_expr_force_op1;
8735 		goto complex_expr_swap_order;
8736 	      }
8737 	    break;
8738 	  default:
8739 	    if (reg_overlap_mentioned_p (target, op1))
8740 	      {
8741 		if (reg_overlap_mentioned_p (target, op0))
8742 		  goto complex_expr_force_op1;
8743 		goto complex_expr_swap_order;
8744 	      }
8745 	    break;
8746 	  }
8747 
8748       /* Move the real (op0) and imaginary (op1) parts to their location.  */
8749       write_complex_part (target, op0, false);
8750       write_complex_part (target, op1, true);
8751 
8752       return target;
8753 
8754     case WIDEN_SUM_EXPR:
8755       {
8756         tree oprnd0 = treeop0;
8757         tree oprnd1 = treeop1;
8758 
8759         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8760         target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8761                                             target, unsignedp);
8762         return target;
8763       }
8764 
8765     case REDUC_MAX_EXPR:
8766     case REDUC_MIN_EXPR:
8767     case REDUC_PLUS_EXPR:
8768       {
8769         op0 = expand_normal (treeop0);
8770         this_optab = optab_for_tree_code (code, type, optab_default);
8771         temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8772         gcc_assert (temp);
8773         return temp;
8774       }
8775 
8776     case VEC_LSHIFT_EXPR:
8777     case VEC_RSHIFT_EXPR:
8778       {
8779 	target = expand_vec_shift_expr (ops, target);
8780 	return target;
8781       }
8782 
8783     case VEC_UNPACK_HI_EXPR:
8784     case VEC_UNPACK_LO_EXPR:
8785       {
8786 	op0 = expand_normal (treeop0);
8787 	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8788 					  target, unsignedp);
8789 	gcc_assert (temp);
8790 	return temp;
8791       }
8792 
8793     case VEC_UNPACK_FLOAT_HI_EXPR:
8794     case VEC_UNPACK_FLOAT_LO_EXPR:
8795       {
8796 	op0 = expand_normal (treeop0);
8797 	/* The signedness is determined from input operand.  */
8798 	temp = expand_widen_pattern_expr
8799 	  (ops, op0, NULL_RTX, NULL_RTX,
8800 	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8801 
8802 	gcc_assert (temp);
8803 	return temp;
8804       }
8805 
8806     case VEC_WIDEN_MULT_HI_EXPR:
8807     case VEC_WIDEN_MULT_LO_EXPR:
8808       {
8809 	tree oprnd0 = treeop0;
8810 	tree oprnd1 = treeop1;
8811 
8812 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8813 	target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8814 					    target, unsignedp);
8815 	gcc_assert (target);
8816 	return target;
8817       }
8818 
8819     case VEC_WIDEN_LSHIFT_HI_EXPR:
8820     case VEC_WIDEN_LSHIFT_LO_EXPR:
8821       {
8822         tree oprnd0 = treeop0;
8823         tree oprnd1 = treeop1;
8824 
8825         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8826         target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8827                                             target, unsignedp);
8828         gcc_assert (target);
8829         return target;
8830       }
8831 
8832     case VEC_PACK_TRUNC_EXPR:
8833     case VEC_PACK_SAT_EXPR:
8834     case VEC_PACK_FIX_TRUNC_EXPR:
8835       mode = TYPE_MODE (TREE_TYPE (treeop0));
8836       goto binop;
8837 
8838     case VEC_PERM_EXPR:
8839       expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
8840       op2 = expand_normal (treeop2);
8841 
8842       /* Careful here: if the target doesn't support integral vector modes,
8843 	 a constant selection vector could wind up smooshed into a normal
8844 	 integral constant.  */
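      /* E.g. a V4QI selector such as { 0, 1, 4, 5 } may have been folded
	 into a single integer constant; simplify_subreg below rebuilds it
	 as a CONST_VECTOR in an integer vector mode of the right shape.  */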
8845       if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
8846 	{
8847 	  tree sel_type = TREE_TYPE (treeop2);
8848 	  enum machine_mode vmode
8849 	    = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
8850 			       TYPE_VECTOR_SUBPARTS (sel_type));
8851 	  gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
8852 	  op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
8853 	  gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
8854 	}
8855       else
8856         gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
8857 
8858       temp = expand_vec_perm (mode, op0, op1, op2, target);
8859       gcc_assert (temp);
8860       return temp;
8861 
8862     case DOT_PROD_EXPR:
8863       {
8864 	tree oprnd0 = treeop0;
8865 	tree oprnd1 = treeop1;
8866 	tree oprnd2 = treeop2;
8867 	rtx op2;
8868 
8869 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8870 	op2 = expand_normal (oprnd2);
8871 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
8872 					    target, unsignedp);
8873 	return target;
8874       }
8875 
8876     case REALIGN_LOAD_EXPR:
8877       {
8878         tree oprnd0 = treeop0;
8879         tree oprnd1 = treeop1;
8880         tree oprnd2 = treeop2;
8881         rtx op2;
8882 
8883         this_optab = optab_for_tree_code (code, type, optab_default);
8884         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8885         op2 = expand_normal (oprnd2);
8886         temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8887 				  target, unsignedp);
8888         gcc_assert (temp);
8889         return temp;
8890       }
8891 
8892     case COND_EXPR:
8893       /* A COND_EXPR with its type being VOID_TYPE represents a
8894 	 conditional jump and is handled in
8895 	 expand_gimple_cond_expr.  */
8896       gcc_assert (!VOID_TYPE_P (type));
8897 
8898       /* Note that COND_EXPRs whose type is a structure or union
8899 	 are required to be constructed to contain assignments to
8900 	 a temporary variable, so that we can evaluate them here
8901 	 for side effect only.  If type is void, we must do likewise.  */
8902 
8903       gcc_assert (!TREE_ADDRESSABLE (type)
8904 		  && !ignore
8905 		  && TREE_TYPE (treeop1) != void_type_node
8906 		  && TREE_TYPE (treeop2) != void_type_node);
8907 
8908       /* If we are not to produce a result, we have no target.  Otherwise,
8909 	 if a target was specified use it; it will not be used as an
8910 	 intermediate target unless it is safe.  If no target, use a
8911 	 temporary.  */
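      /* The expansion below then has the shape
	     if (!cond) goto L0;  temp = <then>;  goto L1;
	   L0:  temp = <else>;
	   L1:
	 with TEMP holding the selected value on both paths.  */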
8912 
8913       if (modifier != EXPAND_STACK_PARM
8914 	  && original_target
8915 	  && safe_from_p (original_target, treeop0, 1)
8916 	  && GET_MODE (original_target) == mode
8917 #ifdef HAVE_conditional_move
8918 	  && (! can_conditionally_move_p (mode)
8919 	      || REG_P (original_target))
8920 #endif
8921 	  && !MEM_P (original_target))
8922 	temp = original_target;
8923       else
8924 	temp = assign_temp (type, 0, 0, 1);
8925 
8926       do_pending_stack_adjust ();
8927       NO_DEFER_POP;
8928       op0 = gen_label_rtx ();
8929       op1 = gen_label_rtx ();
8930       jumpifnot (treeop0, op0, -1);
8931       store_expr (treeop1, temp,
8932 		  modifier == EXPAND_STACK_PARM,
8933 		  false);
8934 
8935       emit_jump_insn (gen_jump (op1));
8936       emit_barrier ();
8937       emit_label (op0);
8938       store_expr (treeop2, temp,
8939 		  modifier == EXPAND_STACK_PARM,
8940 		  false);
8941 
8942       emit_label (op1);
8943       OK_DEFER_POP;
8944       return temp;
8945 
8946     case VEC_COND_EXPR:
8947       target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
8948       return target;
8949 
8950     default:
8951       gcc_unreachable ();
8952     }
8953 
8954   /* Here to do an ordinary binary operator.  */
8955  binop:
8956   expand_operands (treeop0, treeop1,
8957 		   subtarget, &op0, &op1, EXPAND_NORMAL);
8958  binop2:
8959   this_optab = optab_for_tree_code (code, type, optab_default);
8960  binop3:
8961   if (modifier == EXPAND_STACK_PARM)
8962     target = 0;
8963   temp = expand_binop (mode, this_optab, op0, op1, target,
8964 		       unsignedp, OPTAB_LIB_WIDEN);
8965   gcc_assert (temp);
8966   /* Bitwise operations do not need bitfield reduction as we expect their
8967      operands being properly truncated.  */
8968   if (code == BIT_XOR_EXPR
8969       || code == BIT_AND_EXPR
8970       || code == BIT_IOR_EXPR)
8971     return temp;
8972   return REDUCE_BIT_FIELD (temp);
8973 }
8974 #undef REDUCE_BIT_FIELD
8975 
8976 rtx
8977 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8978 		    enum expand_modifier modifier, rtx *alt_rtl)
8979 {
8980   rtx op0, op1, temp, decl_rtl;
8981   tree type;
8982   int unsignedp;
8983   enum machine_mode mode;
8984   enum tree_code code = TREE_CODE (exp);
8985   rtx subtarget, original_target;
8986   int ignore;
8987   tree context;
8988   bool reduce_bit_field;
8989   location_t loc = EXPR_LOCATION (exp);
8990   struct separate_ops ops;
8991   tree treeop0, treeop1, treeop2;
8992   tree ssa_name = NULL_TREE;
8993   gimple g;
8994 
8995   type = TREE_TYPE (exp);
8996   mode = TYPE_MODE (type);
8997   unsignedp = TYPE_UNSIGNED (type);
8998 
8999   treeop0 = treeop1 = treeop2 = NULL_TREE;
9000   if (!VL_EXP_CLASS_P (exp))
9001     switch (TREE_CODE_LENGTH (code))
9002       {
9003 	default:
9004 	case 3: treeop2 = TREE_OPERAND (exp, 2);
9005 	case 2: treeop1 = TREE_OPERAND (exp, 1);
9006 	case 1: treeop0 = TREE_OPERAND (exp, 0);
9007 	case 0: break;
9008       }
9009   ops.code = code;
9010   ops.type = type;
9011   ops.op0 = treeop0;
9012   ops.op1 = treeop1;
9013   ops.op2 = treeop2;
9014   ops.location = loc;
9015 
9016   ignore = (target == const0_rtx
9017 	    || ((CONVERT_EXPR_CODE_P (code)
9018 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9019 		&& TREE_CODE (type) == VOID_TYPE));
9020 
9021   /* An operation in what may be a bit-field type needs the
9022      result to be reduced to the precision of the bit-field type,
9023      which is narrower than that of the type's mode.  */
9024   reduce_bit_field = (!ignore
9025 		      && INTEGRAL_TYPE_P (type)
9026 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9027 
9028   /* If we are going to ignore this result, we need only do something
9029      if there is a side-effect somewhere in the expression.  If there
9030      is, short-circuit the most common cases here.  Note that we must
9031      not call expand_expr with anything but const0_rtx in case this
9032      is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
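  /* For instance, an expression statement whose value is unused still needs
     any contained calls or volatile accesses expanded; the cases below
     expand only the operands, with const0_rtx as the target, and discard
     the result.  */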
9033 
9034   if (ignore)
9035     {
9036       if (! TREE_SIDE_EFFECTS (exp))
9037 	return const0_rtx;
9038 
9039       /* Ensure we reference a volatile object even if value is ignored, but
9040 	 don't do this if all we are doing is taking its address.  */
9041       if (TREE_THIS_VOLATILE (exp)
9042 	  && TREE_CODE (exp) != FUNCTION_DECL
9043 	  && mode != VOIDmode && mode != BLKmode
9044 	  && modifier != EXPAND_CONST_ADDRESS)
9045 	{
9046 	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9047 	  if (MEM_P (temp))
9048 	    copy_to_reg (temp);
9049 	  return const0_rtx;
9050 	}
9051 
9052       if (TREE_CODE_CLASS (code) == tcc_unary
9053 	  || code == COMPONENT_REF || code == INDIRECT_REF)
9054 	return expand_expr (treeop0, const0_rtx, VOIDmode,
9055 			    modifier);
9056 
9057       else if (TREE_CODE_CLASS (code) == tcc_binary
9058 	       || TREE_CODE_CLASS (code) == tcc_comparison
9059 	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9060 	{
9061 	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9062 	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9063 	  return const0_rtx;
9064 	}
9065       else if (code == BIT_FIELD_REF)
9066 	{
9067 	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9068 	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9069 	  expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
9070 	  return const0_rtx;
9071 	}
9072 
9073       target = 0;
9074     }
9075 
9076   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9077     target = 0;
9078 
9079   /* Use subtarget as the target for operand 0 of a binary operation.  */
9080   subtarget = get_subtarget (target);
9081   original_target = target;
9082 
9083   switch (code)
9084     {
9085     case LABEL_DECL:
9086       {
9087 	tree function = decl_function_context (exp);
9088 
9089 	temp = label_rtx (exp);
9090 	temp = gen_rtx_LABEL_REF (Pmode, temp);
9091 
9092 	if (function != current_function_decl
9093 	    && function != 0)
9094 	  LABEL_REF_NONLOCAL_P (temp) = 1;
9095 
9096 	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9097 	return temp;
9098       }
9099 
9100     case SSA_NAME:
9101       /* ??? ivopts calls expander, without any preparation from
9102          out-of-ssa.  So fake instructions as if this was an access to the
9103 	 base variable.  This unnecessarily allocates a pseudo, see how we can
9104 	 reuse it, if partition base vars have it set already.  */
9105       if (!currently_expanding_to_rtl)
9106 	return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
9107 				   NULL);
9108 
9109       g = get_gimple_for_ssa_name (exp);
9110       /* For EXPAND_INITIALIZER try harder to get something simpler.  */
9111       if (g == NULL
9112 	  && modifier == EXPAND_INITIALIZER
9113 	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
9114 	  && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9115 	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9116 	g = SSA_NAME_DEF_STMT (exp);
9117       if (g)
9118 	return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
9119 				 modifier, NULL);
9120 
9121       ssa_name = exp;
9122       decl_rtl = get_rtx_for_ssa_name (ssa_name);
9123       exp = SSA_NAME_VAR (ssa_name);
9124       goto expand_decl_rtl;
9125 
9126     case PARM_DECL:
9127     case VAR_DECL:
9128       /* If a static var's type was incomplete when the decl was written,
9129 	 but the type is complete now, lay out the decl now.  */
9130       if (DECL_SIZE (exp) == 0
9131 	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9132 	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9133 	layout_decl (exp, 0);
9134 
9135       /* ... fall through ...  */
9136 
9137     case FUNCTION_DECL:
9138     case RESULT_DECL:
9139       decl_rtl = DECL_RTL (exp);
9140     expand_decl_rtl:
9141       gcc_assert (decl_rtl);
9142       decl_rtl = copy_rtx (decl_rtl);
9143       /* Record writes to register variables.  */
9144       if (modifier == EXPAND_WRITE
9145 	  && REG_P (decl_rtl)
9146 	  && HARD_REGISTER_P (decl_rtl))
9147         add_to_hard_reg_set (&crtl->asm_clobbers,
9148 			     GET_MODE (decl_rtl), REGNO (decl_rtl));
9149 
9150       /* Ensure the variable is marked as used even if it doesn't go through
9151 	 a parser.  If it hasn't been used yet, write out an external
9152 	 definition.  */
9153       if (! TREE_USED (exp))
9154 	{
9155 	  assemble_external (exp);
9156 	  TREE_USED (exp) = 1;
9157 	}
9158 
9159       /* Show we haven't gotten RTL for this yet.  */
9160       temp = 0;
9161 
9162       /* Variables inherited from containing functions should have
9163 	 been lowered by this point.  */
9164       context = decl_function_context (exp);
9165       gcc_assert (!context
9166 		  || context == current_function_decl
9167 		  || TREE_STATIC (exp)
9168 		  || DECL_EXTERNAL (exp)
9169 		  /* ??? C++ creates functions that are not TREE_STATIC.  */
9170 		  || TREE_CODE (exp) == FUNCTION_DECL);
9171 
9172       /* This is the case of an array whose size is to be determined
9173 	 from its initializer, while the initializer is still being parsed.
9174 	 See expand_decl.  */
9175 
9176       if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9177 	temp = validize_mem (decl_rtl);
9178 
9179       /* If DECL_RTL is memory, we are in the normal case and the
9180 	 address is not valid, get the address into a register.  */
9181 
9182       else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9183 	{
9184 	  if (alt_rtl)
9185 	    *alt_rtl = decl_rtl;
9186 	  decl_rtl = use_anchored_address (decl_rtl);
9187 	  if (modifier != EXPAND_CONST_ADDRESS
9188 	      && modifier != EXPAND_SUM
9189 	      && !memory_address_addr_space_p (DECL_MODE (exp),
9190 					       XEXP (decl_rtl, 0),
9191 					       MEM_ADDR_SPACE (decl_rtl)))
9192 	    temp = replace_equiv_address (decl_rtl,
9193 					  copy_rtx (XEXP (decl_rtl, 0)));
9194 	}
9195 
9196       /* If we got something, return it.  But first, set the alignment
9197 	 if the address is a register.  */
9198       if (temp != 0)
9199 	{
9200 	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9201 	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9202 
9203 	  return temp;
9204 	}
9205 
9206       /* If the mode of DECL_RTL does not match that of the decl,
9207 	 there are two cases: we are dealing with a BLKmode value
9208 	 that is returned in a register, or we are dealing with
9209 	 a promoted value.  In the latter case, return a SUBREG
9210 	 of the wanted mode, but mark it so that we know that it
9211 	 was already extended.  */
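      /* E.g. on a target whose PROMOTE_MODE widens HImode variables to
	 SImode registers, a short lives in an SImode pseudo and is read
	 back here as (subreg:HI (reg:SI ...)) with SUBREG_PROMOTED_VAR_P
	 set, so later code knows the value is already extended.  */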
9212       if (REG_P (decl_rtl)
9213 	  && DECL_MODE (exp) != BLKmode
9214 	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
9215 	{
9216 	  enum machine_mode pmode;
9217 
9218 	  /* Get the signedness to be used for this variable.  Ensure we get
9219 	     the same mode we got when the variable was declared.  */
9220 	  if (code == SSA_NAME
9221 	      && (g = SSA_NAME_DEF_STMT (ssa_name))
9222 	      && gimple_code (g) == GIMPLE_CALL)
9223 	    {
9224 	      gcc_assert (!gimple_call_internal_p (g));
9225 	      pmode = promote_function_mode (type, mode, &unsignedp,
9226 					     gimple_call_fntype (g),
9227 					     2);
9228 	    }
9229 	  else
9230 	    pmode = promote_decl_mode (exp, &unsignedp);
9231 	  gcc_assert (GET_MODE (decl_rtl) == pmode);
9232 
9233 	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
9234 	  SUBREG_PROMOTED_VAR_P (temp) = 1;
9235 	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9236 	  return temp;
9237 	}
9238 
9239       return decl_rtl;
9240 
9241     case INTEGER_CST:
9242       temp = immed_double_const (TREE_INT_CST_LOW (exp),
9243 				 TREE_INT_CST_HIGH (exp), mode);
9244 
9245       return temp;
9246 
9247     case VECTOR_CST:
9248       {
9249 	tree tmp = NULL_TREE;
9250 	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9251 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9252 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9253 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9254 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9255 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9256 	  return const_vector_from_tree (exp);
9257 	if (GET_MODE_CLASS (mode) == MODE_INT)
9258 	  {
9259 	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9260 	    if (type_for_mode)
9261 	      tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9262 	  }
9263 	if (!tmp)
9264 	  tmp = build_constructor_from_list (type,
9265 					     TREE_VECTOR_CST_ELTS (exp));
9266 	return expand_expr (tmp, ignore ? const0_rtx : target,
9267 			    tmode, modifier);
9268       }
9269 
9270     case CONST_DECL:
9271       return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9272 
9273     case REAL_CST:
9274       /* If optimized, generate immediate CONST_DOUBLE
9275 	 which will be turned into memory by reload if necessary.
9276 
9277 	 We used to force a register so that loop.c could see it.  But
9278 	 this does not allow gen_* patterns to perform optimizations with
9279 	 the constants.  It also produces two insns in cases like "x = 1.0;".
9280 	 On most machines, floating-point constants are not permitted in
9281 	 many insns, so we'd end up copying it to a register in any case.
9282 
9283 	 Now, we do the copying in expand_binop, if appropriate.  */
9284       return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9285 					   TYPE_MODE (TREE_TYPE (exp)));
9286 
9287     case FIXED_CST:
9288       return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9289 					   TYPE_MODE (TREE_TYPE (exp)));
9290 
9291     case COMPLEX_CST:
9292       /* Handle evaluating a complex constant in a CONCAT target.  */
9293       if (original_target && GET_CODE (original_target) == CONCAT)
9294 	{
9295 	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9296 	  rtx rtarg, itarg;
9297 
9298 	  rtarg = XEXP (original_target, 0);
9299 	  itarg = XEXP (original_target, 1);
9300 
9301 	  /* Move the real and imaginary parts separately.  */
9302 	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9303 	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9304 
9305 	  if (op0 != rtarg)
9306 	    emit_move_insn (rtarg, op0);
9307 	  if (op1 != itarg)
9308 	    emit_move_insn (itarg, op1);
9309 
9310 	  return original_target;
9311 	}
9312 
9313       /* ... fall through ...  */
9314 
9315     case STRING_CST:
9316       temp = expand_expr_constant (exp, 1, modifier);
9317 
9318       /* temp contains a constant address.
9319 	 On RISC machines where a constant address isn't valid,
9320 	 make some insns to get that address into a register.  */
9321       if (modifier != EXPAND_CONST_ADDRESS
9322 	  && modifier != EXPAND_INITIALIZER
9323 	  && modifier != EXPAND_SUM
9324 	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9325 					    MEM_ADDR_SPACE (temp)))
9326 	return replace_equiv_address (temp,
9327 				      copy_rtx (XEXP (temp, 0)));
9328       return temp;
9329 
9330     case SAVE_EXPR:
9331       {
9332 	tree val = treeop0;
9333 	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9334 
9335 	if (!SAVE_EXPR_RESOLVED_P (exp))
9336 	  {
9337 	    /* We can indeed still hit this case, typically via builtin
9338 	       expanders calling save_expr immediately before expanding
9339 	       something.  Assume this means that we only have to deal
9340 	       with non-BLKmode values.  */
9341 	    gcc_assert (GET_MODE (ret) != BLKmode);
9342 
9343 	    val = build_decl (EXPR_LOCATION (exp),
9344 			      VAR_DECL, NULL, TREE_TYPE (exp));
9345 	    DECL_ARTIFICIAL (val) = 1;
9346 	    DECL_IGNORED_P (val) = 1;
9347 	    treeop0 = val;
9348 	    TREE_OPERAND (exp, 0) = treeop0;
9349 	    SAVE_EXPR_RESOLVED_P (exp) = 1;
9350 
9351 	    if (!CONSTANT_P (ret))
9352 	      ret = copy_to_reg (ret);
9353 	    SET_DECL_RTL (val, ret);
9354 	  }
9355 
9356         return ret;
9357       }
9358 
9359 
9360     case CONSTRUCTOR:
9361       /* If we don't need the result, just ensure we evaluate any
9362 	 subexpressions.  */
9363       if (ignore)
9364 	{
9365 	  unsigned HOST_WIDE_INT idx;
9366 	  tree value;
9367 
9368 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9369 	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9370 
9371 	  return const0_rtx;
9372 	}
9373 
9374       return expand_constructor (exp, target, modifier, false);
9375 
9376     case TARGET_MEM_REF:
9377       {
9378 	addr_space_t as
9379 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9380 	struct mem_address addr;
9381 	enum insn_code icode;
9382 	unsigned int align;
9383 
9384 	get_address_description (exp, &addr);
9385 	op0 = addr_for_mem_ref (&addr, as, true);
9386 	op0 = memory_address_addr_space (mode, op0, as);
9387 	temp = gen_rtx_MEM (mode, op0);
9388 	set_mem_attributes (temp, exp, 0);
9389 	set_mem_addr_space (temp, as);
9390 	align = get_object_or_type_alignment (exp);
9391 	if (mode != BLKmode
9392 	    && align < GET_MODE_ALIGNMENT (mode)
9393 	    /* If the target does not have special handling for unaligned
9394 	       loads of this mode, then it can use regular moves for them.  */
9395 	    && ((icode = optab_handler (movmisalign_optab, mode))
9396 		!= CODE_FOR_nothing))
9397 	  {
9398 	    struct expand_operand ops[2];
9399 
9400 	    /* We've already validated the memory, and we're creating a
9401 	       new pseudo destination.  The predicates really can't fail,
9402 	       nor can the generator.  */
9403 	    create_output_operand (&ops[0], NULL_RTX, mode);
9404 	    create_fixed_operand (&ops[1], temp);
9405 	    expand_insn (icode, 2, ops);
9406 	    return ops[0].value;
9407 	  }
9408 	return temp;
9409       }
9410 
9411     case MEM_REF:
9412       {
9413 	addr_space_t as
9414 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9415 	enum machine_mode address_mode;
9416 	tree base = TREE_OPERAND (exp, 0);
9417 	gimple def_stmt;
9418 	enum insn_code icode;
9419 	unsigned align;
9420 	/* Handle expansion of non-aliased memory with non-BLKmode.  That
9421 	   might end up in a register.  */
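	/* E.g. for *(int *) &f where F is a float living in a register,
	   there is no memory to dereference; the access is rewritten below
	   as a VIEW_CONVERT_EXPR or BIT_FIELD_REF of the underlying decl
	   (or spilled to a stack temporary for a BLKmode access).  */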
9422 	if (mem_ref_refers_to_non_mem_p (exp))
9423 	  {
9424 	    HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9425 	    tree bit_offset;
9426 	    tree bftype;
9427 	    base = TREE_OPERAND (base, 0);
9428 	    if (offset == 0
9429 		&& host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
9430 		&& (GET_MODE_BITSIZE (DECL_MODE (base))
9431 		    == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
9432 	      return expand_expr (build1 (VIEW_CONVERT_EXPR,
9433 					  TREE_TYPE (exp), base),
9434 				  target, tmode, modifier);
9435 	    bit_offset = bitsize_int (offset * BITS_PER_UNIT);
9436 	    bftype = TREE_TYPE (base);
9437 	    if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
9438 	      bftype = TREE_TYPE (exp);
9439 	    else
9440 	      {
9441 		temp = assign_stack_temp (DECL_MODE (base),
9442 					  GET_MODE_SIZE (DECL_MODE (base)),
9443 					  0);
9444 		store_expr (base, temp, 0, false);
9445 		temp = adjust_address (temp, BLKmode, offset);
9446 		set_mem_size (temp, int_size_in_bytes (TREE_TYPE (exp)));
9447 		return temp;
9448 	      }
9449 	    return expand_expr (build3 (BIT_FIELD_REF, bftype,
9450 					base,
9451 					TYPE_SIZE (TREE_TYPE (exp)),
9452 					bit_offset),
9453 				target, tmode, modifier);
9454 	  }
9455 	address_mode = targetm.addr_space.address_mode (as);
9456 	base = TREE_OPERAND (exp, 0);
9457 	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9458 	  {
9459 	    tree mask = gimple_assign_rhs2 (def_stmt);
9460 	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9461 			   gimple_assign_rhs1 (def_stmt), mask);
9462 	    TREE_OPERAND (exp, 0) = base;
9463 	  }
9464 	align = get_object_or_type_alignment (exp);
9465 	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9466 	op0 = memory_address_addr_space (address_mode, op0, as);
9467 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
9468 	  {
9469 	    rtx off
9470 	      = immed_double_int_const (mem_ref_offset (exp), address_mode);
9471 	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9472 	  }
9473 	op0 = memory_address_addr_space (mode, op0, as);
9474 	temp = gen_rtx_MEM (mode, op0);
9475 	set_mem_attributes (temp, exp, 0);
9476 	set_mem_addr_space (temp, as);
9477 	if (TREE_THIS_VOLATILE (exp))
9478 	  MEM_VOLATILE_P (temp) = 1;
9479 	if (mode != BLKmode
9480 	    && align < GET_MODE_ALIGNMENT (mode)
9481 	    /* If the target does not have special handling for unaligned
9482 	       loads of this mode, then it can use regular moves for them.  */
9483 	    && ((icode = optab_handler (movmisalign_optab, mode))
9484 		!= CODE_FOR_nothing))
9485 	  {
9486 	    struct expand_operand ops[2];
9487 
9488 	    /* We've already validated the memory, and we're creating a
9489 	       new pseudo destination.  The predicates really can't fail,
9490 	       nor can the generator.  */
9491 	    create_output_operand (&ops[0], NULL_RTX, mode);
9492 	    create_fixed_operand (&ops[1], temp);
9493 	    expand_insn (icode, 2, ops);
9494 	    return ops[0].value;
9495 	  }
9496 	return temp;
9497       }
9498 
9499     case ARRAY_REF:
9500 
9501       {
9502 	tree array = treeop0;
9503 	tree index = treeop1;
9504 
9505 	/* Fold an expression like: "foo"[2].
9506 	   This is not done in fold so it won't happen inside &.
9507 	   Don't fold if this is for wide characters since it's too
9508 	   difficult to do correctly and this is a very rare case.  */
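	/* Editorial illustration, not from the original sources: for a
	   narrow-character access such as

	     char c = "foo"[2];

	   fold_read_from_constant_string yields the character constant 'o',
	   so no memory reference needs to be emitted at all.  */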
9509 
9510 	if (modifier != EXPAND_CONST_ADDRESS
9511 	    && modifier != EXPAND_INITIALIZER
9512 	    && modifier != EXPAND_MEMORY)
9513 	  {
9514 	    tree t = fold_read_from_constant_string (exp);
9515 
9516 	    if (t)
9517 	      return expand_expr (t, target, tmode, modifier);
9518 	  }
9519 
9520 	/* If this is a constant index into a constant array,
9521 	   just get the value from the array.  Handle both the cases when
9522 	   we have an explicit constructor and when our operand is a variable
9523 	   that was declared const.  */
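	/* Editorial illustration, not from the original sources: this covers
	   both an explicit constructor, e.g.

	     ... = ((const int[]) { 10, 20, 30 })[1];

	   and (in the else-if arm below) a const-qualified variable whose
	   initializer is known, in which case the selected element (20 in
	   the example) is expanded directly instead of being loaded from
	   the array in memory.  */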
9524 
9525 	if (modifier != EXPAND_CONST_ADDRESS
9526 	    && modifier != EXPAND_INITIALIZER
9527 	    && modifier != EXPAND_MEMORY
9528 	    && TREE_CODE (array) == CONSTRUCTOR
9529 	    && ! TREE_SIDE_EFFECTS (array)
9530 	    && TREE_CODE (index) == INTEGER_CST)
9531 	  {
9532 	    unsigned HOST_WIDE_INT ix;
9533 	    tree field, value;
9534 
9535 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9536 				      field, value)
9537 	      if (tree_int_cst_equal (field, index))
9538 		{
9539 		  if (!TREE_SIDE_EFFECTS (value))
9540 		    return expand_expr (fold (value), target, tmode, modifier);
9541 		  break;
9542 		}
9543 	  }
9544 
9545 	else if (optimize >= 1
9546 		 && modifier != EXPAND_CONST_ADDRESS
9547 		 && modifier != EXPAND_INITIALIZER
9548 		 && modifier != EXPAND_MEMORY
9549 		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9550 		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9551 		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
9552 		 && const_value_known_p (array))
9553 	  {
9554 	    if (TREE_CODE (index) == INTEGER_CST)
9555 	      {
9556 		tree init = DECL_INITIAL (array);
9557 
9558 		if (TREE_CODE (init) == CONSTRUCTOR)
9559 		  {
9560 		    unsigned HOST_WIDE_INT ix;
9561 		    tree field, value;
9562 
9563 		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9564 					      field, value)
9565 		      if (tree_int_cst_equal (field, index))
9566 			{
9567 			  if (TREE_SIDE_EFFECTS (value))
9568 			    break;
9569 
9570 			  if (TREE_CODE (value) == CONSTRUCTOR)
9571 			    {
9572 			      /* If VALUE is a CONSTRUCTOR, this
9573 				 optimization is only useful if
9574 				 this doesn't store the CONSTRUCTOR
9575 				 into memory.  If it does, it is more
9576 				 efficient to just load the data from
9577 				 the array directly.  */
9578 			      rtx ret = expand_constructor (value, target,
9579 							    modifier, true);
9580 			      if (ret == NULL_RTX)
9581 				break;
9582 			    }
9583 
9584 			  return expand_expr (fold (value), target, tmode,
9585 					      modifier);
9586 			}
9587 		  }
9588 		else if (TREE_CODE (init) == STRING_CST)
9589 		  {
9590 		    tree index1 = index;
9591 		    tree low_bound = array_ref_low_bound (exp);
9592 		    index1 = fold_convert_loc (loc, sizetype,
9593 					       treeop1);
9594 
9595 		    /* Optimize the special-case of a zero lower bound.
9596 
9597 		       We convert the low_bound to sizetype to avoid some problems
9598 		       with constant folding.  (E.g. suppose the lower bound is 1,
9599 		       and its mode is QI.  Without the conversion, (ARRAY
9600 		       +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9601 		       +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
9602 
9603 		    if (! integer_zerop (low_bound))
9604 		      index1 = size_diffop_loc (loc, index1,
9605 					    fold_convert_loc (loc, sizetype,
9606 							      low_bound));
9607 
9608 		    if (0 > compare_tree_int (index1,
9609 					      TREE_STRING_LENGTH (init)))
9610 		      {
9611 			tree type = TREE_TYPE (TREE_TYPE (init));
9612 			enum machine_mode mode = TYPE_MODE (type);
9613 
9614 			if (GET_MODE_CLASS (mode) == MODE_INT
9615 			    && GET_MODE_SIZE (mode) == 1)
9616 			  return gen_int_mode (TREE_STRING_POINTER (init)
9617 					       [TREE_INT_CST_LOW (index1)],
9618 					       mode);
9619 		      }
9620 		  }
9621 	      }
9622 	  }
9623       }
9624       goto normal_inner_ref;
9625 
9626     case COMPONENT_REF:
9627       /* If the operand is a CONSTRUCTOR, we can just extract the
9628 	 appropriate field if it is present.  */
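      /* Editorial illustration, not from the original sources: given

	   struct s { int x : 3; unsigned y : 3; };
	   ... = ((struct s) { .x = -2, .y = 5 }).y;

	 the initializer value of Y is used directly; for a bit-field it is
	 first masked to its width (unsigned case) or sign-extended with a
	 shift pair (signed case), as done below.  */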
9629       if (TREE_CODE (treeop0) == CONSTRUCTOR)
9630 	{
9631 	  unsigned HOST_WIDE_INT idx;
9632 	  tree field, value;
9633 
9634 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9635 				    idx, field, value)
9636 	    if (field == treeop1
9637 		/* We can normally use the value of the field in the
9638 		   CONSTRUCTOR.  However, if this is a bitfield in
9639 		   an integral mode that we can fit in a HOST_WIDE_INT,
9640 		   we must mask only the number of bits in the bitfield,
9641 		   since this is done implicitly by the constructor.  If
9642 		   the bitfield does not meet either of those conditions,
9643 		   we can't do this optimization.  */
9644 		&& (! DECL_BIT_FIELD (field)
9645 		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9646 			&& (GET_MODE_PRECISION (DECL_MODE (field))
9647 			    <= HOST_BITS_PER_WIDE_INT))))
9648 	      {
9649 		if (DECL_BIT_FIELD (field)
9650 		    && modifier == EXPAND_STACK_PARM)
9651 		  target = 0;
9652 		op0 = expand_expr (value, target, tmode, modifier);
9653 		if (DECL_BIT_FIELD (field))
9654 		  {
9655 		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9656 		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9657 
9658 		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
9659 		      {
9660 			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
9661 			op0 = expand_and (imode, op0, op1, target);
9662 		      }
9663 		    else
9664 		      {
9665 			int count = GET_MODE_PRECISION (imode) - bitsize;
9666 
9667 			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9668 					    target, 0);
9669 			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9670 					    target, 0);
9671 		      }
9672 		  }
9673 
9674 		return op0;
9675 	      }
9676 	}
9677       goto normal_inner_ref;
9678 
9679     case BIT_FIELD_REF:
9680     case ARRAY_RANGE_REF:
9681     normal_inner_ref:
9682       {
9683 	enum machine_mode mode1, mode2;
9684 	HOST_WIDE_INT bitsize, bitpos;
9685 	tree offset;
9686 	int volatilep = 0, must_force_mem;
9687 	bool packedp = false;
9688 	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9689 					&mode1, &unsignedp, &volatilep, true);
9690 	rtx orig_op0, memloc;
9691 
9692 	/* If we got back the original object, something is wrong.  Perhaps
9693 	   we are evaluating an expression too early.  In any event, don't
9694 	   infinitely recurse.  */
9695 	gcc_assert (tem != exp);
9696 
9697 	if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9698 	    || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9699 		&& DECL_PACKED (TREE_OPERAND (exp, 1))))
9700 	  packedp = true;
9701 
9702 	/* If TEM's type is a union of variable size, pass TARGET to the inner
9703 	   computation, since it will need a temporary and TARGET is known
9704 	   to have to do.  This occurs in unchecked conversion in Ada.  */
9705 	orig_op0 = op0
9706 	  = expand_expr (tem,
9707 			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9708 			  && COMPLETE_TYPE_P (TREE_TYPE (tem))
9709 			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9710 			      != INTEGER_CST)
9711 			  && modifier != EXPAND_STACK_PARM
9712 			  ? target : NULL_RTX),
9713 			 VOIDmode,
9714 			 (modifier == EXPAND_INITIALIZER
9715 			  || modifier == EXPAND_CONST_ADDRESS
9716 			  || modifier == EXPAND_STACK_PARM)
9717 			 ? modifier : EXPAND_NORMAL);
9718 
9719 
9720 	/* If the bitfield is volatile, we want to access it in the
9721 	   field's mode, not the computed mode.
9722 	   If a MEM has VOIDmode (external with incomplete type),
9723 	   use BLKmode for it instead.  */
9724 	if (MEM_P (op0))
9725 	  {
9726 	    if (volatilep && flag_strict_volatile_bitfields > 0)
9727 	      op0 = adjust_address (op0, mode1, 0);
9728 	    else if (GET_MODE (op0) == VOIDmode)
9729 	      op0 = adjust_address (op0, BLKmode, 0);
9730 	  }
9731 
9732 	mode2
9733 	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9734 
9735 	/* If we have either an offset, a BLKmode result, or a reference
9736 	   outside the underlying object, we must force it to memory.
9737 	   Such a case can occur in Ada if we have unchecked conversion
9738 	   of an expression from a scalar type to an aggregate type or
9739 	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9740 	   passed a partially uninitialized object or a view-conversion
9741 	   to a larger size.  */
9742 	must_force_mem = (offset
9743 			  || mode1 == BLKmode
9744 			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9745 
9746 	/* Handle CONCAT first.  */
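	/* Editorial illustration, not from the original sources: OP0 is a
	   CONCAT chiefly when TEM is a complex value held as a (concat
	   real imag) register pair.  If the reference selects the whole
	   value, or exactly one of the two halves, that half is returned
	   directly below without going through memory.  */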
9747 	if (GET_CODE (op0) == CONCAT && !must_force_mem)
9748 	  {
9749 	    if (bitpos == 0
9750 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9751 	      return op0;
9752 	    if (bitpos == 0
9753 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9754 		&& bitsize)
9755 	      {
9756 		op0 = XEXP (op0, 0);
9757 		mode2 = GET_MODE (op0);
9758 	      }
9759 	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9760 		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9761 		     && bitpos
9762 		     && bitsize)
9763 	      {
9764 		op0 = XEXP (op0, 1);
9765 		bitpos = 0;
9766 		mode2 = GET_MODE (op0);
9767 	      }
9768 	    else
9769 	      /* Otherwise force into memory.  */
9770 	      must_force_mem = 1;
9771 	  }
9772 
9773 	/* If this is a constant, put it in a register if it is a legitimate
9774 	   constant and we don't need a memory reference.  */
9775 	if (CONSTANT_P (op0)
9776 	    && mode2 != BLKmode
9777 	    && targetm.legitimate_constant_p (mode2, op0)
9778 	    && !must_force_mem)
9779 	  op0 = force_reg (mode2, op0);
9780 
9781 	/* Otherwise, if this is a constant, try to force it to the constant
9782 	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
9783 	   is a legitimate constant.  */
9784 	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9785 	  op0 = validize_mem (memloc);
9786 
9787 	/* Otherwise, if this is a constant or the object is not in memory
9788 	   and needs to be, put it there.  */
9789 	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9790 	  {
9791 	    tree nt = build_qualified_type (TREE_TYPE (tem),
9792 					    (TYPE_QUALS (TREE_TYPE (tem))
9793 					     | TYPE_QUAL_CONST));
9794 	    memloc = assign_temp (nt, 1, 1, 1);
9795 	    emit_move_insn (memloc, op0);
9796 	    op0 = memloc;
9797 	  }
9798 
9799 	if (offset)
9800 	  {
9801 	    enum machine_mode address_mode;
9802 	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9803 					  EXPAND_SUM);
9804 
9805 	    gcc_assert (MEM_P (op0));
9806 
9807 	    address_mode
9808 	      = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9809 	    if (GET_MODE (offset_rtx) != address_mode)
9810 	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9811 
9812 	    if (GET_MODE (op0) == BLKmode
9813 		/* A constant address in OP0 can have VOIDmode, we must
9814 		   not try to call force_reg in that case.  */
9815 		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
9816 		&& bitsize != 0
9817 		&& (bitpos % bitsize) == 0
9818 		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9819 		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9820 	      {
9821 		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9822 		bitpos = 0;
9823 	      }
9824 
9825 	    op0 = offset_address (op0, offset_rtx,
9826 				  highest_pow2_factor (offset));
9827 	  }
9828 
9829 	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9830 	   record its alignment as BIGGEST_ALIGNMENT.  */
9831 	if (MEM_P (op0) && bitpos == 0 && offset != 0
9832 	    && is_aligning_offset (offset, tem))
9833 	  set_mem_align (op0, BIGGEST_ALIGNMENT);
9834 
9835 	/* Don't forget about volatility even if this is a bitfield.  */
9836 	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9837 	  {
9838 	    if (op0 == orig_op0)
9839 	      op0 = copy_rtx (op0);
9840 
9841 	    MEM_VOLATILE_P (op0) = 1;
9842 	  }
9843 
9844 	/* In cases where an aligned union has an unaligned object
9845 	   as a field, we might be extracting a BLKmode value from
9846 	   an integer-mode (e.g., SImode) object.  Handle this case
9847 	   by doing the extract into an object as wide as the field
9848 	   (which we know to be the width of a basic mode), then
9849 	   storing into memory, and changing the mode to BLKmode.  */
9850 	if (mode1 == VOIDmode
9851 	    || REG_P (op0) || GET_CODE (op0) == SUBREG
9852 	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
9853 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9854 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9855 		&& modifier != EXPAND_CONST_ADDRESS
9856 		&& modifier != EXPAND_INITIALIZER)
9857 	    /* If the field is volatile, we always want an aligned
9858 	       access.  Do this in the following two situations:
9859 	       1. the access is not already naturally
9860 	       aligned, otherwise "normal" (non-bitfield) volatile fields
9861 	       become non-addressable.
9862 	       2. the bitsize is narrower than the access size, so we need
9863 	       to extract bit fields from the access.  */
9864 	    || (volatilep && flag_strict_volatile_bitfields > 0
9865 		&& (bitpos % GET_MODE_ALIGNMENT (mode) != 0
9866 		    || (mode1 != BLKmode
9867 		        && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
9868 	    /* If the field isn't aligned enough to fetch as a memref,
9869 	       fetch it as a bit field.  */
9870 	    || (mode1 != BLKmode
9871 		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9872 		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9873 		      || (MEM_P (op0)
9874 			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9875 			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9876 		     && ((modifier == EXPAND_CONST_ADDRESS
9877 			  || modifier == EXPAND_INITIALIZER)
9878 			 ? STRICT_ALIGNMENT
9879 			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9880 		    || (bitpos % BITS_PER_UNIT != 0)))
9881 	    /* If the type and the field are a constant size and the
9882 	       size of the type isn't the same size as the bitfield,
9883 	       we must use bitfield operations.  */
9884 	    || (bitsize >= 0
9885 		&& TYPE_SIZE (TREE_TYPE (exp))
9886 		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9887 		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9888 					  bitsize)))
9889 	  {
9890 	    enum machine_mode ext_mode = mode;
9891 
9892 	    if (ext_mode == BLKmode
9893 		&& ! (target != 0 && MEM_P (op0)
9894 		      && MEM_P (target)
9895 		      && bitpos % BITS_PER_UNIT == 0))
9896 	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9897 
9898 	    if (ext_mode == BLKmode)
9899 	      {
9900 		if (target == 0)
9901 		  target = assign_temp (type, 0, 1, 1);
9902 
9903 		if (bitsize == 0)
9904 		  return target;
9905 
9906 		/* In this case, BITPOS must start at a byte boundary and
9907 		   TARGET, if specified, must be a MEM.  */
9908 		gcc_assert (MEM_P (op0)
9909 			    && (!target || MEM_P (target))
9910 			    && !(bitpos % BITS_PER_UNIT));
9911 
9912 		emit_block_move (target,
9913 				 adjust_address (op0, VOIDmode,
9914 						 bitpos / BITS_PER_UNIT),
9915 				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9916 					  / BITS_PER_UNIT),
9917 				 (modifier == EXPAND_STACK_PARM
9918 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9919 
9920 		return target;
9921 	      }
9922 
9923 	    op0 = validize_mem (op0);
9924 
9925 	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9926 	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9927 
9928 	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
9929 				     (modifier == EXPAND_STACK_PARM
9930 				      ? NULL_RTX : target),
9931 				     ext_mode, ext_mode);
9932 
9933 	    /* If the result is a record type and BITSIZE is narrower than
9934 	       the mode of OP0, an integral mode, and this is a big endian
9935 	       machine, we must put the field into the high-order bits.  */
9936 	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9937 		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9938 		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9939 	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9940 				  GET_MODE_BITSIZE (GET_MODE (op0))
9941 				  - bitsize, op0, 1);
9942 
9943 	    /* If the result type is BLKmode, store the data into a temporary
9944 	       of the appropriate type, but with the mode corresponding to the
9945 	       mode for the data we have (op0's mode).  It's tempting to make
9946 	       this a constant type, since we know it's only being stored once,
9947 	       but that can cause problems if we are taking the address of this
9948 	       COMPONENT_REF because the MEM of any reference via that address
9949 	       will have flags corresponding to the type, which will not
9950 	       necessarily be constant.  */
9951 	    if (mode == BLKmode)
9952 	      {
9953 		HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9954 		rtx new_rtx;
9955 
9956 		/* If the reference doesn't use the alias set of its type,
9957 		   we cannot create the temporary using that type.  */
9958 		if (component_uses_parent_alias_set (exp))
9959 		  {
9960 		    new_rtx = assign_stack_local (ext_mode, size, 0);
9961 		    set_mem_alias_set (new_rtx, get_alias_set (exp));
9962 		  }
9963 		else
9964 		  new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9965 
9966 		emit_move_insn (new_rtx, op0);
9967 		op0 = copy_rtx (new_rtx);
9968 		PUT_MODE (op0, BLKmode);
9969 		set_mem_attributes (op0, exp, 1);
9970 	      }
9971 
9972 	    return op0;
9973 	  }
9974 
9975 	/* If the result is BLKmode, use that to access the object
9976 	   now as well.  */
9977 	if (mode == BLKmode)
9978 	  mode1 = BLKmode;
9979 
9980 	/* Get a reference to just this component.  */
9981 	if (modifier == EXPAND_CONST_ADDRESS
9982 	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9983 	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9984 	else
9985 	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9986 
9987 	if (op0 == orig_op0)
9988 	  op0 = copy_rtx (op0);
9989 
9990 	set_mem_attributes (op0, exp, 0);
9991 	if (REG_P (XEXP (op0, 0)))
9992 	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9993 
9994 	MEM_VOLATILE_P (op0) |= volatilep;
9995 	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9996 	    || modifier == EXPAND_CONST_ADDRESS
9997 	    || modifier == EXPAND_INITIALIZER)
9998 	  return op0;
9999 	else if (target == 0)
10000 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10001 
10002 	convert_move (target, op0, unsignedp);
10003 	return target;
10004       }
10005 
10006     case OBJ_TYPE_REF:
10007       return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10008 
10009     case CALL_EXPR:
10010       /* All valid uses of __builtin_va_arg_pack () are removed during
10011 	 inlining.  */
10012       if (CALL_EXPR_VA_ARG_PACK (exp))
10013 	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10014       {
10015 	tree fndecl = get_callee_fndecl (exp), attr;
10016 
10017 	if (fndecl
10018 	    && (attr = lookup_attribute ("error",
10019 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10020 	  error ("%Kcall to %qs declared with attribute error: %s",
10021 		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10022 		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10023 	if (fndecl
10024 	    && (attr = lookup_attribute ("warning",
10025 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10026 	  warning_at (tree_nonartificial_location (exp),
10027 		      0, "%Kcall to %qs declared with attribute warning: %s",
10028 		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10029 		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10030 
10031 	/* Check for a built-in function.  */
10032 	if (fndecl && DECL_BUILT_IN (fndecl))
10033 	  {
10034 	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10035 	    return expand_builtin (exp, target, subtarget, tmode, ignore);
10036 	  }
10037       }
10038       return expand_call (exp, target, ignore);
10039 
10040     case VIEW_CONVERT_EXPR:
10041       op0 = NULL_RTX;
10042 
10043       /* If we are converting to BLKmode, try to avoid an intermediate
10044 	 temporary by fetching an inner memory reference.  */
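      /* Editorial illustration, not from the original sources: e.g. for a
	 GIMPLE-level VIEW_CONVERT_EXPR that reinterprets a scalar struct
	 field as a small BLKmode aggregate of the same size, the result can
	 be taken straight from the field's MEM instead of being copied
	 through a stack temporary.  */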
10045       if (mode == BLKmode
10046 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10047 	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10048 	  && handled_component_p (treeop0))
10049       {
10050 	enum machine_mode mode1;
10051 	HOST_WIDE_INT bitsize, bitpos;
10052 	tree offset;
10053 	int unsignedp;
10054 	int volatilep = 0;
10055 	tree tem
10056 	  = get_inner_reference (treeop0, &bitsize, &bitpos,
10057 				 &offset, &mode1, &unsignedp, &volatilep,
10058 				 true);
10059 	rtx orig_op0;
10060 
10061 	/* ??? We should work harder and deal with non-zero offsets.  */
10062 	if (!offset
10063 	    && (bitpos % BITS_PER_UNIT) == 0
10064 	    && bitsize >= 0
10065 	    && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
10066 	  {
10067 	    /* See the normal_inner_ref case for the rationale.  */
10068 	    orig_op0
10069 	      = expand_expr (tem,
10070 			     (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10071 			      && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10072 				  != INTEGER_CST)
10073 			      && modifier != EXPAND_STACK_PARM
10074 			      ? target : NULL_RTX),
10075 			     VOIDmode,
10076 			     (modifier == EXPAND_INITIALIZER
10077 			      || modifier == EXPAND_CONST_ADDRESS
10078 			      || modifier == EXPAND_STACK_PARM)
10079 			     ? modifier : EXPAND_NORMAL);
10080 
10081 	    if (MEM_P (orig_op0))
10082 	      {
10083 		op0 = orig_op0;
10084 
10085 		/* Get a reference to just this component.  */
10086 		if (modifier == EXPAND_CONST_ADDRESS
10087 		    || modifier == EXPAND_SUM
10088 		    || modifier == EXPAND_INITIALIZER)
10089 		  op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10090 		else
10091 		  op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10092 
10093 		if (op0 == orig_op0)
10094 		  op0 = copy_rtx (op0);
10095 
10096 		set_mem_attributes (op0, treeop0, 0);
10097 		if (REG_P (XEXP (op0, 0)))
10098 		  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10099 
10100 		MEM_VOLATILE_P (op0) |= volatilep;
10101 	      }
10102 	  }
10103       }
10104 
10105       if (!op0)
10106 	op0 = expand_expr (treeop0,
10107 			   NULL_RTX, VOIDmode, modifier);
10108 
10109       /* If the input and output modes are both the same, we are done.  */
10110       if (mode == GET_MODE (op0))
10111 	;
10112       /* If neither mode is BLKmode, and both modes are the same size
10113 	 then we can use gen_lowpart.  */
10114       else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10115 	       && (GET_MODE_PRECISION (mode)
10116 		   == GET_MODE_PRECISION (GET_MODE (op0)))
10117 	       && !COMPLEX_MODE_P (GET_MODE (op0)))
10118 	{
10119 	  if (GET_CODE (op0) == SUBREG)
10120 	    op0 = force_reg (GET_MODE (op0), op0);
10121 	  temp = gen_lowpart_common (mode, op0);
10122 	  if (temp)
10123 	    op0 = temp;
10124 	  else
10125 	    {
10126 	      if (!REG_P (op0) && !MEM_P (op0))
10127 		op0 = force_reg (GET_MODE (op0), op0);
10128 	      op0 = gen_lowpart (mode, op0);
10129 	    }
10130 	}
10131       /* If both types are integral, convert from one mode to the other.  */
10132       else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10133 	op0 = convert_modes (mode, GET_MODE (op0), op0,
10134 			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10135       /* As a last resort, spill op0 to memory, and reload it in a
10136 	 different mode.  */
10137       else if (!MEM_P (op0))
10138 	{
10139 	  /* If the operand is not a MEM, force it into memory.  Since we
10140 	     are going to be changing the mode of the MEM, don't call
10141 	     force_const_mem for constants because we don't allow pool
10142 	     constants to change mode.  */
10143 	  tree inner_type = TREE_TYPE (treeop0);
10144 
10145 	  gcc_assert (!TREE_ADDRESSABLE (exp));
10146 
10147 	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10148 	    target
10149 	      = assign_stack_temp_for_type
10150 		(TYPE_MODE (inner_type),
10151 		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
10152 
10153 	  emit_move_insn (target, op0);
10154 	  op0 = target;
10155 	}
10156 
10157       /* At this point, OP0 is in the correct mode.  If the output type is
10158 	 such that the operand is known to be aligned, indicate that it is.
10159 	 Otherwise, we need only be concerned about alignment for non-BLKmode
10160 	 results.  */
10161       if (MEM_P (op0))
10162 	{
10163 	  enum insn_code icode;
10164 
10165 	  op0 = copy_rtx (op0);
10166 
10167 	  if (TYPE_ALIGN_OK (type))
10168 	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10169 	  else if (mode != BLKmode
10170 		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
10171 		   /* If the target does have special handling for unaligned
10172 		      loads of this mode, then use them.  */
10173 		   && ((icode = optab_handler (movmisalign_optab, mode))
10174 		       != CODE_FOR_nothing))
10175 	    {
10176 	      rtx reg, insn;
10177 
10178 	      op0 = adjust_address (op0, mode, 0);
10179 	      /* We've already validated the memory, and we're creating a
10180 		 new pseudo destination.  The predicates really can't
10181 		 fail.  */
10182 	      reg = gen_reg_rtx (mode);
10183 
10184 	      /* Nor can the insn generator.  */
10185 	      insn = GEN_FCN (icode) (reg, op0);
10186 	      emit_insn (insn);
10187 	      return reg;
10188 	    }
10189 	  else if (STRICT_ALIGNMENT
10190 		   && mode != BLKmode
10191 		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10192 	    {
10193 	      tree inner_type = TREE_TYPE (treeop0);
10194 	      HOST_WIDE_INT temp_size
10195 		= MAX (int_size_in_bytes (inner_type),
10196 		       (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10197 	      rtx new_rtx
10198 		= assign_stack_temp_for_type (mode, temp_size, 0, type);
10199 	      rtx new_with_op0_mode
10200 		= adjust_address (new_rtx, GET_MODE (op0), 0);
10201 
10202 	      gcc_assert (!TREE_ADDRESSABLE (exp));
10203 
10204 	      if (GET_MODE (op0) == BLKmode)
10205 		emit_block_move (new_with_op0_mode, op0,
10206 				 GEN_INT (GET_MODE_SIZE (mode)),
10207 				 (modifier == EXPAND_STACK_PARM
10208 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10209 	      else
10210 		emit_move_insn (new_with_op0_mode, op0);
10211 
10212 	      op0 = new_rtx;
10213 	    }
10214 
10215 	  op0 = adjust_address (op0, mode, 0);
10216 	}
10217 
10218       return op0;
10219 
10220     case MODIFY_EXPR:
10221       {
10222 	tree lhs = treeop0;
10223 	tree rhs = treeop1;
10224 	gcc_assert (ignore);
10225 
10226 	/* Check for |= or &= of a bitfield of size one into another bitfield
10227 	   of size 1.  In this case, (unless we need the result of the
10228 	   assignment) we can do this more efficiently with a
10229 	   test followed by an assignment, if necessary.
10230 
10231 	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
10232 	   things change so we do, this code should be enhanced to
10233 	   support it.  */
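	/* Editorial illustration, not from the original sources: for

	     struct s { unsigned a : 1, b : 1; } v;
	     v.a |= v.b;

	   the code below emits a conditional jump on V.B that either skips
	   or performs a plain store of the constant 1 (or 0 for &=),
	   instead of a read-modify-write of the destination bit-field.  */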
10234 	if (TREE_CODE (lhs) == COMPONENT_REF
10235 	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
10236 		|| TREE_CODE (rhs) == BIT_AND_EXPR)
10237 	    && TREE_OPERAND (rhs, 0) == lhs
10238 	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10239 	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10240 	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10241 	  {
10242 	    rtx label = gen_label_rtx ();
10243 	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10244 	    do_jump (TREE_OPERAND (rhs, 1),
10245 		     value ? label : 0,
10246 		     value ? 0 : label, -1);
10247 	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10248 			       MOVE_NONTEMPORAL (exp));
10249 	    do_pending_stack_adjust ();
10250 	    emit_label (label);
10251 	    return const0_rtx;
10252 	  }
10253 
10254 	expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
10255 	return const0_rtx;
10256       }
10257 
10258     case ADDR_EXPR:
10259       return expand_expr_addr_expr (exp, target, tmode, modifier);
10260 
10261     case REALPART_EXPR:
10262       op0 = expand_normal (treeop0);
10263       return read_complex_part (op0, false);
10264 
10265     case IMAGPART_EXPR:
10266       op0 = expand_normal (treeop0);
10267       return read_complex_part (op0, true);
10268 
10269     case RETURN_EXPR:
10270     case LABEL_EXPR:
10271     case GOTO_EXPR:
10272     case SWITCH_EXPR:
10273     case ASM_EXPR:
10274       /* Expanded in cfgexpand.c.  */
10275       gcc_unreachable ();
10276 
10277     case TRY_CATCH_EXPR:
10278     case CATCH_EXPR:
10279     case EH_FILTER_EXPR:
10280     case TRY_FINALLY_EXPR:
10281       /* Lowered by tree-eh.c.  */
10282       gcc_unreachable ();
10283 
10284     case WITH_CLEANUP_EXPR:
10285     case CLEANUP_POINT_EXPR:
10286     case TARGET_EXPR:
10287     case CASE_LABEL_EXPR:
10288     case VA_ARG_EXPR:
10289     case BIND_EXPR:
10290     case INIT_EXPR:
10291     case CONJ_EXPR:
10292     case COMPOUND_EXPR:
10293     case PREINCREMENT_EXPR:
10294     case PREDECREMENT_EXPR:
10295     case POSTINCREMENT_EXPR:
10296     case POSTDECREMENT_EXPR:
10297     case LOOP_EXPR:
10298     case EXIT_EXPR:
10299       /* Lowered by gimplify.c.  */
10300       gcc_unreachable ();
10301 
10302     case FDESC_EXPR:
10303       /* Function descriptors are not valid except as
10304 	 initialization constants, and should not be expanded.  */
10305       gcc_unreachable ();
10306 
10307     case WITH_SIZE_EXPR:
10308       /* WITH_SIZE_EXPR expands to its first argument.  The caller should
10309 	 have pulled out the size to use in whatever context it needed.  */
10310       return expand_expr_real (treeop0, original_target, tmode,
10311 			       modifier, alt_rtl);
10312 
10313     case COMPOUND_LITERAL_EXPR:
10314       {
10315 	/* Initialize the anonymous variable declared in the compound
10316 	   literal, then return the variable.  */
10317 	tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
10318 
10319 	/* Create RTL for this variable.  */
10320 	if (!DECL_RTL_SET_P (decl))
10321 	  {
10322 	    if (DECL_HARD_REGISTER (decl))
10323 	      /* The user specified an assembler name for this variable.
10324 	         Set that up now.  */
10325 	      rest_of_decl_compilation (decl, 0, 0);
10326 	    else
10327 	      expand_decl (decl);
10328 	  }
10329 
10330 	return expand_expr_real (decl, original_target, tmode,
10331 				 modifier, alt_rtl);
10332       }
10333 
10334     default:
10335       return expand_expr_real_2 (&ops, target, tmode, modifier);
10336     }
10337 }
10338 
10339 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10340    signedness of TYPE), possibly returning the result in TARGET.  */
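/* Editorial illustration, not from the original sources: for a signed
   3-bit type held in an SImode register the value is reduced by shifting
   left 29 bits and then arithmetically right 29 bits, while for an
   unsigned 3-bit type it is simply masked with 0x7.  */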
10341 static rtx
10342 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10343 {
10344   HOST_WIDE_INT prec = TYPE_PRECISION (type);
10345   if (target && GET_MODE (target) != GET_MODE (exp))
10346     target = 0;
10347   /* For constant values, reduce using build_int_cst_type. */
10348   if (CONST_INT_P (exp))
10349     {
10350       HOST_WIDE_INT value = INTVAL (exp);
10351       tree t = build_int_cst_type (type, value);
10352       return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10353     }
10354   else if (TYPE_UNSIGNED (type))
10355     {
10356       rtx mask = immed_double_int_const (double_int_mask (prec),
10357 					 GET_MODE (exp));
10358       return expand_and (GET_MODE (exp), exp, mask, target);
10359     }
10360   else
10361     {
10362       int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10363       exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10364 			  exp, count, target, 0);
10365       return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10366 			   exp, count, target, 0);
10367     }
10368 }
10369 
10370 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10371    when applied to the address of EXP produces an address known to be
10372    aligned more than BIGGEST_ALIGNMENT.  */
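/* Editorial illustration, not from the original sources: the shape being
   recognized corresponds to source-level alignment code such as

     p = buf + (-(intptr_t) buf & (ALIGN - 1));

   where BUF plays the role of EXP and ALIGN is a placeholder for some
   power of 2 larger than BIGGEST_ALIGNMENT: a BIT_AND_EXPR whose mask is
   one less than a power of 2, applied to the negated address of EXP.  */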
10373 
10374 static int
10375 is_aligning_offset (const_tree offset, const_tree exp)
10376 {
10377   /* Strip off any conversions.  */
10378   while (CONVERT_EXPR_P (offset))
10379     offset = TREE_OPERAND (offset, 0);
10380 
10381   /* We must now have a BIT_AND_EXPR with a constant that is one less than
10382      a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
10383   if (TREE_CODE (offset) != BIT_AND_EXPR
10384       || !host_integerp (TREE_OPERAND (offset, 1), 1)
10385       || compare_tree_int (TREE_OPERAND (offset, 1),
10386 			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10387       || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
10388     return 0;
10389 
10390   /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10391      It must be NEGATE_EXPR.  Then strip any more conversions.  */
10392   offset = TREE_OPERAND (offset, 0);
10393   while (CONVERT_EXPR_P (offset))
10394     offset = TREE_OPERAND (offset, 0);
10395 
10396   if (TREE_CODE (offset) != NEGATE_EXPR)
10397     return 0;
10398 
10399   offset = TREE_OPERAND (offset, 0);
10400   while (CONVERT_EXPR_P (offset))
10401     offset = TREE_OPERAND (offset, 0);
10402 
10403   /* This must now be the address of EXP.  */
10404   return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10405 }
10406 
10407 /* Return the tree node if an ARG corresponds to a string constant or zero
10408    if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
10409    in bytes within the string that ARG is accessing.  The type of the
10410    offset will be `sizetype'.  */
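/* Editorial illustration, not from the original sources: for an argument
   such as &"hello"[2], or equivalently "hello" + 2, this returns the
   STRING_CST "hello" and sets *PTR_OFFSET to 2; variables whose
   initializer is a known string literal are handled as well.  */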
10411 
10412 tree
10413 string_constant (tree arg, tree *ptr_offset)
10414 {
10415   tree array, offset, lower_bound;
10416   STRIP_NOPS (arg);
10417 
10418   if (TREE_CODE (arg) == ADDR_EXPR)
10419     {
10420       if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10421 	{
10422 	  *ptr_offset = size_zero_node;
10423 	  return TREE_OPERAND (arg, 0);
10424 	}
10425       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10426 	{
10427 	  array = TREE_OPERAND (arg, 0);
10428 	  offset = size_zero_node;
10429 	}
10430       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10431 	{
10432 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10433 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10434 	  if (TREE_CODE (array) != STRING_CST
10435 	      && TREE_CODE (array) != VAR_DECL)
10436 	    return 0;
10437 
10438 	  /* Check if the array has a nonzero lower bound.  */
10439 	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10440 	  if (!integer_zerop (lower_bound))
10441 	    {
10442 	      /* If the offset and base aren't both constants, return 0.  */
10443 	      if (TREE_CODE (lower_bound) != INTEGER_CST)
10444 	        return 0;
10445 	      if (TREE_CODE (offset) != INTEGER_CST)
10446 		return 0;
10447 	      /* Adjust offset by the lower bound.  */
10448 	      offset = size_diffop (fold_convert (sizetype, offset),
10449 				    fold_convert (sizetype, lower_bound));
10450 	    }
10451 	}
10452       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10453 	{
10454 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10455 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10456 	  if (TREE_CODE (array) != ADDR_EXPR)
10457 	    return 0;
10458 	  array = TREE_OPERAND (array, 0);
10459 	  if (TREE_CODE (array) != STRING_CST
10460 	      && TREE_CODE (array) != VAR_DECL)
10461 	    return 0;
10462 	}
10463       else
10464 	return 0;
10465     }
10466   else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10467     {
10468       tree arg0 = TREE_OPERAND (arg, 0);
10469       tree arg1 = TREE_OPERAND (arg, 1);
10470 
10471       STRIP_NOPS (arg0);
10472       STRIP_NOPS (arg1);
10473 
10474       if (TREE_CODE (arg0) == ADDR_EXPR
10475 	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10476 	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10477 	{
10478 	  array = TREE_OPERAND (arg0, 0);
10479 	  offset = arg1;
10480 	}
10481       else if (TREE_CODE (arg1) == ADDR_EXPR
10482 	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10483 		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10484 	{
10485 	  array = TREE_OPERAND (arg1, 0);
10486 	  offset = arg0;
10487 	}
10488       else
10489 	return 0;
10490     }
10491   else
10492     return 0;
10493 
10494   if (TREE_CODE (array) == STRING_CST)
10495     {
10496       *ptr_offset = fold_convert (sizetype, offset);
10497       return array;
10498     }
10499   else if (TREE_CODE (array) == VAR_DECL
10500 	   || TREE_CODE (array) == CONST_DECL)
10501     {
10502       int length;
10503 
10504       /* Variables initialized to string literals can be handled too.  */
10505       if (!const_value_known_p (array)
10506 	  || !DECL_INITIAL (array)
10507 	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
10508 	return 0;
10509 
10510       /* Avoid const char foo[4] = "abcde";  */
10511       if (DECL_SIZE_UNIT (array) == NULL_TREE
10512 	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10513 	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
10514 	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10515 	return 0;
10516 
10517       /* If variable is bigger than the string literal, OFFSET must be constant
10518 	 and inside of the bounds of the string literal.  */
10519       offset = fold_convert (sizetype, offset);
10520       if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10521 	  && (! host_integerp (offset, 1)
10522 	      || compare_tree_int (offset, length) >= 0))
10523 	return 0;
10524 
10525       *ptr_offset = offset;
10526       return DECL_INITIAL (array);
10527     }
10528 
10529   return 0;
10530 }
10531 
10532 /* Generate code to calculate OPS, an exploded expression,
10533    using a store-flag instruction and return an rtx for the result.
10534    OPS reflects a comparison.
10535 
10536    If TARGET is nonzero, store the result there if convenient.
10537 
10538    Return zero if there is no suitable set-flag instruction
10539    available on this machine.
10540 
10541    Once expand_expr has been called on the arguments of the comparison,
10542    we are committed to doing the store flag, since it is not safe to
10543    re-evaluate the expression.  We emit the store-flag insn by calling
10544    emit_store_flag, but only expand the arguments if we have a reason
10545    to believe that emit_store_flag will be successful.  If we think that
10546    it will, but it isn't, we have to simulate the store-flag with a
10547    set/jump/set sequence.  */
10548 
10549 static rtx
10550 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10551 {
10552   enum rtx_code code;
10553   tree arg0, arg1, type;
10554   tree tem;
10555   enum machine_mode operand_mode;
10556   int unsignedp;
10557   rtx op0, op1;
10558   rtx subtarget = target;
10559   location_t loc = ops->location;
10560 
10561   arg0 = ops->op0;
10562   arg1 = ops->op1;
10563 
10564   /* Don't crash if the comparison was erroneous.  */
10565   if (arg0 == error_mark_node || arg1 == error_mark_node)
10566     return const0_rtx;
10567 
10568   type = TREE_TYPE (arg0);
10569   operand_mode = TYPE_MODE (type);
10570   unsignedp = TYPE_UNSIGNED (type);
10571 
10572   /* We won't bother with BLKmode store-flag operations because it would mean
10573      passing a lot of information to emit_store_flag.  */
10574   if (operand_mode == BLKmode)
10575     return 0;
10576 
10577   /* We won't bother with store-flag operations involving function pointers
10578      when function pointers must be canonicalized before comparisons.  */
10579 #ifdef HAVE_canonicalize_funcptr_for_compare
10580   if (HAVE_canonicalize_funcptr_for_compare
10581       && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10582 	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10583 	       == FUNCTION_TYPE))
10584 	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10585 	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10586 		  == FUNCTION_TYPE))))
10587     return 0;
10588 #endif
10589 
10590   STRIP_NOPS (arg0);
10591   STRIP_NOPS (arg1);
10592 
10593   /* For vector typed comparisons emit code to generate the desired
10594      all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
10595      expander for this.  */
10596   if (TREE_CODE (ops->type) == VECTOR_TYPE)
10597     {
10598       tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10599       tree if_true = constant_boolean_node (true, ops->type);
10600       tree if_false = constant_boolean_node (false, ops->type);
10601       return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10602     }
10614 
10615   /* Get the rtx comparison code to use.  We know that EXP is a comparison
10616      operation of some type.  Some comparisons against 1 and -1 can be
10617      converted to comparisons with zero.  Do so here so that the tests
10618      below will be aware that we have a comparison with zero.   These
10619      tests will not catch constants in the first operand, but constants
10620      are rarely passed as the first operand.  */
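  /* Editorial illustration, not from the original sources: for example a
     signed "x <= -1" is rewritten as "x < 0" and "x >= 1" as "x > 0", so
     the single-bit test below only needs to recognize comparisons against
     zero.  */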
10621 
10622   switch (ops->code)
10623     {
10624     case EQ_EXPR:
10625       code = EQ;
10626       break;
10627     case NE_EXPR:
10628       code = NE;
10629       break;
10630     case LT_EXPR:
10631       if (integer_onep (arg1))
10632 	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10633       else
10634 	code = unsignedp ? LTU : LT;
10635       break;
10636     case LE_EXPR:
10637       if (! unsignedp && integer_all_onesp (arg1))
10638 	arg1 = integer_zero_node, code = LT;
10639       else
10640 	code = unsignedp ? LEU : LE;
10641       break;
10642     case GT_EXPR:
10643       if (! unsignedp && integer_all_onesp (arg1))
10644 	arg1 = integer_zero_node, code = GE;
10645       else
10646 	code = unsignedp ? GTU : GT;
10647       break;
10648     case GE_EXPR:
10649       if (integer_onep (arg1))
10650 	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10651       else
10652 	code = unsignedp ? GEU : GE;
10653       break;
10654 
10655     case UNORDERED_EXPR:
10656       code = UNORDERED;
10657       break;
10658     case ORDERED_EXPR:
10659       code = ORDERED;
10660       break;
10661     case UNLT_EXPR:
10662       code = UNLT;
10663       break;
10664     case UNLE_EXPR:
10665       code = UNLE;
10666       break;
10667     case UNGT_EXPR:
10668       code = UNGT;
10669       break;
10670     case UNGE_EXPR:
10671       code = UNGE;
10672       break;
10673     case UNEQ_EXPR:
10674       code = UNEQ;
10675       break;
10676     case LTGT_EXPR:
10677       code = LTGT;
10678       break;
10679 
10680     default:
10681       gcc_unreachable ();
10682     }
10683 
10684   /* Put a constant second.  */
10685   if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10686       || TREE_CODE (arg0) == FIXED_CST)
10687     {
10688       tem = arg0; arg0 = arg1; arg1 = tem;
10689       code = swap_condition (code);
10690     }
10691 
10692   /* If this is an equality or inequality test of a single bit, we can
10693      do this by shifting the bit being tested to the low-order bit and
10694      masking the result with the constant 1.  If the condition was EQ,
10695      we xor it with 1.  This does not require an scc insn and is faster
10696      than an scc insn even if we have it.
10697 
10698      The code to make this transformation was moved into fold_single_bit_test,
10699      so we just call into the folder and expand its result.  */
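  /* Editorial illustration, not from the original sources: a test such as
     "(x & 8) != 0" is folded into "(x >> 3) & 1", and for the EQ case the
     result is additionally XORed with 1, so no store-flag (scc) insn is
     needed at all.  */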
10700 
10701   if ((code == NE || code == EQ)
10702       && integer_zerop (arg1)
10703       && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10704     {
10705       gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10706       if (srcstmt
10707 	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10708 	{
10709 	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10710 	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10711 	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10712 				       gimple_assign_rhs1 (srcstmt),
10713 				       gimple_assign_rhs2 (srcstmt));
10714 	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10715 	  if (temp)
10716 	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10717 	}
10718     }
10719 
10720   if (! get_subtarget (target)
10721       || GET_MODE (subtarget) != operand_mode)
10722     subtarget = 0;
10723 
10724   expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10725 
10726   if (target == 0)
10727     target = gen_reg_rtx (mode);
10728 
10729   /* Try a cstore if possible.  */
10730   return emit_store_flag_force (target, code, op0, op1,
10731 				operand_mode, unsignedp,
10732 				(TYPE_PRECISION (ops->type) == 1
10733 				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10734 }
10735 
10736 
10737 /* Stubs in case we haven't got a casesi insn.  */
10738 #ifndef HAVE_casesi
10739 # define HAVE_casesi 0
10740 # define gen_casesi(a, b, c, d, e) (0)
10741 # define CODE_FOR_casesi CODE_FOR_nothing
10742 #endif
10743 
10744 /* Attempt to generate a casesi instruction.  Returns 1 if successful,
10745    0 otherwise (i.e. if there is no casesi instruction).  */
10746 int
10747 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10748 	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10749 	    rtx fallback_label ATTRIBUTE_UNUSED)
10750 {
10751   struct expand_operand ops[5];
10752   enum machine_mode index_mode = SImode;
10753   int index_bits = GET_MODE_BITSIZE (index_mode);
10754   rtx op1, op2, index;
10755 
10756   if (! HAVE_casesi)
10757     return 0;
10758 
10759   /* Convert the index to SImode.  */
10760   if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10761     {
10762       enum machine_mode omode = TYPE_MODE (index_type);
10763       rtx rangertx = expand_normal (range);
10764 
10765       /* We must handle the endpoints in the original mode.  */
10766       index_expr = build2 (MINUS_EXPR, index_type,
10767 			   index_expr, minval);
10768       minval = integer_zero_node;
10769       index = expand_normal (index_expr);
10770       if (default_label)
10771         emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10772 				 omode, 1, default_label);
10773       /* Now we can safely truncate.  */
10774       index = convert_to_mode (index_mode, index, 0);
10775     }
10776   else
10777     {
10778       if (TYPE_MODE (index_type) != index_mode)
10779 	{
10780 	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
10781 	  index_expr = fold_convert (index_type, index_expr);
10782 	}
10783 
10784       index = expand_normal (index_expr);
10785     }
10786 
10787   do_pending_stack_adjust ();
10788 
10789   op1 = expand_normal (minval);
10790   op2 = expand_normal (range);
10791 
10792   create_input_operand (&ops[0], index, index_mode);
10793   create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10794   create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10795   create_fixed_operand (&ops[3], table_label);
10796   create_fixed_operand (&ops[4], (default_label
10797 				  ? default_label
10798 				  : fallback_label));
10799   expand_jump_insn (CODE_FOR_casesi, 5, ops);
10800   return 1;
10801 }
10802 
10803 /* Attempt to generate a tablejump instruction; same concept.  */
10804 #ifndef HAVE_tablejump
10805 #define HAVE_tablejump 0
10806 #define gen_tablejump(x, y) (0)
10807 #endif
10808 
10809 /* Subroutine of the next function.
10810 
10811    INDEX is the value being switched on, with the lowest value
10812    in the table already subtracted.
10813    MODE is its expected mode (needed if INDEX is constant).
10814    RANGE is the length of the jump table.
10815    TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10816 
10817    DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10818    index value is out of range.  */
10819 
10820 static void
10821 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10822 	      rtx default_label)
10823 {
10824   rtx temp, vector;
10825 
10826   if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10827     cfun->cfg->max_jumptable_ents = INTVAL (range);
10828 
10829   /* Do an unsigned comparison (in the proper mode) between the index
10830      expression and the value which represents the length of the range.
10831      Since we just finished subtracting the lower bound of the range
10832      from the index expression, this comparison allows us to simultaneously
10833      check that the original index expression value is both greater than
10834      or equal to the minimum value of the range and less than or equal to
10835      the maximum value of the range.  */
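  /* Editorial illustration, not from the original sources: this is the
     usual unsigned range-check trick, equivalent at the source level to

       if ((unsigned) (i - low) > (unsigned) (high - low))
         goto default_label;

     folding the i < low and i > high tests into a single comparison.  */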
10836 
10837   if (default_label)
10838     emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10839 			     default_label);
10840 
10841   /* If index is in range, it must fit in Pmode.
10842      Convert to Pmode so we can index with it.  */
10843   if (mode != Pmode)
10844     index = convert_to_mode (Pmode, index, 1);
10845 
10846   /* Don't let a MEM slip through, because then INDEX that comes
10847      out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10848      and break_out_memory_refs will go to work on it and mess it up.  */
10849 #ifdef PIC_CASE_VECTOR_ADDRESS
10850   if (flag_pic && !REG_P (index))
10851     index = copy_to_mode_reg (Pmode, index);
10852 #endif
10853 
10854   /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10855      GET_MODE_SIZE, because this indicates how large insns are.  The other
10856      uses should all be Pmode, because they are addresses.  This code
10857      could fail if addresses and insns are not the same size.  */
10858   index = gen_rtx_PLUS (Pmode,
10859 			gen_rtx_MULT (Pmode, index,
10860 				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10861 			gen_rtx_LABEL_REF (Pmode, table_label));
10862 #ifdef PIC_CASE_VECTOR_ADDRESS
10863   if (flag_pic)
10864     index = PIC_CASE_VECTOR_ADDRESS (index);
10865   else
10866 #endif
10867     index = memory_address (CASE_VECTOR_MODE, index);
10868   temp = gen_reg_rtx (CASE_VECTOR_MODE);
10869   vector = gen_const_mem (CASE_VECTOR_MODE, index);
10870   convert_move (temp, vector, 0);
10871 
10872   emit_jump_insn (gen_tablejump (temp, table_label));
10873 
10874   /* If we are generating PIC code or if the table is PC-relative, the
10875      table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
10876   if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10877     emit_barrier ();
10878 }
10879 
10880 int
10881 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10882 	       rtx table_label, rtx default_label)
10883 {
10884   rtx index;
10885 
10886   if (! HAVE_tablejump)
10887     return 0;
10888 
10889   index_expr = fold_build2 (MINUS_EXPR, index_type,
10890 			    fold_convert (index_type, index_expr),
10891 			    fold_convert (index_type, minval));
10892   index = expand_normal (index_expr);
10893   do_pending_stack_adjust ();
10894 
10895   do_tablejump (index, TYPE_MODE (index_type),
10896 		convert_modes (TYPE_MODE (index_type),
10897 			       TYPE_MODE (TREE_TYPE (range)),
10898 			       expand_normal (range),
10899 			       TYPE_UNSIGNED (TREE_TYPE (range))),
10900 		table_label, default_label);
10901   return 1;
10902 }
10903 
10904 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
10905 static rtx
10906 const_vector_from_tree (tree exp)
10907 {
10908   rtvec v;
10909   int units, i;
10910   tree link, elt;
10911   enum machine_mode inner, mode;
10912 
10913   mode = TYPE_MODE (TREE_TYPE (exp));
10914 
10915   if (initializer_zerop (exp))
10916     return CONST0_RTX (mode);
10917 
10918   units = GET_MODE_NUNITS (mode);
10919   inner = GET_MODE_INNER (mode);
10920 
10921   v = rtvec_alloc (units);
10922 
10923   link = TREE_VECTOR_CST_ELTS (exp);
10924   for (i = 0; link; link = TREE_CHAIN (link), ++i)
10925     {
10926       elt = TREE_VALUE (link);
10927 
10928       if (TREE_CODE (elt) == REAL_CST)
10929 	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10930 							 inner);
10931       else if (TREE_CODE (elt) == FIXED_CST)
10932 	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10933 							 inner);
10934       else
10935 	RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10936 						   inner);
10937     }
10938 
10939   /* Initialize remaining elements to 0.  */
10940   for (; i < units; ++i)
10941     RTVEC_ELT (v, i) = CONST0_RTX (inner);
10942 
10943   return gen_rtx_CONST_VECTOR (mode, v);
10944 }
10945 
10946 /* Build a decl for a personality function given a language prefix.  */
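/* Editorial illustration, not from the original sources: the C++ front end
   passes the prefix "gxx", which with DWARF-2 unwinding produces the
   familiar "__gxx_personality_v0" and with SJLJ unwinding
   "__gxx_personality_sj0".  */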
10947 
10948 tree
10949 build_personality_function (const char *lang)
10950 {
10951   const char *unwind_and_version;
10952   tree decl, type;
10953   char *name;
10954 
10955   switch (targetm_common.except_unwind_info (&global_options))
10956     {
10957     case UI_NONE:
10958       return NULL;
10959     case UI_SJLJ:
10960       unwind_and_version = "_sj0";
10961       break;
10962     case UI_DWARF2:
10963     case UI_TARGET:
10964       unwind_and_version = "_v0";
10965       break;
10966     default:
10967       gcc_unreachable ();
10968     }
10969 
10970   name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
10971 
10972   type = build_function_type_list (integer_type_node, integer_type_node,
10973 				   long_long_unsigned_type_node,
10974 				   ptr_type_node, ptr_type_node, NULL_TREE);
10975   decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10976 		     get_identifier (name), type);
10977   DECL_ARTIFICIAL (decl) = 1;
10978   DECL_EXTERNAL (decl) = 1;
10979   TREE_PUBLIC (decl) = 1;
10980 
10981   /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
10982      are the flags assigned by targetm.encode_section_info.  */
10983   SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10984 
10985   return decl;
10986 }
10987 
10988 /* Extracts the personality function of DECL and returns the corresponding
10989    libfunc.  */
10990 
10991 rtx
10992 get_personality_function (tree decl)
10993 {
10994   tree personality = DECL_FUNCTION_PERSONALITY (decl);
10995   enum eh_personality_kind pk;
10996 
10997   pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10998   if (pk == eh_personality_none)
10999     return NULL;
11000 
11001   if (!personality
11002       && pk == eh_personality_any)
11003     personality = lang_hooks.eh_personality ();
11004 
11005   if (pk == eh_personality_lang)
11006     gcc_assert (personality != NULL_TREE);
11007 
11008   return XEXP (DECL_RTL (personality), 0);
11009 }
11010 
11011 #include "gt-expr.h"
11012