xref: /dragonfly/contrib/gcc-8.0/gcc/expr.c (revision 9f47dde1)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2    Copyright (C) 1988-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "ssa.h"
32 #include "expmed.h"
33 #include "optabs.h"
34 #include "regs.h"
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "attribs.h"
43 #include "varasm.h"
44 #include "except.h"
45 #include "insn-attr.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "stmt.h"
50 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
51 #include "expr.h"
52 #include "optabs-tree.h"
53 #include "libfuncs.h"
54 #include "reload.h"
55 #include "langhooks.h"
56 #include "common/common-target.h"
57 #include "tree-ssa-live.h"
58 #include "tree-outof-ssa.h"
59 #include "tree-ssa-address.h"
60 #include "builtins.h"
61 #include "tree-chkp.h"
62 #include "rtl-chkp.h"
63 #include "ccmp.h"
64 #include "rtx-vector-builder.h"
65 
66 
67 /* If this is nonzero, we do not bother generating VOLATILE
68    around volatile memory references, and we are willing to
69    output indirect addresses.  If cse is to follow, we reject
70    indirect addresses so a useful potential cse is generated;
71    if it is used only once, instruction combination will produce
72    the same indirect address eventually.  */
73 int cse_not_expected;
74 
75 static bool block_move_libcall_safe_for_call_parm (void);
76 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
77 					unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
78 					unsigned HOST_WIDE_INT);
79 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
80 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
81 static rtx_insn *compress_float_constant (rtx, rtx);
82 static rtx get_subtarget (rtx);
83 static void store_constructor (tree, rtx, int, poly_int64, bool);
84 static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,
85 			machine_mode, tree, alias_set_type, bool, bool);
86 
87 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
88 
89 static int is_aligning_offset (const_tree, const_tree);
90 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
91 static rtx do_store_flag (sepops, rtx, machine_mode);
92 #ifdef PUSH_ROUNDING
93 static void emit_single_push_insn (machine_mode, rtx, tree);
94 #endif
95 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
96 			  profile_probability);
97 static rtx const_vector_from_tree (tree);
98 static rtx const_scalar_mask_from_tree (scalar_int_mode, tree);
99 static tree tree_expr_size (const_tree);
100 static HOST_WIDE_INT int_expr_size (tree);
101 static void convert_mode_scalar (rtx, rtx, int);
102 
103 
104 /* This is run to set up which modes can be used
105    directly in memory and to initialize the block move optab.  It is run
106    at the beginning of compilation and when the target is reinitialized.  */
107 
108 void
109 init_expr_target (void)
110 {
111   rtx pat;
112   int num_clobbers;
113   rtx mem, mem1;
114   rtx reg;
115 
116   /* Try indexing by frame ptr and try by stack ptr.
117      It is known that on the Convex the stack ptr isn't a valid index.
118      With luck, one or the other is valid on any machine.  */
119   mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
120   mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
121 
122   /* A scratch register we can modify in-place below to avoid
123      useless RTL allocations.  */
124   reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
125 
126   rtx_insn *insn = as_a<rtx_insn *> (rtx_alloc (INSN));
127   pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
128   PATTERN (insn) = pat;
129 
130   for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
131        mode = (machine_mode) ((int) mode + 1))
132     {
133       int regno;
134 
135       direct_load[(int) mode] = direct_store[(int) mode] = 0;
136       PUT_MODE (mem, mode);
137       PUT_MODE (mem1, mode);
138 
139       /* See if there is some register that can be used in this mode and
140 	 directly loaded or stored from memory.  */
141 
142       if (mode != VOIDmode && mode != BLKmode)
143 	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
144 	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
145 	     regno++)
146 	  {
147 	    if (!targetm.hard_regno_mode_ok (regno, mode))
148 	      continue;
149 
150 	    set_mode_and_regno (reg, mode, regno);
151 
152 	    SET_SRC (pat) = mem;
153 	    SET_DEST (pat) = reg;
154 	    if (recog (pat, insn, &num_clobbers) >= 0)
155 	      direct_load[(int) mode] = 1;
156 
157 	    SET_SRC (pat) = mem1;
158 	    SET_DEST (pat) = reg;
159 	    if (recog (pat, insn, &num_clobbers) >= 0)
160 	      direct_load[(int) mode] = 1;
161 
162 	    SET_SRC (pat) = reg;
163 	    SET_DEST (pat) = mem;
164 	    if (recog (pat, insn, &num_clobbers) >= 0)
165 	      direct_store[(int) mode] = 1;
166 
167 	    SET_SRC (pat) = reg;
168 	    SET_DEST (pat) = mem1;
169 	    if (recog (pat, insn, &num_clobbers) >= 0)
170 	      direct_store[(int) mode] = 1;
171 	  }
172     }
173 
174   mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
175 
176   opt_scalar_float_mode mode_iter;
177   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
178     {
179       scalar_float_mode mode = mode_iter.require ();
180       scalar_float_mode srcmode;
181       FOR_EACH_MODE_UNTIL (srcmode, mode)
182 	{
183 	  enum insn_code ic;
184 
185 	  ic = can_extend_p (mode, srcmode, 0);
186 	  if (ic == CODE_FOR_nothing)
187 	    continue;
188 
189 	  PUT_MODE (mem, srcmode);
190 
191 	  if (insn_operand_matches (ic, 1, mem))
192 	    float_extend_from_mem[mode][srcmode] = true;
193 	}
194     }
195 }
196 
197 /* This is run at the start of compiling a function.  */
198 
199 void
200 init_expr (void)
201 {
202   memset (&crtl->expr, 0, sizeof (crtl->expr));
203 }
204 
205 /* Copy data from FROM to TO, where the machine modes are not the same.
206    Both modes may be integer, or both may be floating, or both may be
207    fixed-point.
208    UNSIGNEDP should be nonzero if FROM is an unsigned type.
209    This causes zero-extension instead of sign-extension.  */
210 
211 void
212 convert_move (rtx to, rtx from, int unsignedp)
213 {
214   machine_mode to_mode = GET_MODE (to);
215   machine_mode from_mode = GET_MODE (from);
216 
217   gcc_assert (to_mode != BLKmode);
218   gcc_assert (from_mode != BLKmode);
219 
220   /* If the source and destination are already the same, then there's
221      nothing to do.  */
222   if (to == from)
223     return;
224 
225   /* If FROM is a SUBREG that indicates that we have already done at least
226      the required extension, strip it.  We don't handle such SUBREGs as
227      TO here.  */
228 
229   scalar_int_mode to_int_mode;
230   if (GET_CODE (from) == SUBREG
231       && SUBREG_PROMOTED_VAR_P (from)
232       && is_a <scalar_int_mode> (to_mode, &to_int_mode)
233       && (GET_MODE_PRECISION (subreg_promoted_mode (from))
234 	  >= GET_MODE_PRECISION (to_int_mode))
235       && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
236     from = gen_lowpart (to_int_mode, from), from_mode = to_int_mode;
237 
238   gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
239 
240   if (to_mode == from_mode
241       || (from_mode == VOIDmode && CONSTANT_P (from)))
242     {
243       emit_move_insn (to, from);
244       return;
245     }
246 
247   if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
248     {
249       gcc_assert (known_eq (GET_MODE_BITSIZE (from_mode),
250 			    GET_MODE_BITSIZE (to_mode)));
251 
252       if (VECTOR_MODE_P (to_mode))
253 	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
254       else
255 	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
256 
257       emit_move_insn (to, from);
258       return;
259     }
260 
261   if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
262     {
263       convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
264       convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
265       return;
266     }
267 
268   convert_mode_scalar (to, from, unsignedp);
269 }
270 
271 /* Like convert_move, but deals only with scalar modes.  */
272 
273 static void
274 convert_mode_scalar (rtx to, rtx from, int unsignedp)
275 {
276   /* Both modes should be scalar types.  */
277   scalar_mode from_mode = as_a <scalar_mode> (GET_MODE (from));
278   scalar_mode to_mode = as_a <scalar_mode> (GET_MODE (to));
279   bool to_real = SCALAR_FLOAT_MODE_P (to_mode);
280   bool from_real = SCALAR_FLOAT_MODE_P (from_mode);
281   enum insn_code code;
282   rtx libcall;
283 
284   gcc_assert (to_real == from_real);
285 
286   /* rtx code for making an equivalent value.  */
287   enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
288 			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
289 
290   if (to_real)
291     {
292       rtx value;
293       rtx_insn *insns;
294       convert_optab tab;
295 
296       gcc_assert ((GET_MODE_PRECISION (from_mode)
297 		   != GET_MODE_PRECISION (to_mode))
298 		  || (DECIMAL_FLOAT_MODE_P (from_mode)
299 		      != DECIMAL_FLOAT_MODE_P (to_mode)));
300 
301       if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
302 	/* Conversion between decimal float and binary float, same size.  */
303 	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
304       else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
305 	tab = sext_optab;
306       else
307 	tab = trunc_optab;
308 
309       /* Try converting directly if the insn is supported.  */
310 
311       code = convert_optab_handler (tab, to_mode, from_mode);
312       if (code != CODE_FOR_nothing)
313 	{
314 	  emit_unop_insn (code, to, from,
315 			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
316 	  return;
317 	}
318 
319       /* Otherwise use a libcall.  */
320       libcall = convert_optab_libfunc (tab, to_mode, from_mode);
321 
322       /* Is this conversion implemented yet?  */
323       gcc_assert (libcall);
324 
325       start_sequence ();
326       value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
327 				       from, from_mode);
328       insns = get_insns ();
329       end_sequence ();
330       emit_libcall_block (insns, to, value,
331 			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
332 								       from)
333 			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
334       return;
335     }
336 
337   /* Handle pointer conversion.  */			/* SPEE 900220.  */
338   /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
339   {
340     convert_optab ctab;
341 
342     if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
343       ctab = trunc_optab;
344     else if (unsignedp)
345       ctab = zext_optab;
346     else
347       ctab = sext_optab;
348 
349     if (convert_optab_handler (ctab, to_mode, from_mode)
350 	!= CODE_FOR_nothing)
351       {
352 	emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
353 			to, from, UNKNOWN);
354 	return;
355       }
356   }
357 
358   /* Targets are expected to provide conversion insns between PxImode and
359      xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
360   if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
361     {
362       scalar_int_mode full_mode
363 	= smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));
364 
365       gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
366 		  != CODE_FOR_nothing);
367 
368       if (full_mode != from_mode)
369 	from = convert_to_mode (full_mode, from, unsignedp);
370       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
371 		      to, from, UNKNOWN);
372       return;
373     }
374   if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
375     {
376       rtx new_from;
377       scalar_int_mode full_mode
378 	= smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
379       convert_optab ctab = unsignedp ? zext_optab : sext_optab;
380       enum insn_code icode;
381 
382       icode = convert_optab_handler (ctab, full_mode, from_mode);
383       gcc_assert (icode != CODE_FOR_nothing);
384 
385       if (to_mode == full_mode)
386 	{
387 	  emit_unop_insn (icode, to, from, UNKNOWN);
388 	  return;
389 	}
390 
391       new_from = gen_reg_rtx (full_mode);
392       emit_unop_insn (icode, new_from, from, UNKNOWN);
393 
394       /* else proceed to integer conversions below.  */
395       from_mode = full_mode;
396       from = new_from;
397     }
398 
399    /* Make sure both are fixed-point modes or both are not.  */
400    gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
401 	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
402    if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
403     {
404       /* If we widen from_mode to to_mode and they are in the same class,
405 	 we won't saturate the result.
406 	 Otherwise, always saturate the result to play safe.  */
407       if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
408 	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
409 	expand_fixed_convert (to, from, 0, 0);
410       else
411 	expand_fixed_convert (to, from, 0, 1);
412       return;
413     }
414 
415   /* Now both modes are integers.  */
416 
417   /* Handle expanding beyond a word.  */
418   if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
419       && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
420     {
421       rtx_insn *insns;
422       rtx lowpart;
423       rtx fill_value;
424       rtx lowfrom;
425       int i;
426       scalar_mode lowpart_mode;
427       int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
428 
429       /* Try converting directly if the insn is supported.  */
430       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
431 	  != CODE_FOR_nothing)
432 	{
433 	  /* If FROM is a SUBREG, put it into a register.  Do this
434 	     so that we always generate the same set of insns for
435 	     better cse'ing; if an intermediate assignment occurred,
436 	     we won't be doing the operation directly on the SUBREG.  */
437 	  if (optimize > 0 && GET_CODE (from) == SUBREG)
438 	    from = force_reg (from_mode, from);
439 	  emit_unop_insn (code, to, from, equiv_code);
440 	  return;
441 	}
442       /* Next, try converting via full word.  */
443       else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
444 	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
445 		   != CODE_FOR_nothing))
446 	{
447 	  rtx word_to = gen_reg_rtx (word_mode);
448 	  if (REG_P (to))
449 	    {
450 	      if (reg_overlap_mentioned_p (to, from))
451 		from = force_reg (from_mode, from);
452 	      emit_clobber (to);
453 	    }
454 	  convert_move (word_to, from, unsignedp);
455 	  emit_unop_insn (code, to, word_to, equiv_code);
456 	  return;
457 	}
458 
459       /* No special multiword conversion insn; do it by hand.  */
460       start_sequence ();
461 
462       /* Since we will turn this into a no conflict block, we must ensure
463          the source does not overlap the target so force it into an isolated
464          register when maybe so.  Likewise for any MEM input, since the
465          conversion sequence might require several references to it and we
466          must ensure we're getting the same value every time.  */
467 
468       if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
469 	from = force_reg (from_mode, from);
470 
471       /* Get a copy of FROM widened to a word, if necessary.  */
472       if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
473 	lowpart_mode = word_mode;
474       else
475 	lowpart_mode = from_mode;
476 
477       lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
478 
479       lowpart = gen_lowpart (lowpart_mode, to);
480       emit_move_insn (lowpart, lowfrom);
481 
482       /* Compute the value to put in each remaining word.  */
483       if (unsignedp)
484 	fill_value = const0_rtx;
485       else
486 	fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
487 					    LT, lowfrom, const0_rtx,
488 					    lowpart_mode, 0, -1);
489 
490       /* Fill the remaining words.  */
491       for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
492 	{
493 	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
494 	  rtx subword = operand_subword (to, index, 1, to_mode);
495 
496 	  gcc_assert (subword);
497 
498 	  if (fill_value != subword)
499 	    emit_move_insn (subword, fill_value);
500 	}
501 
502       insns = get_insns ();
503       end_sequence ();
504 
505       emit_insn (insns);
506       return;
507     }
508 
509   /* Truncating multi-word to a word or less.  */
510   if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
511       && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
512     {
513       if (!((MEM_P (from)
514 	     && ! MEM_VOLATILE_P (from)
515 	     && direct_load[(int) to_mode]
516 	     && ! mode_dependent_address_p (XEXP (from, 0),
517 					    MEM_ADDR_SPACE (from)))
518 	    || REG_P (from)
519 	    || GET_CODE (from) == SUBREG))
520 	from = force_reg (from_mode, from);
521       convert_move (to, gen_lowpart (word_mode, from), 0);
522       return;
523     }
524 
525   /* Now follow all the conversions between integers
526      no more than a word long.  */
527 
528   /* For truncation, usually we can just refer to FROM in a narrower mode.  */
529   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
530       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
531     {
532       if (!((MEM_P (from)
533 	     && ! MEM_VOLATILE_P (from)
534 	     && direct_load[(int) to_mode]
535 	     && ! mode_dependent_address_p (XEXP (from, 0),
536 					    MEM_ADDR_SPACE (from)))
537 	    || REG_P (from)
538 	    || GET_CODE (from) == SUBREG))
539 	from = force_reg (from_mode, from);
540       if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
541 	  && !targetm.hard_regno_mode_ok (REGNO (from), to_mode))
542 	from = copy_to_reg (from);
543       emit_move_insn (to, gen_lowpart (to_mode, from));
544       return;
545     }
546 
547   /* Handle extension.  */
548   if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
549     {
550       /* Convert directly if that works.  */
551       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
552 	  != CODE_FOR_nothing)
553 	{
554 	  emit_unop_insn (code, to, from, equiv_code);
555 	  return;
556 	}
557       else
558 	{
559 	  scalar_mode intermediate;
560 	  rtx tmp;
561 	  int shift_amount;
562 
563 	  /* Search for a mode to convert via.  */
564 	  opt_scalar_mode intermediate_iter;
565 	  FOR_EACH_MODE_FROM (intermediate_iter, from_mode)
566 	    {
567 	      scalar_mode intermediate = intermediate_iter.require ();
568 	      if (((can_extend_p (to_mode, intermediate, unsignedp)
569 		    != CODE_FOR_nothing)
570 		   || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
571 		       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode,
572 							 intermediate)))
573 		  && (can_extend_p (intermediate, from_mode, unsignedp)
574 		      != CODE_FOR_nothing))
575 		{
576 		  convert_move (to, convert_to_mode (intermediate, from,
577 						     unsignedp), unsignedp);
578 		  return;
579 		}
580 	    }
581 
582 	  /* No suitable intermediate mode.
583 	     Generate what we need with	shifts.  */
584 	  shift_amount = (GET_MODE_PRECISION (to_mode)
585 			  - GET_MODE_PRECISION (from_mode));
586 	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
587 	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
588 			      to, unsignedp);
589 	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
590 			      to, unsignedp);
591 	  if (tmp != to)
592 	    emit_move_insn (to, tmp);
593 	  return;
594 	}
595     }
596 
597   /* Support special truncate insns for certain modes.  */
598   if (convert_optab_handler (trunc_optab, to_mode,
599 			     from_mode) != CODE_FOR_nothing)
600     {
601       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
602 		      to, from, UNKNOWN);
603       return;
604     }
605 
606   /* Handle truncation of volatile memrefs, and so on;
607      the things that couldn't be truncated directly,
608      and for which there was no special instruction.
609 
610      ??? Code above formerly short-circuited this, for most integer
611      mode pairs, with a force_reg in from_mode followed by a recursive
612      call to this routine.  Appears always to have been wrong.  */
613   if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
614     {
615       rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
616       emit_move_insn (to, temp);
617       return;
618     }
619 
620   /* Mode combination is not recognized.  */
621   gcc_unreachable ();
622 }
623 
624 /* Return an rtx for a value that would result
625    from converting X to mode MODE.
626    Both X and MODE may be floating, or both integer.
627    UNSIGNEDP is nonzero if X is an unsigned value.
628    This can be done by referring to a part of X in place
629    or by copying to a new temporary with conversion.  */
630 
631 rtx
632 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
633 {
634   return convert_modes (mode, VOIDmode, x, unsignedp);
635 }
636 
637 /* Return an rtx for a value that would result
638    from converting X from mode OLDMODE to mode MODE.
639    Both modes may be floating, or both integer.
640    UNSIGNEDP is nonzero if X is an unsigned value.
641 
642    This can be done by referring to a part of X in place
643    or by copying to a new temporary with conversion.
644 
645    You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
646 
647 rtx
648 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
649 {
650   rtx temp;
651   scalar_int_mode int_mode;
652 
653   /* If FROM is a SUBREG that indicates that we have already done at least
654      the required extension, strip it.  */
655 
656   if (GET_CODE (x) == SUBREG
657       && SUBREG_PROMOTED_VAR_P (x)
658       && is_a <scalar_int_mode> (mode, &int_mode)
659       && (GET_MODE_PRECISION (subreg_promoted_mode (x))
660 	  >= GET_MODE_PRECISION (int_mode))
661       && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
662     x = gen_lowpart (int_mode, SUBREG_REG (x));
663 
664   if (GET_MODE (x) != VOIDmode)
665     oldmode = GET_MODE (x);
666 
667   if (mode == oldmode)
668     return x;
669 
670   if (CONST_SCALAR_INT_P (x)
671       && is_int_mode (mode, &int_mode))
672     {
673       /* If the caller did not tell us the old mode, then there is not
674 	 much to do with respect to canonicalization.  We have to
675 	 assume that all the bits are significant.  */
676       if (GET_MODE_CLASS (oldmode) != MODE_INT)
677 	oldmode = MAX_MODE_INT;
678       wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
679 				   GET_MODE_PRECISION (int_mode),
680 				   unsignedp ? UNSIGNED : SIGNED);
681       return immed_wide_int_const (w, int_mode);
682     }
683 
684   /* We can do this with a gen_lowpart if both desired and current modes
685      are integer, and this is either a constant integer, a register, or a
686      non-volatile MEM. */
687   scalar_int_mode int_oldmode;
688   if (is_int_mode (mode, &int_mode)
689       && is_int_mode (oldmode, &int_oldmode)
690       && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
691       && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
692 	  || CONST_POLY_INT_P (x)
693           || (REG_P (x)
694               && (!HARD_REGISTER_P (x)
695 		  || targetm.hard_regno_mode_ok (REGNO (x), int_mode))
696               && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
697    return gen_lowpart (int_mode, x);
698 
699   /* Converting from integer constant into mode is always equivalent to an
700      subreg operation.  */
701   if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
702     {
703       gcc_assert (known_eq (GET_MODE_BITSIZE (mode),
704 			    GET_MODE_BITSIZE (oldmode)));
705       return simplify_gen_subreg (mode, x, oldmode, 0);
706     }
707 
708   temp = gen_reg_rtx (mode);
709   convert_move (temp, x, unsignedp);
710   return temp;
711 }
712 
713 /* Return the largest alignment we can use for doing a move (or store)
714    of MAX_PIECES.  ALIGN is the largest alignment we could use.  */
715 
716 static unsigned int
717 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
718 {
719   scalar_int_mode tmode
720     = int_mode_for_size (max_pieces * BITS_PER_UNIT, 1).require ();
721 
722   if (align >= GET_MODE_ALIGNMENT (tmode))
723     align = GET_MODE_ALIGNMENT (tmode);
724   else
725     {
726       scalar_int_mode xmode = NARROWEST_INT_MODE;
727       opt_scalar_int_mode mode_iter;
728       FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
729 	{
730 	  tmode = mode_iter.require ();
731 	  if (GET_MODE_SIZE (tmode) > max_pieces
732 	      || targetm.slow_unaligned_access (tmode, align))
733 	    break;
734 	  xmode = tmode;
735 	}
736 
737       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
738     }
739 
740   return align;
741 }
742 
743 /* Return the widest integer mode that is narrower than SIZE bytes.  */
744 
745 static scalar_int_mode
746 widest_int_mode_for_size (unsigned int size)
747 {
748   scalar_int_mode result = NARROWEST_INT_MODE;
749 
750   gcc_checking_assert (size > 1);
751 
752   opt_scalar_int_mode tmode;
753   FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
754     if (GET_MODE_SIZE (tmode.require ()) < size)
755       result = tmode.require ();
756 
757   return result;
758 }
759 
760 /* Determine whether an operation OP on LEN bytes with alignment ALIGN can
761    and should be performed piecewise.  */
762 
763 static bool
764 can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
765 		  enum by_pieces_operation op)
766 {
767   return targetm.use_by_pieces_infrastructure_p (len, align, op,
768 						 optimize_insn_for_speed_p ());
769 }
770 
771 /* Determine whether the LEN bytes can be moved by using several move
772    instructions.  Return nonzero if a call to move_by_pieces should
773    succeed.  */
774 
775 bool
776 can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
777 {
778   return can_do_by_pieces (len, align, MOVE_BY_PIECES);
779 }
780 
781 /* Return number of insns required to perform operation OP by pieces
782    for L bytes.  ALIGN (in bits) is maximum alignment we can assume.  */
783 
784 unsigned HOST_WIDE_INT
785 by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
786 		  unsigned int max_size, by_pieces_operation op)
787 {
788   unsigned HOST_WIDE_INT n_insns = 0;
789 
790   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
791 
792   while (max_size > 1 && l > 0)
793     {
794       scalar_int_mode mode = widest_int_mode_for_size (max_size);
795       enum insn_code icode;
796 
797       unsigned int modesize = GET_MODE_SIZE (mode);
798 
799       icode = optab_handler (mov_optab, mode);
800       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
801 	{
802 	  unsigned HOST_WIDE_INT n_pieces = l / modesize;
803 	  l %= modesize;
804 	  switch (op)
805 	    {
806 	    default:
807 	      n_insns += n_pieces;
808 	      break;
809 
810 	    case COMPARE_BY_PIECES:
811 	      int batch = targetm.compare_by_pieces_branch_ratio (mode);
812 	      int batch_ops = 4 * batch - 1;
813 	      unsigned HOST_WIDE_INT full = n_pieces / batch;
814 	      n_insns += full * batch_ops;
815 	      if (n_pieces % batch != 0)
816 		n_insns++;
817 	      break;
818 
819 	    }
820 	}
821       max_size = modesize;
822     }
823 
824   gcc_assert (!l);
825   return n_insns;
826 }
827 
828 /* Used when performing piecewise block operations, holds information
829    about one of the memory objects involved.  The member functions
830    can be used to generate code for loading from the object and
831    updating the address when iterating.  */
832 
833 class pieces_addr
834 {
835   /* The object being referenced, a MEM.  Can be NULL_RTX to indicate
836      stack pushes.  */
837   rtx m_obj;
838   /* The address of the object.  Can differ from that seen in the
839      MEM rtx if we copied the address to a register.  */
840   rtx m_addr;
841   /* Nonzero if the address on the object has an autoincrement already,
842      signifies whether that was an increment or decrement.  */
843   signed char m_addr_inc;
844   /* Nonzero if we intend to use autoinc without the address already
845      having autoinc form.  We will insert add insns around each memory
846      reference, expecting later passes to form autoinc addressing modes.
847      The only supported options are predecrement and postincrement.  */
848   signed char m_explicit_inc;
849   /* True if we have either of the two possible cases of using
850      autoincrement.  */
851   bool m_auto;
852   /* True if this is an address to be used for load operations rather
853      than stores.  */
854   bool m_is_load;
855 
856   /* Optionally, a function to obtain constants for any given offset into
857      the objects, and data associated with it.  */
858   by_pieces_constfn m_constfn;
859   void *m_cfndata;
860 public:
861   pieces_addr (rtx, bool, by_pieces_constfn, void *);
862   rtx adjust (scalar_int_mode, HOST_WIDE_INT);
863   void increment_address (HOST_WIDE_INT);
864   void maybe_predec (HOST_WIDE_INT);
865   void maybe_postinc (HOST_WIDE_INT);
866   void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
867   int get_addr_inc ()
868   {
869     return m_addr_inc;
870   }
871 };
872 
873 /* Initialize a pieces_addr structure from an object OBJ.  IS_LOAD is
874    true if the operation to be performed on this object is a load
875    rather than a store.  For stores, OBJ can be NULL, in which case we
876    assume the operation is a stack push.  For loads, the optional
877    CONSTFN and its associated CFNDATA can be used in place of the
878    memory load.  */
879 
880 pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
881 			  void *cfndata)
882   : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
883 {
884   m_addr_inc = 0;
885   m_auto = false;
886   if (obj)
887     {
888       rtx addr = XEXP (obj, 0);
889       rtx_code code = GET_CODE (addr);
890       m_addr = addr;
891       bool dec = code == PRE_DEC || code == POST_DEC;
892       bool inc = code == PRE_INC || code == POST_INC;
893       m_auto = inc || dec;
894       if (m_auto)
895 	m_addr_inc = dec ? -1 : 1;
896 
897       /* While we have always looked for these codes here, the code
898 	 implementing the memory operation has never handled them.
899 	 Support could be added later if necessary or beneficial.  */
900       gcc_assert (code != PRE_INC && code != POST_DEC);
901     }
902   else
903     {
904       m_addr = NULL_RTX;
905       if (!is_load)
906 	{
907 	  m_auto = true;
908 	  if (STACK_GROWS_DOWNWARD)
909 	    m_addr_inc = -1;
910 	  else
911 	    m_addr_inc = 1;
912 	}
913       else
914 	gcc_assert (constfn != NULL);
915     }
916   m_explicit_inc = 0;
917   if (constfn)
918     gcc_assert (is_load);
919 }
920 
921 /* Decide whether to use autoinc for an address involved in a memory op.
922    MODE is the mode of the accesses, REVERSE is true if we've decided to
923    perform the operation starting from the end, and LEN is the length of
924    the operation.  Don't override an earlier decision to set m_auto.  */
925 
926 void
927 pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
928 			     HOST_WIDE_INT len)
929 {
930   if (m_auto || m_obj == NULL_RTX)
931     return;
932 
933   bool use_predec = (m_is_load
934 		     ? USE_LOAD_PRE_DECREMENT (mode)
935 		     : USE_STORE_PRE_DECREMENT (mode));
936   bool use_postinc = (m_is_load
937 		      ? USE_LOAD_POST_INCREMENT (mode)
938 		      : USE_STORE_POST_INCREMENT (mode));
939   machine_mode addr_mode = get_address_mode (m_obj);
940 
941   if (use_predec && reverse)
942     {
943       m_addr = copy_to_mode_reg (addr_mode,
944 				 plus_constant (addr_mode,
945 						m_addr, len));
946       m_auto = true;
947       m_explicit_inc = -1;
948     }
949   else if (use_postinc && !reverse)
950     {
951       m_addr = copy_to_mode_reg (addr_mode, m_addr);
952       m_auto = true;
953       m_explicit_inc = 1;
954     }
955   else if (CONSTANT_P (m_addr))
956     m_addr = copy_to_mode_reg (addr_mode, m_addr);
957 }
958 
959 /* Adjust the address to refer to the data at OFFSET in MODE.  If we
960    are using autoincrement for this address, we don't add the offset,
961    but we still modify the MEM's properties.  */
962 
963 rtx
964 pieces_addr::adjust (scalar_int_mode mode, HOST_WIDE_INT offset)
965 {
966   if (m_constfn)
967     return m_constfn (m_cfndata, offset, mode);
968   if (m_obj == NULL_RTX)
969     return NULL_RTX;
970   if (m_auto)
971     return adjust_automodify_address (m_obj, mode, m_addr, offset);
972   else
973     return adjust_address (m_obj, mode, offset);
974 }
975 
976 /* Emit an add instruction to increment the address by SIZE.  */
977 
978 void
979 pieces_addr::increment_address (HOST_WIDE_INT size)
980 {
981   rtx amount = gen_int_mode (size, GET_MODE (m_addr));
982   emit_insn (gen_add2_insn (m_addr, amount));
983 }
984 
985 /* If we are supposed to decrement the address after each access, emit code
986    to do so now.  Increment by SIZE (which has should have the correct sign
987    already).  */
988 
989 void
990 pieces_addr::maybe_predec (HOST_WIDE_INT size)
991 {
992   if (m_explicit_inc >= 0)
993     return;
994   gcc_assert (HAVE_PRE_DECREMENT);
995   increment_address (size);
996 }
997 
998 /* If we are supposed to decrement the address after each access, emit code
999    to do so now.  Increment by SIZE.  */
1000 
1001 void
1002 pieces_addr::maybe_postinc (HOST_WIDE_INT size)
1003 {
1004   if (m_explicit_inc <= 0)
1005     return;
1006   gcc_assert (HAVE_POST_INCREMENT);
1007   increment_address (size);
1008 }
1009 
1010 /* This structure is used by do_op_by_pieces to describe the operation
1011    to be performed.  */
1012 
1013 class op_by_pieces_d
1014 {
1015  protected:
1016   pieces_addr m_to, m_from;
1017   unsigned HOST_WIDE_INT m_len;
1018   HOST_WIDE_INT m_offset;
1019   unsigned int m_align;
1020   unsigned int m_max_size;
1021   bool m_reverse;
1022 
1023   /* Virtual functions, overriden by derived classes for the specific
1024      operation.  */
1025   virtual void generate (rtx, rtx, machine_mode) = 0;
1026   virtual bool prepare_mode (machine_mode, unsigned int) = 0;
1027   virtual void finish_mode (machine_mode)
1028   {
1029   }
1030 
1031  public:
1032   op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
1033 		  unsigned HOST_WIDE_INT, unsigned int);
1034   void run ();
1035 };
1036 
1037 /* The constructor for an op_by_pieces_d structure.  We require two
1038    objects named TO and FROM, which are identified as loads or stores
1039    by TO_LOAD and FROM_LOAD.  If FROM is a load, the optional FROM_CFN
1040    and its associated FROM_CFN_DATA can be used to replace loads with
1041    constant values.  LEN describes the length of the operation.  */
1042 
1043 op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
1044 				rtx from, bool from_load,
1045 				by_pieces_constfn from_cfn,
1046 				void *from_cfn_data,
1047 				unsigned HOST_WIDE_INT len,
1048 				unsigned int align)
1049   : m_to (to, to_load, NULL, NULL),
1050     m_from (from, from_load, from_cfn, from_cfn_data),
1051     m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
1052 {
1053   int toi = m_to.get_addr_inc ();
1054   int fromi = m_from.get_addr_inc ();
1055   if (toi >= 0 && fromi >= 0)
1056     m_reverse = false;
1057   else if (toi <= 0 && fromi <= 0)
1058     m_reverse = true;
1059   else
1060     gcc_unreachable ();
1061 
1062   m_offset = m_reverse ? len : 0;
1063   align = MIN (to ? MEM_ALIGN (to) : align,
1064 	       from ? MEM_ALIGN (from) : align);
1065 
1066   /* If copying requires more than two move insns,
1067      copy addresses to registers (to make displacements shorter)
1068      and use post-increment if available.  */
1069   if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
1070     {
1071       /* Find the mode of the largest comparison.  */
1072       scalar_int_mode mode = widest_int_mode_for_size (m_max_size);
1073 
1074       m_from.decide_autoinc (mode, m_reverse, len);
1075       m_to.decide_autoinc (mode, m_reverse, len);
1076     }
1077 
1078   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1079   m_align = align;
1080 }
1081 
1082 /* This function contains the main loop used for expanding a block
1083    operation.  First move what we can in the largest integer mode,
1084    then go to successively smaller modes.  For every access, call
1085    GENFUN with the two operands and the EXTRA_DATA.  */
1086 
1087 void
1088 op_by_pieces_d::run ()
1089 {
1090   while (m_max_size > 1 && m_len > 0)
1091     {
1092       scalar_int_mode mode = widest_int_mode_for_size (m_max_size);
1093 
1094       if (prepare_mode (mode, m_align))
1095 	{
1096 	  unsigned int size = GET_MODE_SIZE (mode);
1097 	  rtx to1 = NULL_RTX, from1;
1098 
1099 	  while (m_len >= size)
1100 	    {
1101 	      if (m_reverse)
1102 		m_offset -= size;
1103 
1104 	      to1 = m_to.adjust (mode, m_offset);
1105 	      from1 = m_from.adjust (mode, m_offset);
1106 
1107 	      m_to.maybe_predec (-(HOST_WIDE_INT)size);
1108 	      m_from.maybe_predec (-(HOST_WIDE_INT)size);
1109 
1110 	      generate (to1, from1, mode);
1111 
1112 	      m_to.maybe_postinc (size);
1113 	      m_from.maybe_postinc (size);
1114 
1115 	      if (!m_reverse)
1116 		m_offset += size;
1117 
1118 	      m_len -= size;
1119 	    }
1120 
1121 	  finish_mode (mode);
1122 	}
1123 
1124       m_max_size = GET_MODE_SIZE (mode);
1125     }
1126 
1127   /* The code above should have handled everything.  */
1128   gcc_assert (!m_len);
1129 }
1130 
1131 /* Derived class from op_by_pieces_d, providing support for block move
1132    operations.  */
1133 
1134 class move_by_pieces_d : public op_by_pieces_d
1135 {
1136   insn_gen_fn m_gen_fun;
1137   void generate (rtx, rtx, machine_mode);
1138   bool prepare_mode (machine_mode, unsigned int);
1139 
1140  public:
1141   move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1142 		    unsigned int align)
1143     : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
1144   {
1145   }
1146   rtx finish_endp (int);
1147 };
1148 
1149 /* Return true if MODE can be used for a set of copies, given an
1150    alignment ALIGN.  Prepare whatever data is necessary for later
1151    calls to generate.  */
1152 
1153 bool
1154 move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1155 {
1156   insn_code icode = optab_handler (mov_optab, mode);
1157   m_gen_fun = GEN_FCN (icode);
1158   return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1159 }
1160 
1161 /* A callback used when iterating for a compare_by_pieces_operation.
1162    OP0 and OP1 are the values that have been loaded and should be
1163    compared in MODE.  If OP0 is NULL, this means we should generate a
1164    push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
1165    gen function that should be used to generate the mode.  */
1166 
1167 void
1168 move_by_pieces_d::generate (rtx op0, rtx op1,
1169 			    machine_mode mode ATTRIBUTE_UNUSED)
1170 {
1171 #ifdef PUSH_ROUNDING
1172   if (op0 == NULL_RTX)
1173     {
1174       emit_single_push_insn (mode, op1, NULL);
1175       return;
1176     }
1177 #endif
1178   emit_insn (m_gen_fun (op0, op1));
1179 }
1180 
1181 /* Perform the final adjustment at the end of a string to obtain the
1182    correct return value for the block operation.  If ENDP is 1 return
1183    memory at the end ala mempcpy, and if ENDP is 2 return memory the
1184    end minus one byte ala stpcpy.  */
1185 
1186 rtx
1187 move_by_pieces_d::finish_endp (int endp)
1188 {
1189   gcc_assert (!m_reverse);
1190   if (endp == 2)
1191     {
1192       m_to.maybe_postinc (-1);
1193       --m_offset;
1194     }
1195   return m_to.adjust (QImode, m_offset);
1196 }
1197 
1198 /* Generate several move instructions to copy LEN bytes from block FROM to
1199    block TO.  (These are MEM rtx's with BLKmode).
1200 
1201    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1202    used to push FROM to the stack.
1203 
1204    ALIGN is maximum stack alignment we can assume.
1205 
1206    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1207    mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1208    stpcpy.  */
1209 
1210 rtx
1211 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1212 		unsigned int align, int endp)
1213 {
1214 #ifndef PUSH_ROUNDING
1215   if (to == NULL)
1216     gcc_unreachable ();
1217 #endif
1218 
1219   move_by_pieces_d data (to, from, len, align);
1220 
1221   data.run ();
1222 
1223   if (endp)
1224     return data.finish_endp (endp);
1225   else
1226     return to;
1227 }
1228 
1229 /* Derived class from op_by_pieces_d, providing support for block move
1230    operations.  */
1231 
1232 class store_by_pieces_d : public op_by_pieces_d
1233 {
1234   insn_gen_fn m_gen_fun;
1235   void generate (rtx, rtx, machine_mode);
1236   bool prepare_mode (machine_mode, unsigned int);
1237 
1238  public:
1239   store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
1240 		     unsigned HOST_WIDE_INT len, unsigned int align)
1241     : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
1242   {
1243   }
1244   rtx finish_endp (int);
1245 };
1246 
1247 /* Return true if MODE can be used for a set of stores, given an
1248    alignment ALIGN.  Prepare whatever data is necessary for later
1249    calls to generate.  */
1250 
1251 bool
1252 store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1253 {
1254   insn_code icode = optab_handler (mov_optab, mode);
1255   m_gen_fun = GEN_FCN (icode);
1256   return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1257 }
1258 
1259 /* A callback used when iterating for a store_by_pieces_operation.
1260    OP0 and OP1 are the values that have been loaded and should be
1261    compared in MODE.  If OP0 is NULL, this means we should generate a
1262    push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
1263    gen function that should be used to generate the mode.  */
1264 
1265 void
1266 store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
1267 {
1268   emit_insn (m_gen_fun (op0, op1));
1269 }
1270 
1271 /* Perform the final adjustment at the end of a string to obtain the
1272    correct return value for the block operation.  If ENDP is 1 return
1273    memory at the end ala mempcpy, and if ENDP is 2 return memory the
1274    end minus one byte ala stpcpy.  */
1275 
1276 rtx
1277 store_by_pieces_d::finish_endp (int endp)
1278 {
1279   gcc_assert (!m_reverse);
1280   if (endp == 2)
1281     {
1282       m_to.maybe_postinc (-1);
1283       --m_offset;
1284     }
1285   return m_to.adjust (QImode, m_offset);
1286 }
1287 
1288 /* Determine whether the LEN bytes generated by CONSTFUN can be
1289    stored to memory using several move instructions.  CONSTFUNDATA is
1290    a pointer which will be passed as argument in every CONSTFUN call.
1291    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
1292    a memset operation and false if it's a copy of a constant string.
1293    Return nonzero if a call to store_by_pieces should succeed.  */
1294 
1295 int
1296 can_store_by_pieces (unsigned HOST_WIDE_INT len,
1297 		     rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
1298 		     void *constfundata, unsigned int align, bool memsetp)
1299 {
1300   unsigned HOST_WIDE_INT l;
1301   unsigned int max_size;
1302   HOST_WIDE_INT offset = 0;
1303   enum insn_code icode;
1304   int reverse;
1305   /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
1306   rtx cst ATTRIBUTE_UNUSED;
1307 
1308   if (len == 0)
1309     return 1;
1310 
1311   if (!targetm.use_by_pieces_infrastructure_p (len, align,
1312 					       memsetp
1313 						 ? SET_BY_PIECES
1314 						 : STORE_BY_PIECES,
1315 					       optimize_insn_for_speed_p ()))
1316     return 0;
1317 
1318   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
1319 
1320   /* We would first store what we can in the largest integer mode, then go to
1321      successively smaller modes.  */
1322 
1323   for (reverse = 0;
1324        reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
1325        reverse++)
1326     {
1327       l = len;
1328       max_size = STORE_MAX_PIECES + 1;
1329       while (max_size > 1 && l > 0)
1330 	{
1331 	  scalar_int_mode mode = widest_int_mode_for_size (max_size);
1332 
1333 	  icode = optab_handler (mov_optab, mode);
1334 	  if (icode != CODE_FOR_nothing
1335 	      && align >= GET_MODE_ALIGNMENT (mode))
1336 	    {
1337 	      unsigned int size = GET_MODE_SIZE (mode);
1338 
1339 	      while (l >= size)
1340 		{
1341 		  if (reverse)
1342 		    offset -= size;
1343 
1344 		  cst = (*constfun) (constfundata, offset, mode);
1345 		  if (!targetm.legitimate_constant_p (mode, cst))
1346 		    return 0;
1347 
1348 		  if (!reverse)
1349 		    offset += size;
1350 
1351 		  l -= size;
1352 		}
1353 	    }
1354 
1355 	  max_size = GET_MODE_SIZE (mode);
1356 	}
1357 
1358       /* The code above should have handled everything.  */
1359       gcc_assert (!l);
1360     }
1361 
1362   return 1;
1363 }
1364 
1365 /* Generate several move instructions to store LEN bytes generated by
1366    CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
1367    pointer which will be passed as argument in every CONSTFUN call.
1368    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
1369    a memset operation and false if it's a copy of a constant string.
1370    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1371    mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1372    stpcpy.  */
1373 
1374 rtx
1375 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
1376 		 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
1377 		 void *constfundata, unsigned int align, bool memsetp, int endp)
1378 {
1379   if (len == 0)
1380     {
1381       gcc_assert (endp != 2);
1382       return to;
1383     }
1384 
1385   gcc_assert (targetm.use_by_pieces_infrastructure_p
1386 		(len, align,
1387 		 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
1388 		 optimize_insn_for_speed_p ()));
1389 
1390   store_by_pieces_d data (to, constfun, constfundata, len, align);
1391   data.run ();
1392 
1393   if (endp)
1394     return data.finish_endp (endp);
1395   else
1396     return to;
1397 }
1398 
1399 /* Callback routine for clear_by_pieces.
1400    Return const0_rtx unconditionally.  */
1401 
1402 static rtx
1403 clear_by_pieces_1 (void *, HOST_WIDE_INT, scalar_int_mode)
1404 {
1405   return const0_rtx;
1406 }
1407 
1408 /* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
1409    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
1410 
1411 static void
1412 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
1413 {
1414   if (len == 0)
1415     return;
1416 
1417   store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
1418   data.run ();
1419 }
1420 
1421 /* Context used by compare_by_pieces_genfn.  It stores the fail label
1422    to jump to in case of miscomparison, and for branch ratios greater than 1,
1423    it stores an accumulator and the current and maximum counts before
1424    emitting another branch.  */
1425 
1426 class compare_by_pieces_d : public op_by_pieces_d
1427 {
1428   rtx_code_label *m_fail_label;
1429   rtx m_accumulator;
1430   int m_count, m_batch;
1431 
1432   void generate (rtx, rtx, machine_mode);
1433   bool prepare_mode (machine_mode, unsigned int);
1434   void finish_mode (machine_mode);
1435  public:
1436   compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
1437 		       void *op1_cfn_data, HOST_WIDE_INT len, int align,
1438 		       rtx_code_label *fail_label)
1439     : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
1440   {
1441     m_fail_label = fail_label;
1442   }
1443 };
1444 
1445 /* A callback used when iterating for a compare_by_pieces_operation.
1446    OP0 and OP1 are the values that have been loaded and should be
1447    compared in MODE.  DATA holds a pointer to the compare_by_pieces_data
1448    context structure.  */
1449 
1450 void
1451 compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
1452 {
1453   if (m_batch > 1)
1454     {
1455       rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
1456 			       true, OPTAB_LIB_WIDEN);
1457       if (m_count != 0)
1458 	temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
1459 			     true, OPTAB_LIB_WIDEN);
1460       m_accumulator = temp;
1461 
1462       if (++m_count < m_batch)
1463 	return;
1464 
1465       m_count = 0;
1466       op0 = m_accumulator;
1467       op1 = const0_rtx;
1468       m_accumulator = NULL_RTX;
1469     }
1470   do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
1471 			   m_fail_label, profile_probability::uninitialized ());
1472 }
1473 
1474 /* Return true if MODE can be used for a set of moves and comparisons,
1475    given an alignment ALIGN.  Prepare whatever data is necessary for
1476    later calls to generate.  */
1477 
1478 bool
1479 compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1480 {
1481   insn_code icode = optab_handler (mov_optab, mode);
1482   if (icode == CODE_FOR_nothing
1483       || align < GET_MODE_ALIGNMENT (mode)
1484       || !can_compare_p (EQ, mode, ccp_jump))
1485     return false;
1486   m_batch = targetm.compare_by_pieces_branch_ratio (mode);
1487   if (m_batch < 0)
1488     return false;
1489   m_accumulator = NULL_RTX;
1490   m_count = 0;
1491   return true;
1492 }
1493 
1494 /* Called after expanding a series of comparisons in MODE.  If we have
1495    accumulated results for which we haven't emitted a branch yet, do
1496    so now.  */
1497 
1498 void
1499 compare_by_pieces_d::finish_mode (machine_mode mode)
1500 {
1501   if (m_accumulator != NULL_RTX)
1502     do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
1503 			     NULL_RTX, NULL, m_fail_label,
1504 			     profile_probability::uninitialized ());
1505 }
1506 
1507 /* Generate several move instructions to compare LEN bytes from blocks
1508    ARG0 and ARG1.  (These are MEM rtx's with BLKmode).
1509 
1510    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1511    used to push FROM to the stack.
1512 
1513    ALIGN is maximum stack alignment we can assume.
1514 
1515    Optionally, the caller can pass a constfn and associated data in A1_CFN
1516    and A1_CFN_DATA. describing that the second operand being compared is a
1517    known constant and how to obtain its data.  */
1518 
1519 static rtx
1520 compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
1521 		   rtx target, unsigned int align,
1522 		   by_pieces_constfn a1_cfn, void *a1_cfn_data)
1523 {
1524   rtx_code_label *fail_label = gen_label_rtx ();
1525   rtx_code_label *end_label = gen_label_rtx ();
1526 
1527   if (target == NULL_RTX
1528       || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
1529     target = gen_reg_rtx (TYPE_MODE (integer_type_node));
1530 
1531   compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
1532 			    fail_label);
1533 
1534   data.run ();
1535 
1536   emit_move_insn (target, const0_rtx);
1537   emit_jump (end_label);
1538   emit_barrier ();
1539   emit_label (fail_label);
1540   emit_move_insn (target, const1_rtx);
1541   emit_label (end_label);
1542 
1543   return target;
1544 }
1545 
1546 /* Emit code to move a block Y to a block X.  This may be done with
1547    string-move instructions, with multiple scalar move instructions,
1548    or with a library call.
1549 
1550    Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1551    SIZE is an rtx that says how long they are.
1552    ALIGN is the maximum alignment we can assume they have.
1553    METHOD describes what kind of copy this is, and what mechanisms may be used.
1554    MIN_SIZE is the minimal size of block to move
1555    MAX_SIZE is the maximal size of block to move, if it can not be represented
1556    in unsigned HOST_WIDE_INT, than it is mask of all ones.
1557 
1558    Return the address of the new block, if memcpy is called and returns it,
1559    0 otherwise.  */
1560 
1561 rtx
1562 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1563 		       unsigned int expected_align, HOST_WIDE_INT expected_size,
1564 		       unsigned HOST_WIDE_INT min_size,
1565 		       unsigned HOST_WIDE_INT max_size,
1566 		       unsigned HOST_WIDE_INT probable_max_size)
1567 {
1568   int may_use_call;
1569   rtx retval = 0;
1570   unsigned int align;
1571 
1572   gcc_assert (size);
1573   if (CONST_INT_P (size) && INTVAL (size) == 0)
1574     return 0;
1575 
1576   switch (method)
1577     {
1578     case BLOCK_OP_NORMAL:
1579     case BLOCK_OP_TAILCALL:
1580       may_use_call = 1;
1581       break;
1582 
1583     case BLOCK_OP_CALL_PARM:
1584       may_use_call = block_move_libcall_safe_for_call_parm ();
1585 
1586       /* Make inhibit_defer_pop nonzero around the library call
1587 	 to force it to pop the arguments right away.  */
1588       NO_DEFER_POP;
1589       break;
1590 
1591     case BLOCK_OP_NO_LIBCALL:
1592       may_use_call = 0;
1593       break;
1594 
1595     case BLOCK_OP_NO_LIBCALL_RET:
1596       may_use_call = -1;
1597       break;
1598 
1599     default:
1600       gcc_unreachable ();
1601     }
1602 
1603   gcc_assert (MEM_P (x) && MEM_P (y));
1604   align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1605   gcc_assert (align >= BITS_PER_UNIT);
1606 
1607   /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1608      block copy is more efficient for other large modes, e.g. DCmode.  */
1609   x = adjust_address (x, BLKmode, 0);
1610   y = adjust_address (y, BLKmode, 0);
1611 
1612   /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1613      can be incorrect is coming from __builtin_memcpy.  */
1614   if (CONST_INT_P (size))
1615     {
1616       x = shallow_copy_rtx (x);
1617       y = shallow_copy_rtx (y);
1618       set_mem_size (x, INTVAL (size));
1619       set_mem_size (y, INTVAL (size));
1620     }
1621 
1622   if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1623     move_by_pieces (x, y, INTVAL (size), align, 0);
1624   else if (emit_block_move_via_movmem (x, y, size, align,
1625 				       expected_align, expected_size,
1626 				       min_size, max_size, probable_max_size))
1627     ;
1628   else if (may_use_call
1629 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1630 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1631     {
1632       if (may_use_call < 0)
1633 	return pc_rtx;
1634 
1635       /* Since x and y are passed to a libcall, mark the corresponding
1636 	 tree EXPR as addressable.  */
1637       tree y_expr = MEM_EXPR (y);
1638       tree x_expr = MEM_EXPR (x);
1639       if (y_expr)
1640 	mark_addressable (y_expr);
1641       if (x_expr)
1642 	mark_addressable (x_expr);
1643       retval = emit_block_copy_via_libcall (x, y, size,
1644 					    method == BLOCK_OP_TAILCALL);
1645     }
1646 
1647   else
1648     emit_block_move_via_loop (x, y, size, align);
1649 
1650   if (method == BLOCK_OP_CALL_PARM)
1651     OK_DEFER_POP;
1652 
1653   return retval;
1654 }
1655 
1656 rtx
1657 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1658 {
1659   unsigned HOST_WIDE_INT max, min = 0;
1660   if (GET_CODE (size) == CONST_INT)
1661     min = max = UINTVAL (size);
1662   else
1663     max = GET_MODE_MASK (GET_MODE (size));
1664   return emit_block_move_hints (x, y, size, method, 0, -1,
1665 				min, max, max);
1666 }
1667 
1668 /* A subroutine of emit_block_move.  Returns true if calling the
1669    block move libcall will not clobber any parameters which may have
1670    already been placed on the stack.  */
1671 
1672 static bool
1673 block_move_libcall_safe_for_call_parm (void)
1674 {
1675 #if defined (REG_PARM_STACK_SPACE)
1676   tree fn;
1677 #endif
1678 
1679   /* If arguments are pushed on the stack, then they're safe.  */
1680   if (PUSH_ARGS)
1681     return true;
1682 
1683   /* If registers go on the stack anyway, any argument is sure to clobber
1684      an outgoing argument.  */
1685 #if defined (REG_PARM_STACK_SPACE)
1686   fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1687   /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1688      depend on its argument.  */
1689   (void) fn;
1690   if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1691       && REG_PARM_STACK_SPACE (fn) != 0)
1692     return false;
1693 #endif
1694 
1695   /* If any argument goes in memory, then it might clobber an outgoing
1696      argument.  */
1697   {
1698     CUMULATIVE_ARGS args_so_far_v;
1699     cumulative_args_t args_so_far;
1700     tree fn, arg;
1701 
1702     fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1703     INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1704     args_so_far = pack_cumulative_args (&args_so_far_v);
1705 
1706     arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1707     for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1708       {
1709 	machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1710 	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1711 					      NULL_TREE, true);
1712 	if (!tmp || !REG_P (tmp))
1713 	  return false;
1714 	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1715 	  return false;
1716 	targetm.calls.function_arg_advance (args_so_far, mode,
1717 					    NULL_TREE, true);
1718       }
1719   }
1720   return true;
1721 }
1722 
1723 /* A subroutine of emit_block_move.  Expand a movmem pattern;
1724    return true if successful.  */
1725 
1726 static bool
1727 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1728 			    unsigned int expected_align, HOST_WIDE_INT expected_size,
1729 			    unsigned HOST_WIDE_INT min_size,
1730 			    unsigned HOST_WIDE_INT max_size,
1731 			    unsigned HOST_WIDE_INT probable_max_size)
1732 {
1733   int save_volatile_ok = volatile_ok;
1734 
1735   if (expected_align < align)
1736     expected_align = align;
1737   if (expected_size != -1)
1738     {
1739       if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1740 	expected_size = probable_max_size;
1741       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1742 	expected_size = min_size;
1743     }
1744 
1745   /* Since this is a move insn, we don't care about volatility.  */
1746   volatile_ok = 1;
1747 
1748   /* Try the most limited insn first, because there's no point
1749      including more than one in the machine description unless
1750      the more limited one has some advantage.  */
1751 
1752   opt_scalar_int_mode mode_iter;
1753   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
1754     {
1755       scalar_int_mode mode = mode_iter.require ();
1756       enum insn_code code = direct_optab_handler (movmem_optab, mode);
1757 
1758       if (code != CODE_FOR_nothing
1759 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1760 	     here because if SIZE is less than the mode mask, as it is
1761 	     returned by the macro, it will definitely be less than the
1762 	     actual mode mask.  Since SIZE is within the Pmode address
1763 	     space, we limit MODE to Pmode.  */
1764 	  && ((CONST_INT_P (size)
1765 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1766 		   <= (GET_MODE_MASK (mode) >> 1)))
1767 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
1768 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1769 	{
1770 	  struct expand_operand ops[9];
1771 	  unsigned int nops;
1772 
1773 	  /* ??? When called via emit_block_move_for_call, it'd be
1774 	     nice if there were some way to inform the backend, so
1775 	     that it doesn't fail the expansion because it thinks
1776 	     emitting the libcall would be more efficient.  */
1777 	  nops = insn_data[(int) code].n_generator_args;
1778 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1779 
1780 	  create_fixed_operand (&ops[0], x);
1781 	  create_fixed_operand (&ops[1], y);
1782 	  /* The check above guarantees that this size conversion is valid.  */
1783 	  create_convert_operand_to (&ops[2], size, mode, true);
1784 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1785 	  if (nops >= 6)
1786 	    {
1787 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1788 	      create_integer_operand (&ops[5], expected_size);
1789 	    }
1790 	  if (nops >= 8)
1791 	    {
1792 	      create_integer_operand (&ops[6], min_size);
1793 	      /* If we can not represent the maximal size,
1794 		 make parameter NULL.  */
1795 	      if ((HOST_WIDE_INT) max_size != -1)
1796 	        create_integer_operand (&ops[7], max_size);
1797 	      else
1798 		create_fixed_operand (&ops[7], NULL);
1799 	    }
1800 	  if (nops == 9)
1801 	    {
1802 	      /* If we can not represent the maximal size,
1803 		 make parameter NULL.  */
1804 	      if ((HOST_WIDE_INT) probable_max_size != -1)
1805 	        create_integer_operand (&ops[8], probable_max_size);
1806 	      else
1807 		create_fixed_operand (&ops[8], NULL);
1808 	    }
1809 	  if (maybe_expand_insn (code, nops, ops))
1810 	    {
1811 	      volatile_ok = save_volatile_ok;
1812 	      return true;
1813 	    }
1814 	}
1815     }
1816 
1817   volatile_ok = save_volatile_ok;
1818   return false;
1819 }
1820 
1821 /* A subroutine of emit_block_move.  Copy the data via an explicit
1822    loop.  This is used only when libcalls are forbidden.  */
1823 /* ??? It'd be nice to copy in hunks larger than QImode.  */
1824 
1825 static void
1826 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1827 			  unsigned int align ATTRIBUTE_UNUSED)
1828 {
1829   rtx_code_label *cmp_label, *top_label;
1830   rtx iter, x_addr, y_addr, tmp;
1831   machine_mode x_addr_mode = get_address_mode (x);
1832   machine_mode y_addr_mode = get_address_mode (y);
1833   machine_mode iter_mode;
1834 
1835   iter_mode = GET_MODE (size);
1836   if (iter_mode == VOIDmode)
1837     iter_mode = word_mode;
1838 
1839   top_label = gen_label_rtx ();
1840   cmp_label = gen_label_rtx ();
1841   iter = gen_reg_rtx (iter_mode);
1842 
1843   emit_move_insn (iter, const0_rtx);
1844 
1845   x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1846   y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1847   do_pending_stack_adjust ();
1848 
1849   emit_jump (cmp_label);
1850   emit_label (top_label);
1851 
1852   tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1853   x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1854 
1855   if (x_addr_mode != y_addr_mode)
1856     tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1857   y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1858 
1859   x = change_address (x, QImode, x_addr);
1860   y = change_address (y, QImode, y_addr);
1861 
1862   emit_move_insn (x, y);
1863 
1864   tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1865 			     true, OPTAB_LIB_WIDEN);
1866   if (tmp != iter)
1867     emit_move_insn (iter, tmp);
1868 
1869   emit_label (cmp_label);
1870 
1871   emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1872 			   true, top_label,
1873 			   profile_probability::guessed_always ()
1874 				.apply_scale (9, 10));
1875 }
1876 
1877 /* Expand a call to memcpy or memmove or memcmp, and return the result.
1878    TAILCALL is true if this is a tail call.  */
1879 
1880 rtx
1881 emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
1882 			   rtx size, bool tailcall)
1883 {
1884   rtx dst_addr, src_addr;
1885   tree call_expr, dst_tree, src_tree, size_tree;
1886   machine_mode size_mode;
1887 
1888   dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1889   dst_addr = convert_memory_address (ptr_mode, dst_addr);
1890   dst_tree = make_tree (ptr_type_node, dst_addr);
1891 
1892   src_addr = copy_addr_to_reg (XEXP (src, 0));
1893   src_addr = convert_memory_address (ptr_mode, src_addr);
1894   src_tree = make_tree (ptr_type_node, src_addr);
1895 
1896   size_mode = TYPE_MODE (sizetype);
1897   size = convert_to_mode (size_mode, size, 1);
1898   size = copy_to_mode_reg (size_mode, size);
1899   size_tree = make_tree (sizetype, size);
1900 
1901   /* It is incorrect to use the libcall calling conventions for calls to
1902      memcpy/memmove/memcmp because they can be provided by the user.  */
1903   tree fn = builtin_decl_implicit (fncode);
1904   call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1905   CALL_EXPR_TAILCALL (call_expr) = tailcall;
1906 
1907   return expand_call (call_expr, NULL_RTX, false);
1908 }
1909 
1910 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
1911    ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
1912    otherwise return null.  */
1913 
1914 rtx
1915 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
1916 			  rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
1917 			  HOST_WIDE_INT align)
1918 {
1919   machine_mode insn_mode = insn_data[icode].operand[0].mode;
1920 
1921   if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
1922     target = NULL_RTX;
1923 
1924   struct expand_operand ops[5];
1925   create_output_operand (&ops[0], target, insn_mode);
1926   create_fixed_operand (&ops[1], arg1_rtx);
1927   create_fixed_operand (&ops[2], arg2_rtx);
1928   create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
1929 			       TYPE_UNSIGNED (arg3_type));
1930   create_integer_operand (&ops[4], align);
1931   if (maybe_expand_insn (icode, 5, ops))
1932     return ops[0].value;
1933   return NULL_RTX;
1934 }
1935 
1936 /* Expand a block compare between X and Y with length LEN using the
1937    cmpmem optab, placing the result in TARGET.  LEN_TYPE is the type
1938    of the expression that was used to calculate the length.  ALIGN
1939    gives the known minimum common alignment.  */
1940 
1941 static rtx
1942 emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
1943 			   unsigned align)
1944 {
1945   /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
1946      implementing memcmp because it will stop if it encounters two
1947      zero bytes.  */
1948   insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
1949 
1950   if (icode == CODE_FOR_nothing)
1951     return NULL_RTX;
1952 
1953   return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
1954 }
1955 
1956 /* Emit code to compare a block Y to a block X.  This may be done with
1957    string-compare instructions, with multiple scalar instructions,
1958    or with a library call.
1959 
1960    Both X and Y must be MEM rtx's.  LEN is an rtx that says how long
1961    they are.  LEN_TYPE is the type of the expression that was used to
1962    calculate it.
1963 
1964    If EQUALITY_ONLY is true, it means we don't have to return the tri-state
1965    value of a normal memcmp call, instead we can just compare for equality.
1966    If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
1967    returning NULL_RTX.
1968 
1969    Optionally, the caller can pass a constfn and associated data in Y_CFN
1970    and Y_CFN_DATA. describing that the second operand being compared is a
1971    known constant and how to obtain its data.
1972    Return the result of the comparison, or NULL_RTX if we failed to
1973    perform the operation.  */
1974 
1975 rtx
1976 emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
1977 		      bool equality_only, by_pieces_constfn y_cfn,
1978 		      void *y_cfndata)
1979 {
1980   rtx result = 0;
1981 
1982   if (CONST_INT_P (len) && INTVAL (len) == 0)
1983     return const0_rtx;
1984 
1985   gcc_assert (MEM_P (x) && MEM_P (y));
1986   unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1987   gcc_assert (align >= BITS_PER_UNIT);
1988 
1989   x = adjust_address (x, BLKmode, 0);
1990   y = adjust_address (y, BLKmode, 0);
1991 
1992   if (equality_only
1993       && CONST_INT_P (len)
1994       && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
1995     result = compare_by_pieces (x, y, INTVAL (len), target, align,
1996 				y_cfn, y_cfndata);
1997   else
1998     result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);
1999 
2000   return result;
2001 }
2002 
2003 /* Copy all or part of a value X into registers starting at REGNO.
2004    The number of registers to be filled is NREGS.  */
2005 
2006 void
2007 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
2008 {
2009   if (nregs == 0)
2010     return;
2011 
2012   if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
2013     x = validize_mem (force_const_mem (mode, x));
2014 
2015   /* See if the machine can do this with a load multiple insn.  */
2016   if (targetm.have_load_multiple ())
2017     {
2018       rtx_insn *last = get_last_insn ();
2019       rtx first = gen_rtx_REG (word_mode, regno);
2020       if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
2021 						     GEN_INT (nregs)))
2022 	{
2023 	  emit_insn (pat);
2024 	  return;
2025 	}
2026       else
2027 	delete_insns_since (last);
2028     }
2029 
2030   for (int i = 0; i < nregs; i++)
2031     emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2032 		    operand_subword_force (x, i, mode));
2033 }
2034 
2035 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2036    The number of registers to be filled is NREGS.  */
2037 
2038 void
2039 move_block_from_reg (int regno, rtx x, int nregs)
2040 {
2041   if (nregs == 0)
2042     return;
2043 
2044   /* See if the machine can do this with a store multiple insn.  */
2045   if (targetm.have_store_multiple ())
2046     {
2047       rtx_insn *last = get_last_insn ();
2048       rtx first = gen_rtx_REG (word_mode, regno);
2049       if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
2050 						      GEN_INT (nregs)))
2051 	{
2052 	  emit_insn (pat);
2053 	  return;
2054 	}
2055       else
2056 	delete_insns_since (last);
2057     }
2058 
2059   for (int i = 0; i < nregs; i++)
2060     {
2061       rtx tem = operand_subword (x, i, 1, BLKmode);
2062 
2063       gcc_assert (tem);
2064 
2065       emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2066     }
2067 }
2068 
2069 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2070    ORIG, where ORIG is a non-consecutive group of registers represented by
2071    a PARALLEL.  The clone is identical to the original except in that the
2072    original set of registers is replaced by a new set of pseudo registers.
2073    The new set has the same modes as the original set.  */
2074 
2075 rtx
2076 gen_group_rtx (rtx orig)
2077 {
2078   int i, length;
2079   rtx *tmps;
2080 
2081   gcc_assert (GET_CODE (orig) == PARALLEL);
2082 
2083   length = XVECLEN (orig, 0);
2084   tmps = XALLOCAVEC (rtx, length);
2085 
2086   /* Skip a NULL entry in first slot.  */
2087   i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2088 
2089   if (i)
2090     tmps[0] = 0;
2091 
2092   for (; i < length; i++)
2093     {
2094       machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2095       rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2096 
2097       tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2098     }
2099 
2100   return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2101 }
2102 
2103 /* A subroutine of emit_group_load.  Arguments as for emit_group_load,
2104    except that values are placed in TMPS[i], and must later be moved
2105    into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
2106 
2107 static void
2108 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type,
2109 		   poly_int64 ssize)
2110 {
2111   rtx src;
2112   int start, i;
2113   machine_mode m = GET_MODE (orig_src);
2114 
2115   gcc_assert (GET_CODE (dst) == PARALLEL);
2116 
2117   if (m != VOIDmode
2118       && !SCALAR_INT_MODE_P (m)
2119       && !MEM_P (orig_src)
2120       && GET_CODE (orig_src) != CONCAT)
2121     {
2122       scalar_int_mode imode;
2123       if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
2124 	{
2125 	  src = gen_reg_rtx (imode);
2126 	  emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
2127 	}
2128       else
2129 	{
2130 	  src = assign_stack_temp (GET_MODE (orig_src), ssize);
2131 	  emit_move_insn (src, orig_src);
2132 	}
2133       emit_group_load_1 (tmps, dst, src, type, ssize);
2134       return;
2135     }
2136 
2137   /* Check for a NULL entry, used to indicate that the parameter goes
2138      both on the stack and in registers.  */
2139   if (XEXP (XVECEXP (dst, 0, 0), 0))
2140     start = 0;
2141   else
2142     start = 1;
2143 
2144   /* Process the pieces.  */
2145   for (i = start; i < XVECLEN (dst, 0); i++)
2146     {
2147       machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2148       poly_int64 bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2149       poly_int64 bytelen = GET_MODE_SIZE (mode);
2150       poly_int64 shift = 0;
2151 
2152       /* Handle trailing fragments that run over the size of the struct.
2153 	 It's the target's responsibility to make sure that the fragment
2154 	 cannot be strictly smaller in some cases and strictly larger
2155 	 in others.  */
2156       gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
2157       if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2158 	{
2159 	  /* Arrange to shift the fragment to where it belongs.
2160 	     extract_bit_field loads to the lsb of the reg.  */
2161 	  if (
2162 #ifdef BLOCK_REG_PADDING
2163 	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2164 	      == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2165 #else
2166 	      BYTES_BIG_ENDIAN
2167 #endif
2168 	      )
2169 	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2170 	  bytelen = ssize - bytepos;
2171 	  gcc_assert (maybe_gt (bytelen, 0));
2172 	}
2173 
2174       /* If we won't be loading directly from memory, protect the real source
2175 	 from strange tricks we might play; but make sure that the source can
2176 	 be loaded directly into the destination.  */
2177       src = orig_src;
2178       if (!MEM_P (orig_src)
2179 	  && (!CONSTANT_P (orig_src)
2180 	      || (GET_MODE (orig_src) != mode
2181 		  && GET_MODE (orig_src) != VOIDmode)))
2182 	{
2183 	  if (GET_MODE (orig_src) == VOIDmode)
2184 	    src = gen_reg_rtx (mode);
2185 	  else
2186 	    src = gen_reg_rtx (GET_MODE (orig_src));
2187 
2188 	  emit_move_insn (src, orig_src);
2189 	}
2190 
2191       /* Optimize the access just a bit.  */
2192       if (MEM_P (src)
2193 	  && (! targetm.slow_unaligned_access (mode, MEM_ALIGN (src))
2194 	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2195 	  && multiple_p (bytepos * BITS_PER_UNIT, GET_MODE_ALIGNMENT (mode))
2196 	  && known_eq (bytelen, GET_MODE_SIZE (mode)))
2197 	{
2198 	  tmps[i] = gen_reg_rtx (mode);
2199 	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2200 	}
2201       else if (COMPLEX_MODE_P (mode)
2202 	       && GET_MODE (src) == mode
2203 	       && known_eq (bytelen, GET_MODE_SIZE (mode)))
2204 	/* Let emit_move_complex do the bulk of the work.  */
2205 	tmps[i] = src;
2206       else if (GET_CODE (src) == CONCAT)
2207 	{
2208 	  poly_int64 slen = GET_MODE_SIZE (GET_MODE (src));
2209 	  poly_int64 slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2210 	  unsigned int elt;
2211 	  poly_int64 subpos;
2212 
2213 	  if (can_div_trunc_p (bytepos, slen0, &elt, &subpos)
2214 	      && known_le (subpos + bytelen, slen0))
2215 	    {
2216 	      /* The following assumes that the concatenated objects all
2217 		 have the same size.  In this case, a simple calculation
2218 		 can be used to determine the object and the bit field
2219 		 to be extracted.  */
2220 	      tmps[i] = XEXP (src, elt);
2221 	      if (maybe_ne (subpos, 0)
2222 		  || maybe_ne (subpos + bytelen, slen0)
2223 		  || (!CONSTANT_P (tmps[i])
2224 		      && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
2225 		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2226 					     subpos * BITS_PER_UNIT,
2227 					     1, NULL_RTX, mode, mode, false,
2228 					     NULL);
2229 	    }
2230 	  else
2231 	    {
2232 	      rtx mem;
2233 
2234 	      gcc_assert (known_eq (bytepos, 0));
2235 	      mem = assign_stack_temp (GET_MODE (src), slen);
2236 	      emit_move_insn (mem, src);
2237 	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
2238 					   0, 1, NULL_RTX, mode, mode, false,
2239 					   NULL);
2240 	    }
2241 	}
2242       /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2243 	 SIMD register, which is currently broken.  While we get GCC
2244 	 to emit proper RTL for these cases, let's dump to memory.  */
2245       else if (VECTOR_MODE_P (GET_MODE (dst))
2246 	       && REG_P (src))
2247 	{
2248 	  poly_uint64 slen = GET_MODE_SIZE (GET_MODE (src));
2249 	  rtx mem;
2250 
2251 	  mem = assign_stack_temp (GET_MODE (src), slen);
2252 	  emit_move_insn (mem, src);
2253 	  tmps[i] = adjust_address (mem, mode, bytepos);
2254 	}
2255       else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
2256                && XVECLEN (dst, 0) > 1)
2257         tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
2258       else if (CONSTANT_P (src))
2259 	{
2260 	  if (known_eq (bytelen, ssize))
2261 	    tmps[i] = src;
2262 	  else
2263 	    {
2264 	      rtx first, second;
2265 
2266 	      /* TODO: const_wide_int can have sizes other than this...  */
2267 	      gcc_assert (known_eq (2 * bytelen, ssize));
2268 	      split_double (src, &first, &second);
2269 	      if (i)
2270 		tmps[i] = second;
2271 	      else
2272 		tmps[i] = first;
2273 	    }
2274 	}
2275       else if (REG_P (src) && GET_MODE (src) == mode)
2276 	tmps[i] = src;
2277       else
2278 	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2279 				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2280 				     mode, mode, false, NULL);
2281 
2282       if (maybe_ne (shift, 0))
2283 	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
2284 				shift, tmps[i], 0);
2285     }
2286 }
2287 
2288 /* Emit code to move a block SRC of type TYPE to a block DST,
2289    where DST is non-consecutive registers represented by a PARALLEL.
2290    SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2291    if not known.  */
2292 
2293 void
2294 emit_group_load (rtx dst, rtx src, tree type, poly_int64 ssize)
2295 {
2296   rtx *tmps;
2297   int i;
2298 
2299   tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
2300   emit_group_load_1 (tmps, dst, src, type, ssize);
2301 
2302   /* Copy the extracted pieces into the proper (probable) hard regs.  */
2303   for (i = 0; i < XVECLEN (dst, 0); i++)
2304     {
2305       rtx d = XEXP (XVECEXP (dst, 0, i), 0);
2306       if (d == NULL)
2307 	continue;
2308       emit_move_insn (d, tmps[i]);
2309     }
2310 }
2311 
2312 /* Similar, but load SRC into new pseudos in a format that looks like
2313    PARALLEL.  This can later be fed to emit_group_move to get things
2314    in the right place.  */
2315 
2316 rtx
2317 emit_group_load_into_temps (rtx parallel, rtx src, tree type, poly_int64 ssize)
2318 {
2319   rtvec vec;
2320   int i;
2321 
2322   vec = rtvec_alloc (XVECLEN (parallel, 0));
2323   emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
2324 
2325   /* Convert the vector to look just like the original PARALLEL, except
2326      with the computed values.  */
2327   for (i = 0; i < XVECLEN (parallel, 0); i++)
2328     {
2329       rtx e = XVECEXP (parallel, 0, i);
2330       rtx d = XEXP (e, 0);
2331 
2332       if (d)
2333 	{
2334 	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
2335 	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
2336 	}
2337       RTVEC_ELT (vec, i) = e;
2338     }
2339 
2340   return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
2341 }
2342 
2343 /* Emit code to move a block SRC to block DST, where SRC and DST are
2344    non-consecutive groups of registers, each represented by a PARALLEL.  */
2345 
2346 void
2347 emit_group_move (rtx dst, rtx src)
2348 {
2349   int i;
2350 
2351   gcc_assert (GET_CODE (src) == PARALLEL
2352 	      && GET_CODE (dst) == PARALLEL
2353 	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
2354 
2355   /* Skip first entry if NULL.  */
2356   for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2357     emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2358 		    XEXP (XVECEXP (src, 0, i), 0));
2359 }
2360 
2361 /* Move a group of registers represented by a PARALLEL into pseudos.  */
2362 
2363 rtx
2364 emit_group_move_into_temps (rtx src)
2365 {
2366   rtvec vec = rtvec_alloc (XVECLEN (src, 0));
2367   int i;
2368 
2369   for (i = 0; i < XVECLEN (src, 0); i++)
2370     {
2371       rtx e = XVECEXP (src, 0, i);
2372       rtx d = XEXP (e, 0);
2373 
2374       if (d)
2375 	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
2376       RTVEC_ELT (vec, i) = e;
2377     }
2378 
2379   return gen_rtx_PARALLEL (GET_MODE (src), vec);
2380 }
2381 
2382 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2383    where SRC is non-consecutive registers represented by a PARALLEL.
2384    SSIZE represents the total size of block ORIG_DST, or -1 if not
2385    known.  */
2386 
2387 void
2388 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED,
2389 		  poly_int64 ssize)
2390 {
2391   rtx *tmps, dst;
2392   int start, finish, i;
2393   machine_mode m = GET_MODE (orig_dst);
2394 
2395   gcc_assert (GET_CODE (src) == PARALLEL);
2396 
2397   if (!SCALAR_INT_MODE_P (m)
2398       && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
2399     {
2400       scalar_int_mode imode;
2401       if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
2402 	{
2403 	  dst = gen_reg_rtx (imode);
2404 	  emit_group_store (dst, src, type, ssize);
2405 	  dst = gen_lowpart (GET_MODE (orig_dst), dst);
2406 	}
2407       else
2408 	{
2409 	  dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
2410 	  emit_group_store (dst, src, type, ssize);
2411 	}
2412       emit_move_insn (orig_dst, dst);
2413       return;
2414     }
2415 
2416   /* Check for a NULL entry, used to indicate that the parameter goes
2417      both on the stack and in registers.  */
2418   if (XEXP (XVECEXP (src, 0, 0), 0))
2419     start = 0;
2420   else
2421     start = 1;
2422   finish = XVECLEN (src, 0);
2423 
2424   tmps = XALLOCAVEC (rtx, finish);
2425 
2426   /* Copy the (probable) hard regs into pseudos.  */
2427   for (i = start; i < finish; i++)
2428     {
2429       rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2430       if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
2431 	{
2432 	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
2433 	  emit_move_insn (tmps[i], reg);
2434 	}
2435       else
2436 	tmps[i] = reg;
2437     }
2438 
2439   /* If we won't be storing directly into memory, protect the real destination
2440      from strange tricks we might play.  */
2441   dst = orig_dst;
2442   if (GET_CODE (dst) == PARALLEL)
2443     {
2444       rtx temp;
2445 
2446       /* We can get a PARALLEL dst if there is a conditional expression in
2447 	 a return statement.  In that case, the dst and src are the same,
2448 	 so no action is necessary.  */
2449       if (rtx_equal_p (dst, src))
2450 	return;
2451 
2452       /* It is unclear if we can ever reach here, but we may as well handle
2453 	 it.  Allocate a temporary, and split this into a store/load to/from
2454 	 the temporary.  */
2455       temp = assign_stack_temp (GET_MODE (dst), ssize);
2456       emit_group_store (temp, src, type, ssize);
2457       emit_group_load (dst, temp, type, ssize);
2458       return;
2459     }
2460   else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2461     {
2462       machine_mode outer = GET_MODE (dst);
2463       machine_mode inner;
2464       poly_int64 bytepos;
2465       bool done = false;
2466       rtx temp;
2467 
2468       if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2469 	dst = gen_reg_rtx (outer);
2470 
2471       /* Make life a bit easier for combine.  */
2472       /* If the first element of the vector is the low part
2473 	 of the destination mode, use a paradoxical subreg to
2474 	 initialize the destination.  */
2475       if (start < finish)
2476 	{
2477 	  inner = GET_MODE (tmps[start]);
2478 	  bytepos = subreg_lowpart_offset (inner, outer);
2479 	  if (known_eq (INTVAL (XEXP (XVECEXP (src, 0, start), 1)), bytepos))
2480 	    {
2481 	      temp = simplify_gen_subreg (outer, tmps[start],
2482 					  inner, 0);
2483 	      if (temp)
2484 		{
2485 		  emit_move_insn (dst, temp);
2486 		  done = true;
2487 		  start++;
2488 		}
2489 	    }
2490 	}
2491 
2492       /* If the first element wasn't the low part, try the last.  */
2493       if (!done
2494 	  && start < finish - 1)
2495 	{
2496 	  inner = GET_MODE (tmps[finish - 1]);
2497 	  bytepos = subreg_lowpart_offset (inner, outer);
2498 	  if (known_eq (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)),
2499 			bytepos))
2500 	    {
2501 	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
2502 					  inner, 0);
2503 	      if (temp)
2504 		{
2505 		  emit_move_insn (dst, temp);
2506 		  done = true;
2507 		  finish--;
2508 		}
2509 	    }
2510 	}
2511 
2512       /* Otherwise, simply initialize the result to zero.  */
2513       if (!done)
2514         emit_move_insn (dst, CONST0_RTX (outer));
2515     }
2516 
2517   /* Process the pieces.  */
2518   for (i = start; i < finish; i++)
2519     {
2520       poly_int64 bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2521       machine_mode mode = GET_MODE (tmps[i]);
2522       poly_int64 bytelen = GET_MODE_SIZE (mode);
2523       poly_uint64 adj_bytelen;
2524       rtx dest = dst;
2525 
2526       /* Handle trailing fragments that run over the size of the struct.
2527 	 It's the target's responsibility to make sure that the fragment
2528 	 cannot be strictly smaller in some cases and strictly larger
2529 	 in others.  */
2530       gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
2531       if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2532 	adj_bytelen = ssize - bytepos;
2533       else
2534 	adj_bytelen = bytelen;
2535 
2536       if (GET_CODE (dst) == CONCAT)
2537 	{
2538 	  if (known_le (bytepos + adj_bytelen,
2539 			GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
2540 	    dest = XEXP (dst, 0);
2541 	  else if (known_ge (bytepos, GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
2542 	    {
2543 	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2544 	      dest = XEXP (dst, 1);
2545 	    }
2546 	  else
2547 	    {
2548 	      machine_mode dest_mode = GET_MODE (dest);
2549 	      machine_mode tmp_mode = GET_MODE (tmps[i]);
2550 
2551 	      gcc_assert (known_eq (bytepos, 0) && XVECLEN (src, 0));
2552 
2553 	      if (GET_MODE_ALIGNMENT (dest_mode)
2554 		  >= GET_MODE_ALIGNMENT (tmp_mode))
2555 		{
2556 		  dest = assign_stack_temp (dest_mode,
2557 					    GET_MODE_SIZE (dest_mode));
2558 		  emit_move_insn (adjust_address (dest,
2559 						  tmp_mode,
2560 						  bytepos),
2561 				  tmps[i]);
2562 		  dst = dest;
2563 		}
2564 	      else
2565 		{
2566 		  dest = assign_stack_temp (tmp_mode,
2567 					    GET_MODE_SIZE (tmp_mode));
2568 		  emit_move_insn (dest, tmps[i]);
2569 		  dst = adjust_address (dest, dest_mode, bytepos);
2570 		}
2571 	      break;
2572 	    }
2573 	}
2574 
2575       /* Handle trailing fragments that run over the size of the struct.  */
2576       if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2577 	{
2578 	  /* store_bit_field always takes its value from the lsb.
2579 	     Move the fragment to the lsb if it's not already there.  */
2580 	  if (
2581 #ifdef BLOCK_REG_PADDING
2582 	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2583 	      == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2584 #else
2585 	      BYTES_BIG_ENDIAN
2586 #endif
2587 	      )
2588 	    {
2589 	      poly_int64 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2590 	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2591 				      shift, tmps[i], 0);
2592 	    }
2593 
2594 	  /* Make sure not to write past the end of the struct.  */
2595 	  store_bit_field (dest,
2596 			   adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2597 			   bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2598 			   VOIDmode, tmps[i], false);
2599 	}
2600 
2601       /* Optimize the access just a bit.  */
2602       else if (MEM_P (dest)
2603 	       && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (dest))
2604 		   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2605 	       && multiple_p (bytepos * BITS_PER_UNIT,
2606 			      GET_MODE_ALIGNMENT (mode))
2607 	       && known_eq (bytelen, GET_MODE_SIZE (mode)))
2608 	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2609 
2610       else
2611 	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2612 			 0, 0, mode, tmps[i], false);
2613     }
2614 
2615   /* Copy from the pseudo into the (probable) hard reg.  */
2616   if (orig_dst != dst)
2617     emit_move_insn (orig_dst, dst);
2618 }
2619 
2620 /* Return a form of X that does not use a PARALLEL.  TYPE is the type
2621    of the value stored in X.  */
2622 
2623 rtx
2624 maybe_emit_group_store (rtx x, tree type)
2625 {
2626   machine_mode mode = TYPE_MODE (type);
2627   gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2628   if (GET_CODE (x) == PARALLEL)
2629     {
2630       rtx result = gen_reg_rtx (mode);
2631       emit_group_store (result, x, type, int_size_in_bytes (type));
2632       return result;
2633     }
2634   return x;
2635 }
2636 
2637 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2638 
2639    This is used on targets that return BLKmode values in registers.  */
2640 
2641 static void
2642 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2643 {
2644   unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2645   rtx src = NULL, dst = NULL;
2646   unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2647   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2648   /* No current ABI uses variable-sized modes to pass a BLKmnode type.  */
2649   fixed_size_mode mode = as_a <fixed_size_mode> (GET_MODE (srcreg));
2650   fixed_size_mode tmode = as_a <fixed_size_mode> (GET_MODE (target));
2651   fixed_size_mode copy_mode;
2652 
2653   /* BLKmode registers created in the back-end shouldn't have survived.  */
2654   gcc_assert (mode != BLKmode);
2655 
2656   /* If the structure doesn't take up a whole number of words, see whether
2657      SRCREG is padded on the left or on the right.  If it's on the left,
2658      set PADDING_CORRECTION to the number of bits to skip.
2659 
2660      In most ABIs, the structure will be returned at the least end of
2661      the register, which translates to right padding on little-endian
2662      targets and left padding on big-endian targets.  The opposite
2663      holds if the structure is returned at the most significant
2664      end of the register.  */
2665   if (bytes % UNITS_PER_WORD != 0
2666       && (targetm.calls.return_in_msb (type)
2667 	  ? !BYTES_BIG_ENDIAN
2668 	  : BYTES_BIG_ENDIAN))
2669     padding_correction
2670       = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2671 
2672   /* We can use a single move if we have an exact mode for the size.  */
2673   else if (MEM_P (target)
2674 	   && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (target))
2675 	       || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2676 	   && bytes == GET_MODE_SIZE (mode))
2677   {
2678     emit_move_insn (adjust_address (target, mode, 0), srcreg);
2679     return;
2680   }
2681 
2682   /* And if we additionally have the same mode for a register.  */
2683   else if (REG_P (target)
2684 	   && GET_MODE (target) == mode
2685 	   && bytes == GET_MODE_SIZE (mode))
2686   {
2687     emit_move_insn (target, srcreg);
2688     return;
2689   }
2690 
2691   /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2692      into a new pseudo which is a full word.  */
2693   if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2694     {
2695       srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2696       mode = word_mode;
2697     }
2698 
2699   /* Copy the structure BITSIZE bits at a time.  If the target lives in
2700      memory, take care of not reading/writing past its end by selecting
2701      a copy mode suited to BITSIZE.  This should always be possible given
2702      how it is computed.
2703 
2704      If the target lives in register, make sure not to select a copy mode
2705      larger than the mode of the register.
2706 
2707      We could probably emit more efficient code for machines which do not use
2708      strict alignment, but it doesn't seem worth the effort at the current
2709      time.  */
2710 
2711   copy_mode = word_mode;
2712   if (MEM_P (target))
2713     {
2714       opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
2715       if (mem_mode.exists ())
2716 	copy_mode = mem_mode.require ();
2717     }
2718   else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2719     copy_mode = tmode;
2720 
2721   for (bitpos = 0, xbitpos = padding_correction;
2722        bitpos < bytes * BITS_PER_UNIT;
2723        bitpos += bitsize, xbitpos += bitsize)
2724     {
2725       /* We need a new source operand each time xbitpos is on a
2726 	 word boundary and when xbitpos == padding_correction
2727 	 (the first time through).  */
2728       if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2729 	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2730 
2731       /* We need a new destination operand each time bitpos is on
2732 	 a word boundary.  */
2733       if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2734 	dst = target;
2735       else if (bitpos % BITS_PER_WORD == 0)
2736 	dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2737 
2738       /* Use xbitpos for the source extraction (right justified) and
2739 	 bitpos for the destination store (left justified).  */
2740       store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2741 		       extract_bit_field (src, bitsize,
2742 					  xbitpos % BITS_PER_WORD, 1,
2743 					  NULL_RTX, copy_mode, copy_mode,
2744 					  false, NULL),
2745 		       false);
2746     }
2747 }
2748 
2749 /* Copy BLKmode value SRC into a register of mode MODE_IN.  Return the
2750    register if it contains any data, otherwise return null.
2751 
2752    This is used on targets that return BLKmode values in registers.  */
2753 
2754 rtx
2755 copy_blkmode_to_reg (machine_mode mode_in, tree src)
2756 {
2757   int i, n_regs;
2758   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2759   unsigned int bitsize;
2760   rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2761   /* No current ABI uses variable-sized modes to pass a BLKmnode type.  */
2762   fixed_size_mode mode = as_a <fixed_size_mode> (mode_in);
2763   fixed_size_mode dst_mode;
2764 
2765   gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2766 
2767   x = expand_normal (src);
2768 
2769   bytes = arg_int_size_in_bytes (TREE_TYPE (src));
2770   if (bytes == 0)
2771     return NULL_RTX;
2772 
2773   /* If the structure doesn't take up a whole number of words, see
2774      whether the register value should be padded on the left or on
2775      the right.  Set PADDING_CORRECTION to the number of padding
2776      bits needed on the left side.
2777 
2778      In most ABIs, the structure will be returned at the least end of
2779      the register, which translates to right padding on little-endian
2780      targets and left padding on big-endian targets.  The opposite
2781      holds if the structure is returned at the most significant
2782      end of the register.  */
2783   if (bytes % UNITS_PER_WORD != 0
2784       && (targetm.calls.return_in_msb (TREE_TYPE (src))
2785 	  ? !BYTES_BIG_ENDIAN
2786 	  : BYTES_BIG_ENDIAN))
2787     padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2788 					   * BITS_PER_UNIT));
2789 
2790   n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2791   dst_words = XALLOCAVEC (rtx, n_regs);
2792   bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2793 
2794   /* Copy the structure BITSIZE bits at a time.  */
2795   for (bitpos = 0, xbitpos = padding_correction;
2796        bitpos < bytes * BITS_PER_UNIT;
2797        bitpos += bitsize, xbitpos += bitsize)
2798     {
2799       /* We need a new destination pseudo each time xbitpos is
2800 	 on a word boundary and when xbitpos == padding_correction
2801 	 (the first time through).  */
2802       if (xbitpos % BITS_PER_WORD == 0
2803 	  || xbitpos == padding_correction)
2804 	{
2805 	  /* Generate an appropriate register.  */
2806 	  dst_word = gen_reg_rtx (word_mode);
2807 	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2808 
2809 	  /* Clear the destination before we move anything into it.  */
2810 	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
2811 	}
2812 
2813       /* We need a new source operand each time bitpos is on a word
2814 	 boundary.  */
2815       if (bitpos % BITS_PER_WORD == 0)
2816 	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2817 
2818       /* Use bitpos for the source extraction (left justified) and
2819 	 xbitpos for the destination store (right justified).  */
2820       store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2821 		       0, 0, word_mode,
2822 		       extract_bit_field (src_word, bitsize,
2823 					  bitpos % BITS_PER_WORD, 1,
2824 					  NULL_RTX, word_mode, word_mode,
2825 					  false, NULL),
2826 		       false);
2827     }
2828 
2829   if (mode == BLKmode)
2830     {
2831       /* Find the smallest integer mode large enough to hold the
2832 	 entire structure.  */
2833       opt_scalar_int_mode mode_iter;
2834       FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
2835 	if (GET_MODE_SIZE (mode_iter.require ()) >= bytes)
2836 	  break;
2837 
2838       /* A suitable mode should have been found.  */
2839       mode = mode_iter.require ();
2840     }
2841 
2842   if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2843     dst_mode = word_mode;
2844   else
2845     dst_mode = mode;
2846   dst = gen_reg_rtx (dst_mode);
2847 
2848   for (i = 0; i < n_regs; i++)
2849     emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2850 
2851   if (mode != dst_mode)
2852     dst = gen_lowpart (mode, dst);
2853 
2854   return dst;
2855 }
2856 
2857 /* Add a USE expression for REG to the (possibly empty) list pointed
2858    to by CALL_FUSAGE.  REG must denote a hard register.  */
2859 
2860 void
2861 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2862 {
2863   gcc_assert (REG_P (reg));
2864 
2865   if (!HARD_REGISTER_P (reg))
2866     return;
2867 
2868   *call_fusage
2869     = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2870 }
2871 
2872 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2873    to by CALL_FUSAGE.  REG must denote a hard register.  */
2874 
2875 void
2876 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2877 {
2878   gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2879 
2880   *call_fusage
2881     = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2882 }
2883 
2884 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2885    starting at REGNO.  All of these registers must be hard registers.  */
2886 
2887 void
2888 use_regs (rtx *call_fusage, int regno, int nregs)
2889 {
2890   int i;
2891 
2892   gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2893 
2894   for (i = 0; i < nregs; i++)
2895     use_reg (call_fusage, regno_reg_rtx[regno + i]);
2896 }
2897 
2898 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2899    PARALLEL REGS.  This is for calls that pass values in multiple
2900    non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2901 
2902 void
2903 use_group_regs (rtx *call_fusage, rtx regs)
2904 {
2905   int i;
2906 
2907   for (i = 0; i < XVECLEN (regs, 0); i++)
2908     {
2909       rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2910 
2911       /* A NULL entry means the parameter goes both on the stack and in
2912 	 registers.  This can also be a MEM for targets that pass values
2913 	 partially on the stack and partially in registers.  */
2914       if (reg != 0 && REG_P (reg))
2915 	use_reg (call_fusage, reg);
2916     }
2917 }
2918 
2919 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2920    assigment and the code of the expresion on the RHS is CODE.  Return
2921    NULL otherwise.  */
2922 
2923 static gimple *
2924 get_def_for_expr (tree name, enum tree_code code)
2925 {
2926   gimple *def_stmt;
2927 
2928   if (TREE_CODE (name) != SSA_NAME)
2929     return NULL;
2930 
2931   def_stmt = get_gimple_for_ssa_name (name);
2932   if (!def_stmt
2933       || gimple_assign_rhs_code (def_stmt) != code)
2934     return NULL;
2935 
2936   return def_stmt;
2937 }
2938 
2939 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2940    assigment and the class of the expresion on the RHS is CLASS.  Return
2941    NULL otherwise.  */
2942 
2943 static gimple *
2944 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2945 {
2946   gimple *def_stmt;
2947 
2948   if (TREE_CODE (name) != SSA_NAME)
2949     return NULL;
2950 
2951   def_stmt = get_gimple_for_ssa_name (name);
2952   if (!def_stmt
2953       || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2954     return NULL;
2955 
2956   return def_stmt;
2957 }
2958 
2959 /* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2960    its length in bytes.  */
2961 
2962 rtx
2963 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2964 		     unsigned int expected_align, HOST_WIDE_INT expected_size,
2965 		     unsigned HOST_WIDE_INT min_size,
2966 		     unsigned HOST_WIDE_INT max_size,
2967 		     unsigned HOST_WIDE_INT probable_max_size)
2968 {
2969   machine_mode mode = GET_MODE (object);
2970   unsigned int align;
2971 
2972   gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2973 
2974   /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2975      just move a zero.  Otherwise, do this a piece at a time.  */
2976   if (mode != BLKmode
2977       && CONST_INT_P (size)
2978       && known_eq (INTVAL (size), GET_MODE_SIZE (mode)))
2979     {
2980       rtx zero = CONST0_RTX (mode);
2981       if (zero != NULL)
2982 	{
2983 	  emit_move_insn (object, zero);
2984 	  return NULL;
2985 	}
2986 
2987       if (COMPLEX_MODE_P (mode))
2988 	{
2989 	  zero = CONST0_RTX (GET_MODE_INNER (mode));
2990 	  if (zero != NULL)
2991 	    {
2992 	      write_complex_part (object, zero, 0);
2993 	      write_complex_part (object, zero, 1);
2994 	      return NULL;
2995 	    }
2996 	}
2997     }
2998 
2999   if (size == const0_rtx)
3000     return NULL;
3001 
3002   align = MEM_ALIGN (object);
3003 
3004   if (CONST_INT_P (size)
3005       && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
3006 						 CLEAR_BY_PIECES,
3007 						 optimize_insn_for_speed_p ()))
3008     clear_by_pieces (object, INTVAL (size), align);
3009   else if (set_storage_via_setmem (object, size, const0_rtx, align,
3010 				   expected_align, expected_size,
3011 				   min_size, max_size, probable_max_size))
3012     ;
3013   else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
3014     return set_storage_via_libcall (object, size, const0_rtx,
3015 				    method == BLOCK_OP_TAILCALL);
3016   else
3017     gcc_unreachable ();
3018 
3019   return NULL;
3020 }
3021 
3022 rtx
3023 clear_storage (rtx object, rtx size, enum block_op_methods method)
3024 {
3025   unsigned HOST_WIDE_INT max, min = 0;
3026   if (GET_CODE (size) == CONST_INT)
3027     min = max = UINTVAL (size);
3028   else
3029     max = GET_MODE_MASK (GET_MODE (size));
3030   return clear_storage_hints (object, size, method, 0, -1, min, max, max);
3031 }
3032 
3033 
3034 /* A subroutine of clear_storage.  Expand a call to memset.
3035    Return the return value of memset, 0 otherwise.  */
3036 
3037 rtx
3038 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
3039 {
3040   tree call_expr, fn, object_tree, size_tree, val_tree;
3041   machine_mode size_mode;
3042 
3043   object = copy_addr_to_reg (XEXP (object, 0));
3044   object_tree = make_tree (ptr_type_node, object);
3045 
3046   if (!CONST_INT_P (val))
3047     val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
3048   val_tree = make_tree (integer_type_node, val);
3049 
3050   size_mode = TYPE_MODE (sizetype);
3051   size = convert_to_mode (size_mode, size, 1);
3052   size = copy_to_mode_reg (size_mode, size);
3053   size_tree = make_tree (sizetype, size);
3054 
3055   /* It is incorrect to use the libcall calling conventions for calls to
3056      memset because it can be provided by the user.  */
3057   fn = builtin_decl_implicit (BUILT_IN_MEMSET);
3058   call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
3059   CALL_EXPR_TAILCALL (call_expr) = tailcall;
3060 
3061   return expand_call (call_expr, NULL_RTX, false);
3062 }
3063 
3064 /* Expand a setmem pattern; return true if successful.  */
3065 
3066 bool
3067 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
3068 			unsigned int expected_align, HOST_WIDE_INT expected_size,
3069 			unsigned HOST_WIDE_INT min_size,
3070 			unsigned HOST_WIDE_INT max_size,
3071 			unsigned HOST_WIDE_INT probable_max_size)
3072 {
3073   /* Try the most limited insn first, because there's no point
3074      including more than one in the machine description unless
3075      the more limited one has some advantage.  */
3076 
3077   if (expected_align < align)
3078     expected_align = align;
3079   if (expected_size != -1)
3080     {
3081       if ((unsigned HOST_WIDE_INT)expected_size > max_size)
3082 	expected_size = max_size;
3083       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
3084 	expected_size = min_size;
3085     }
3086 
3087   opt_scalar_int_mode mode_iter;
3088   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
3089     {
3090       scalar_int_mode mode = mode_iter.require ();
3091       enum insn_code code = direct_optab_handler (setmem_optab, mode);
3092 
3093       if (code != CODE_FOR_nothing
3094 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
3095 	     here because if SIZE is less than the mode mask, as it is
3096 	     returned by the macro, it will definitely be less than the
3097 	     actual mode mask.  Since SIZE is within the Pmode address
3098 	     space, we limit MODE to Pmode.  */
3099 	  && ((CONST_INT_P (size)
3100 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
3101 		   <= (GET_MODE_MASK (mode) >> 1)))
3102 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
3103 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
3104 	{
3105 	  struct expand_operand ops[9];
3106 	  unsigned int nops;
3107 
3108 	  nops = insn_data[(int) code].n_generator_args;
3109 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
3110 
3111 	  create_fixed_operand (&ops[0], object);
3112 	  /* The check above guarantees that this size conversion is valid.  */
3113 	  create_convert_operand_to (&ops[1], size, mode, true);
3114 	  create_convert_operand_from (&ops[2], val, byte_mode, true);
3115 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
3116 	  if (nops >= 6)
3117 	    {
3118 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
3119 	      create_integer_operand (&ops[5], expected_size);
3120 	    }
3121 	  if (nops >= 8)
3122 	    {
3123 	      create_integer_operand (&ops[6], min_size);
3124 	      /* If we can not represent the maximal size,
3125 		 make parameter NULL.  */
3126 	      if ((HOST_WIDE_INT) max_size != -1)
3127 	        create_integer_operand (&ops[7], max_size);
3128 	      else
3129 		create_fixed_operand (&ops[7], NULL);
3130 	    }
3131 	  if (nops == 9)
3132 	    {
3133 	      /* If we can not represent the maximal size,
3134 		 make parameter NULL.  */
3135 	      if ((HOST_WIDE_INT) probable_max_size != -1)
3136 	        create_integer_operand (&ops[8], probable_max_size);
3137 	      else
3138 		create_fixed_operand (&ops[8], NULL);
3139 	    }
3140 	  if (maybe_expand_insn (code, nops, ops))
3141 	    return true;
3142 	}
3143     }
3144 
3145   return false;
3146 }
3147 
3148 
3149 /* Write to one of the components of the complex value CPLX.  Write VAL to
3150    the real part if IMAG_P is false, and the imaginary part if its true.  */
3151 
3152 void
3153 write_complex_part (rtx cplx, rtx val, bool imag_p)
3154 {
3155   machine_mode cmode;
3156   scalar_mode imode;
3157   unsigned ibitsize;
3158 
3159   if (GET_CODE (cplx) == CONCAT)
3160     {
3161       emit_move_insn (XEXP (cplx, imag_p), val);
3162       return;
3163     }
3164 
3165   cmode = GET_MODE (cplx);
3166   imode = GET_MODE_INNER (cmode);
3167   ibitsize = GET_MODE_BITSIZE (imode);
3168 
3169   /* For MEMs simplify_gen_subreg may generate an invalid new address
3170      because, e.g., the original address is considered mode-dependent
3171      by the target, which restricts simplify_subreg from invoking
3172      adjust_address_nv.  Instead of preparing fallback support for an
3173      invalid address, we call adjust_address_nv directly.  */
3174   if (MEM_P (cplx))
3175     {
3176       emit_move_insn (adjust_address_nv (cplx, imode,
3177 					 imag_p ? GET_MODE_SIZE (imode) : 0),
3178 		      val);
3179       return;
3180     }
3181 
3182   /* If the sub-object is at least word sized, then we know that subregging
3183      will work.  This special case is important, since store_bit_field
3184      wants to operate on integer modes, and there's rarely an OImode to
3185      correspond to TCmode.  */
3186   if (ibitsize >= BITS_PER_WORD
3187       /* For hard regs we have exact predicates.  Assume we can split
3188 	 the original object if it spans an even number of hard regs.
3189 	 This special case is important for SCmode on 64-bit platforms
3190 	 where the natural size of floating-point regs is 32-bit.  */
3191       || (REG_P (cplx)
3192 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3193 	  && REG_NREGS (cplx) % 2 == 0))
3194     {
3195       rtx part = simplify_gen_subreg (imode, cplx, cmode,
3196 				      imag_p ? GET_MODE_SIZE (imode) : 0);
3197       if (part)
3198         {
3199 	  emit_move_insn (part, val);
3200 	  return;
3201 	}
3202       else
3203 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3204 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3205     }
3206 
3207   store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3208 		   false);
3209 }
3210 
3211 /* Extract one of the components of the complex value CPLX.  Extract the
3212    real part if IMAG_P is false, and the imaginary part if it's true.  */
3213 
3214 rtx
3215 read_complex_part (rtx cplx, bool imag_p)
3216 {
3217   machine_mode cmode;
3218   scalar_mode imode;
3219   unsigned ibitsize;
3220 
3221   if (GET_CODE (cplx) == CONCAT)
3222     return XEXP (cplx, imag_p);
3223 
3224   cmode = GET_MODE (cplx);
3225   imode = GET_MODE_INNER (cmode);
3226   ibitsize = GET_MODE_BITSIZE (imode);
3227 
3228   /* Special case reads from complex constants that got spilled to memory.  */
3229   if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3230     {
3231       tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3232       if (decl && TREE_CODE (decl) == COMPLEX_CST)
3233 	{
3234 	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3235 	  if (CONSTANT_CLASS_P (part))
3236 	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3237 	}
3238     }
3239 
3240   /* For MEMs simplify_gen_subreg may generate an invalid new address
3241      because, e.g., the original address is considered mode-dependent
3242      by the target, which restricts simplify_subreg from invoking
3243      adjust_address_nv.  Instead of preparing fallback support for an
3244      invalid address, we call adjust_address_nv directly.  */
3245   if (MEM_P (cplx))
3246     return adjust_address_nv (cplx, imode,
3247 			      imag_p ? GET_MODE_SIZE (imode) : 0);
3248 
3249   /* If the sub-object is at least word sized, then we know that subregging
3250      will work.  This special case is important, since extract_bit_field
3251      wants to operate on integer modes, and there's rarely an OImode to
3252      correspond to TCmode.  */
3253   if (ibitsize >= BITS_PER_WORD
3254       /* For hard regs we have exact predicates.  Assume we can split
3255 	 the original object if it spans an even number of hard regs.
3256 	 This special case is important for SCmode on 64-bit platforms
3257 	 where the natural size of floating-point regs is 32-bit.  */
3258       || (REG_P (cplx)
3259 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3260 	  && REG_NREGS (cplx) % 2 == 0))
3261     {
3262       rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3263 				     imag_p ? GET_MODE_SIZE (imode) : 0);
3264       if (ret)
3265         return ret;
3266       else
3267 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3268 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3269     }
3270 
3271   return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3272 			    true, NULL_RTX, imode, imode, false, NULL);
3273 }
3274 
3275 /* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
3276    NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
3277    represented in NEW_MODE.  If FORCE is true, this will never happen, as
3278    we'll force-create a SUBREG if needed.  */
3279 
3280 static rtx
3281 emit_move_change_mode (machine_mode new_mode,
3282 		       machine_mode old_mode, rtx x, bool force)
3283 {
3284   rtx ret;
3285 
3286   if (push_operand (x, GET_MODE (x)))
3287     {
3288       ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3289       MEM_COPY_ATTRIBUTES (ret, x);
3290     }
3291   else if (MEM_P (x))
3292     {
3293       /* We don't have to worry about changing the address since the
3294 	 size in bytes is supposed to be the same.  */
3295       if (reload_in_progress)
3296 	{
3297 	  /* Copy the MEM to change the mode and move any
3298 	     substitutions from the old MEM to the new one.  */
3299 	  ret = adjust_address_nv (x, new_mode, 0);
3300 	  copy_replacements (x, ret);
3301 	}
3302       else
3303 	ret = adjust_address (x, new_mode, 0);
3304     }
3305   else
3306     {
3307       /* Note that we do want simplify_subreg's behavior of validating
3308 	 that the new mode is ok for a hard register.  If we were to use
3309 	 simplify_gen_subreg, we would create the subreg, but would
3310 	 probably run into the target not being able to implement it.  */
3311       /* Except, of course, when FORCE is true, when this is exactly what
3312 	 we want.  Which is needed for CCmodes on some targets.  */
3313       if (force)
3314 	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3315       else
3316 	ret = simplify_subreg (new_mode, x, old_mode, 0);
3317     }
3318 
3319   return ret;
3320 }
3321 
3322 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
3323    an integer mode of the same size as MODE.  Returns the instruction
3324    emitted, or NULL if such a move could not be generated.  */
3325 
3326 static rtx_insn *
3327 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3328 {
3329   scalar_int_mode imode;
3330   enum insn_code code;
3331 
3332   /* There must exist a mode of the exact size we require.  */
3333   if (!int_mode_for_mode (mode).exists (&imode))
3334     return NULL;
3335 
3336   /* The target must support moves in this mode.  */
3337   code = optab_handler (mov_optab, imode);
3338   if (code == CODE_FOR_nothing)
3339     return NULL;
3340 
3341   x = emit_move_change_mode (imode, mode, x, force);
3342   if (x == NULL_RTX)
3343     return NULL;
3344   y = emit_move_change_mode (imode, mode, y, force);
3345   if (y == NULL_RTX)
3346     return NULL;
3347   return emit_insn (GEN_FCN (code) (x, y));
3348 }
3349 
3350 /* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
3351    Return an equivalent MEM that does not use an auto-increment.  */
3352 
3353 rtx
3354 emit_move_resolve_push (machine_mode mode, rtx x)
3355 {
3356   enum rtx_code code = GET_CODE (XEXP (x, 0));
3357   rtx temp;
3358 
3359   poly_int64 adjust = GET_MODE_SIZE (mode);
3360 #ifdef PUSH_ROUNDING
3361   adjust = PUSH_ROUNDING (adjust);
3362 #endif
3363   if (code == PRE_DEC || code == POST_DEC)
3364     adjust = -adjust;
3365   else if (code == PRE_MODIFY || code == POST_MODIFY)
3366     {
3367       rtx expr = XEXP (XEXP (x, 0), 1);
3368 
3369       gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3370       poly_int64 val = rtx_to_poly_int64 (XEXP (expr, 1));
3371       if (GET_CODE (expr) == MINUS)
3372 	val = -val;
3373       gcc_assert (known_eq (adjust, val) || known_eq (adjust, -val));
3374       adjust = val;
3375     }
3376 
3377   /* Do not use anti_adjust_stack, since we don't want to update
3378      stack_pointer_delta.  */
3379   temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3380 			      gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3381 			      0, OPTAB_LIB_WIDEN);
3382   if (temp != stack_pointer_rtx)
3383     emit_move_insn (stack_pointer_rtx, temp);
3384 
3385   switch (code)
3386     {
3387     case PRE_INC:
3388     case PRE_DEC:
3389     case PRE_MODIFY:
3390       temp = stack_pointer_rtx;
3391       break;
3392     case POST_INC:
3393     case POST_DEC:
3394     case POST_MODIFY:
3395       temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3396       break;
3397     default:
3398       gcc_unreachable ();
3399     }
3400 
3401   return replace_equiv_address (x, temp);
3402 }
3403 
3404 /* A subroutine of emit_move_complex.  Generate a move from Y into X.
3405    X is known to satisfy push_operand, and MODE is known to be complex.
3406    Returns the last instruction emitted.  */
3407 
3408 rtx_insn *
3409 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3410 {
3411   scalar_mode submode = GET_MODE_INNER (mode);
3412   bool imag_first;
3413 
3414 #ifdef PUSH_ROUNDING
3415   poly_int64 submodesize = GET_MODE_SIZE (submode);
3416 
3417   /* In case we output to the stack, but the size is smaller than the
3418      machine can push exactly, we need to use move instructions.  */
3419   if (maybe_ne (PUSH_ROUNDING (submodesize), submodesize))
3420     {
3421       x = emit_move_resolve_push (mode, x);
3422       return emit_move_insn (x, y);
3423     }
3424 #endif
3425 
3426   /* Note that the real part always precedes the imag part in memory
3427      regardless of machine's endianness.  */
3428   switch (GET_CODE (XEXP (x, 0)))
3429     {
3430     case PRE_DEC:
3431     case POST_DEC:
3432       imag_first = true;
3433       break;
3434     case PRE_INC:
3435     case POST_INC:
3436       imag_first = false;
3437       break;
3438     default:
3439       gcc_unreachable ();
3440     }
3441 
3442   emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3443 		  read_complex_part (y, imag_first));
3444   return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3445 			 read_complex_part (y, !imag_first));
3446 }
3447 
3448 /* A subroutine of emit_move_complex.  Perform the move from Y to X
3449    via two moves of the parts.  Returns the last instruction emitted.  */
3450 
3451 rtx_insn *
3452 emit_move_complex_parts (rtx x, rtx y)
3453 {
3454   /* Show the output dies here.  This is necessary for SUBREGs
3455      of pseudos since we cannot track their lifetimes correctly;
3456      hard regs shouldn't appear here except as return values.  */
3457   if (!reload_completed && !reload_in_progress
3458       && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3459     emit_clobber (x);
3460 
3461   write_complex_part (x, read_complex_part (y, false), false);
3462   write_complex_part (x, read_complex_part (y, true), true);
3463 
3464   return get_last_insn ();
3465 }
3466 
3467 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3468    MODE is known to be complex.  Returns the last instruction emitted.  */
3469 
3470 static rtx_insn *
3471 emit_move_complex (machine_mode mode, rtx x, rtx y)
3472 {
3473   bool try_int;
3474 
3475   /* Need to take special care for pushes, to maintain proper ordering
3476      of the data, and possibly extra padding.  */
3477   if (push_operand (x, mode))
3478     return emit_move_complex_push (mode, x, y);
3479 
3480   /* See if we can coerce the target into moving both values at once, except
3481      for floating point where we favor moving as parts if this is easy.  */
3482   if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3483       && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3484       && !(REG_P (x)
3485 	   && HARD_REGISTER_P (x)
3486 	   && REG_NREGS (x) == 1)
3487       && !(REG_P (y)
3488 	   && HARD_REGISTER_P (y)
3489 	   && REG_NREGS (y) == 1))
3490     try_int = false;
3491   /* Not possible if the values are inherently not adjacent.  */
3492   else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3493     try_int = false;
3494   /* Is possible if both are registers (or subregs of registers).  */
3495   else if (register_operand (x, mode) && register_operand (y, mode))
3496     try_int = true;
3497   /* If one of the operands is a memory, and alignment constraints
3498      are friendly enough, we may be able to do combined memory operations.
3499      We do not attempt this if Y is a constant because that combination is
3500      usually better with the by-parts thing below.  */
3501   else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3502 	   && (!STRICT_ALIGNMENT
3503 	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3504     try_int = true;
3505   else
3506     try_int = false;
3507 
3508   if (try_int)
3509     {
3510       rtx_insn *ret;
3511 
3512       /* For memory to memory moves, optimal behavior can be had with the
3513 	 existing block move logic.  */
3514       if (MEM_P (x) && MEM_P (y))
3515 	{
3516 	  emit_block_move (x, y, gen_int_mode (GET_MODE_SIZE (mode), Pmode),
3517 			   BLOCK_OP_NO_LIBCALL);
3518 	  return get_last_insn ();
3519 	}
3520 
3521       ret = emit_move_via_integer (mode, x, y, true);
3522       if (ret)
3523 	return ret;
3524     }
3525 
3526   return emit_move_complex_parts (x, y);
3527 }
3528 
3529 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3530    MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3531 
3532 static rtx_insn *
3533 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3534 {
3535   rtx_insn *ret;
3536 
3537   /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3538   if (mode != CCmode)
3539     {
3540       enum insn_code code = optab_handler (mov_optab, CCmode);
3541       if (code != CODE_FOR_nothing)
3542 	{
3543 	  x = emit_move_change_mode (CCmode, mode, x, true);
3544 	  y = emit_move_change_mode (CCmode, mode, y, true);
3545 	  return emit_insn (GEN_FCN (code) (x, y));
3546 	}
3547     }
3548 
3549   /* Otherwise, find the MODE_INT mode of the same width.  */
3550   ret = emit_move_via_integer (mode, x, y, false);
3551   gcc_assert (ret != NULL);
3552   return ret;
3553 }
3554 
3555 /* Return true if word I of OP lies entirely in the
3556    undefined bits of a paradoxical subreg.  */
3557 
3558 static bool
3559 undefined_operand_subword_p (const_rtx op, int i)
3560 {
3561   if (GET_CODE (op) != SUBREG)
3562     return false;
3563   machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
3564   poly_int64 offset = i * UNITS_PER_WORD + subreg_memory_offset (op);
3565   return (known_ge (offset, GET_MODE_SIZE (innermostmode))
3566 	  || known_le (offset, -UNITS_PER_WORD));
3567 }
3568 
3569 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3570    MODE is any multi-word or full-word mode that lacks a move_insn
3571    pattern.  Note that you will get better code if you define such
3572    patterns, even if they must turn into multiple assembler instructions.  */
3573 
3574 static rtx_insn *
3575 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3576 {
3577   rtx_insn *last_insn = 0;
3578   rtx_insn *seq;
3579   rtx inner;
3580   bool need_clobber;
3581   int i, mode_size;
3582 
3583   /* This function can only handle cases where the number of words is
3584      known at compile time.  */
3585   mode_size = GET_MODE_SIZE (mode).to_constant ();
3586   gcc_assert (mode_size >= UNITS_PER_WORD);
3587 
3588   /* If X is a push on the stack, do the push now and replace
3589      X with a reference to the stack pointer.  */
3590   if (push_operand (x, mode))
3591     x = emit_move_resolve_push (mode, x);
3592 
3593   /* If we are in reload, see if either operand is a MEM whose address
3594      is scheduled for replacement.  */
3595   if (reload_in_progress && MEM_P (x)
3596       && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3597     x = replace_equiv_address_nv (x, inner);
3598   if (reload_in_progress && MEM_P (y)
3599       && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3600     y = replace_equiv_address_nv (y, inner);
3601 
3602   start_sequence ();
3603 
3604   need_clobber = false;
3605   for (i = 0; i < CEIL (mode_size, UNITS_PER_WORD); i++)
3606     {
3607       rtx xpart = operand_subword (x, i, 1, mode);
3608       rtx ypart;
3609 
3610       /* Do not generate code for a move if it would come entirely
3611 	 from the undefined bits of a paradoxical subreg.  */
3612       if (undefined_operand_subword_p (y, i))
3613 	continue;
3614 
3615       ypart = operand_subword (y, i, 1, mode);
3616 
3617       /* If we can't get a part of Y, put Y into memory if it is a
3618 	 constant.  Otherwise, force it into a register.  Then we must
3619 	 be able to get a part of Y.  */
3620       if (ypart == 0 && CONSTANT_P (y))
3621 	{
3622 	  y = use_anchored_address (force_const_mem (mode, y));
3623 	  ypart = operand_subword (y, i, 1, mode);
3624 	}
3625       else if (ypart == 0)
3626 	ypart = operand_subword_force (y, i, mode);
3627 
3628       gcc_assert (xpart && ypart);
3629 
3630       need_clobber |= (GET_CODE (xpart) == SUBREG);
3631 
3632       last_insn = emit_move_insn (xpart, ypart);
3633     }
3634 
3635   seq = get_insns ();
3636   end_sequence ();
3637 
3638   /* Show the output dies here.  This is necessary for SUBREGs
3639      of pseudos since we cannot track their lifetimes correctly;
3640      hard regs shouldn't appear here except as return values.
3641      We never want to emit such a clobber after reload.  */
3642   if (x != y
3643       && ! (reload_in_progress || reload_completed)
3644       && need_clobber != 0)
3645     emit_clobber (x);
3646 
3647   emit_insn (seq);
3648 
3649   return last_insn;
3650 }
3651 
3652 /* Low level part of emit_move_insn.
3653    Called just like emit_move_insn, but assumes X and Y
3654    are basically valid.  */
3655 
3656 rtx_insn *
3657 emit_move_insn_1 (rtx x, rtx y)
3658 {
3659   machine_mode mode = GET_MODE (x);
3660   enum insn_code code;
3661 
3662   gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3663 
3664   code = optab_handler (mov_optab, mode);
3665   if (code != CODE_FOR_nothing)
3666     return emit_insn (GEN_FCN (code) (x, y));
3667 
3668   /* Expand complex moves by moving real part and imag part.  */
3669   if (COMPLEX_MODE_P (mode))
3670     return emit_move_complex (mode, x, y);
3671 
3672   if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3673       || ALL_FIXED_POINT_MODE_P (mode))
3674     {
3675       rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3676 
3677       /* If we can't find an integer mode, use multi words.  */
3678       if (result)
3679 	return result;
3680       else
3681 	return emit_move_multi_word (mode, x, y);
3682     }
3683 
3684   if (GET_MODE_CLASS (mode) == MODE_CC)
3685     return emit_move_ccmode (mode, x, y);
3686 
3687   /* Try using a move pattern for the corresponding integer mode.  This is
3688      only safe when simplify_subreg can convert MODE constants into integer
3689      constants.  At present, it can only do this reliably if the value
3690      fits within a HOST_WIDE_INT.  */
3691   if (!CONSTANT_P (y)
3692       || known_le (GET_MODE_BITSIZE (mode), HOST_BITS_PER_WIDE_INT))
3693     {
3694       rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3695 
3696       if (ret)
3697 	{
3698 	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3699 	    return ret;
3700 	}
3701     }
3702 
3703   return emit_move_multi_word (mode, x, y);
3704 }
3705 
3706 /* Generate code to copy Y into X.
3707    Both Y and X must have the same mode, except that
3708    Y can be a constant with VOIDmode.
3709    This mode cannot be BLKmode; use emit_block_move for that.
3710 
3711    Return the last instruction emitted.  */
3712 
3713 rtx_insn *
3714 emit_move_insn (rtx x, rtx y)
3715 {
3716   machine_mode mode = GET_MODE (x);
3717   rtx y_cst = NULL_RTX;
3718   rtx_insn *last_insn;
3719   rtx set;
3720 
3721   gcc_assert (mode != BLKmode
3722 	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3723 
3724   if (CONSTANT_P (y))
3725     {
3726       if (optimize
3727 	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3728 	  && (last_insn = compress_float_constant (x, y)))
3729 	return last_insn;
3730 
3731       y_cst = y;
3732 
3733       if (!targetm.legitimate_constant_p (mode, y))
3734 	{
3735 	  y = force_const_mem (mode, y);
3736 
3737 	  /* If the target's cannot_force_const_mem prevented the spill,
3738 	     assume that the target's move expanders will also take care
3739 	     of the non-legitimate constant.  */
3740 	  if (!y)
3741 	    y = y_cst;
3742 	  else
3743 	    y = use_anchored_address (y);
3744 	}
3745     }
3746 
3747   /* If X or Y are memory references, verify that their addresses are valid
3748      for the machine.  */
3749   if (MEM_P (x)
3750       && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3751 					 MEM_ADDR_SPACE (x))
3752 	  && ! push_operand (x, GET_MODE (x))))
3753     x = validize_mem (x);
3754 
3755   if (MEM_P (y)
3756       && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3757 					MEM_ADDR_SPACE (y)))
3758     y = validize_mem (y);
3759 
3760   gcc_assert (mode != BLKmode);
3761 
3762   last_insn = emit_move_insn_1 (x, y);
3763 
3764   if (y_cst && REG_P (x)
3765       && (set = single_set (last_insn)) != NULL_RTX
3766       && SET_DEST (set) == x
3767       && ! rtx_equal_p (y_cst, SET_SRC (set)))
3768     set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3769 
3770   return last_insn;
3771 }
3772 
3773 /* Generate the body of an instruction to copy Y into X.
3774    It may be a list of insns, if one insn isn't enough.  */
3775 
3776 rtx_insn *
3777 gen_move_insn (rtx x, rtx y)
3778 {
3779   rtx_insn *seq;
3780 
3781   start_sequence ();
3782   emit_move_insn_1 (x, y);
3783   seq = get_insns ();
3784   end_sequence ();
3785   return seq;
3786 }
3787 
3788 /* If Y is representable exactly in a narrower mode, and the target can
3789    perform the extension directly from constant or memory, then emit the
3790    move as an extension.  */
3791 
3792 static rtx_insn *
3793 compress_float_constant (rtx x, rtx y)
3794 {
3795   machine_mode dstmode = GET_MODE (x);
3796   machine_mode orig_srcmode = GET_MODE (y);
3797   machine_mode srcmode;
3798   const REAL_VALUE_TYPE *r;
3799   int oldcost, newcost;
3800   bool speed = optimize_insn_for_speed_p ();
3801 
3802   r = CONST_DOUBLE_REAL_VALUE (y);
3803 
3804   if (targetm.legitimate_constant_p (dstmode, y))
3805     oldcost = set_src_cost (y, orig_srcmode, speed);
3806   else
3807     oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3808 
3809   FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
3810     {
3811       enum insn_code ic;
3812       rtx trunc_y;
3813       rtx_insn *last_insn;
3814 
3815       /* Skip if the target can't extend this way.  */
3816       ic = can_extend_p (dstmode, srcmode, 0);
3817       if (ic == CODE_FOR_nothing)
3818 	continue;
3819 
3820       /* Skip if the narrowed value isn't exact.  */
3821       if (! exact_real_truncate (srcmode, r))
3822 	continue;
3823 
3824       trunc_y = const_double_from_real_value (*r, srcmode);
3825 
3826       if (targetm.legitimate_constant_p (srcmode, trunc_y))
3827 	{
3828 	  /* Skip if the target needs extra instructions to perform
3829 	     the extension.  */
3830 	  if (!insn_operand_matches (ic, 1, trunc_y))
3831 	    continue;
3832 	  /* This is valid, but may not be cheaper than the original. */
3833 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3834 				  dstmode, speed);
3835 	  if (oldcost < newcost)
3836 	    continue;
3837 	}
3838       else if (float_extend_from_mem[dstmode][srcmode])
3839 	{
3840 	  trunc_y = force_const_mem (srcmode, trunc_y);
3841 	  /* This is valid, but may not be cheaper than the original. */
3842 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3843 				  dstmode, speed);
3844 	  if (oldcost < newcost)
3845 	    continue;
3846 	  trunc_y = validize_mem (trunc_y);
3847 	}
3848       else
3849 	continue;
3850 
3851       /* For CSE's benefit, force the compressed constant pool entry
3852 	 into a new pseudo.  This constant may be used in different modes,
3853 	 and if not, combine will put things back together for us.  */
3854       trunc_y = force_reg (srcmode, trunc_y);
3855 
3856       /* If x is a hard register, perform the extension into a pseudo,
3857 	 so that e.g. stack realignment code is aware of it.  */
3858       rtx target = x;
3859       if (REG_P (x) && HARD_REGISTER_P (x))
3860 	target = gen_reg_rtx (dstmode);
3861 
3862       emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3863       last_insn = get_last_insn ();
3864 
3865       if (REG_P (target))
3866 	set_unique_reg_note (last_insn, REG_EQUAL, y);
3867 
3868       if (target != x)
3869 	return emit_move_insn (x, target);
3870       return last_insn;
3871     }
3872 
3873   return NULL;
3874 }
3875 
3876 /* Pushing data onto the stack.  */
3877 
3878 /* Push a block of length SIZE (perhaps variable)
3879    and return an rtx to address the beginning of the block.
3880    The value may be virtual_outgoing_args_rtx.
3881 
3882    EXTRA is the number of bytes of padding to push in addition to SIZE.
3883    BELOW nonzero means this padding comes at low addresses;
3884    otherwise, the padding comes at high addresses.  */
3885 
3886 rtx
3887 push_block (rtx size, poly_int64 extra, int below)
3888 {
3889   rtx temp;
3890 
3891   size = convert_modes (Pmode, ptr_mode, size, 1);
3892   if (CONSTANT_P (size))
3893     anti_adjust_stack (plus_constant (Pmode, size, extra));
3894   else if (REG_P (size) && known_eq (extra, 0))
3895     anti_adjust_stack (size);
3896   else
3897     {
3898       temp = copy_to_mode_reg (Pmode, size);
3899       if (maybe_ne (extra, 0))
3900 	temp = expand_binop (Pmode, add_optab, temp,
3901 			     gen_int_mode (extra, Pmode),
3902 			     temp, 0, OPTAB_LIB_WIDEN);
3903       anti_adjust_stack (temp);
3904     }
3905 
3906   if (STACK_GROWS_DOWNWARD)
3907     {
3908       temp = virtual_outgoing_args_rtx;
3909       if (maybe_ne (extra, 0) && below)
3910 	temp = plus_constant (Pmode, temp, extra);
3911     }
3912   else
3913     {
3914       if (CONST_INT_P (size))
3915 	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3916 			      -INTVAL (size) - (below ? 0 : extra));
3917       else if (maybe_ne (extra, 0) && !below)
3918 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3919 			     negate_rtx (Pmode, plus_constant (Pmode, size,
3920 							       extra)));
3921       else
3922 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3923 			     negate_rtx (Pmode, size));
3924     }
3925 
3926   return memory_address (NARROWEST_INT_MODE, temp);
3927 }
3928 
3929 /* A utility routine that returns the base of an auto-inc memory, or NULL.  */
3930 
3931 static rtx
3932 mem_autoinc_base (rtx mem)
3933 {
3934   if (MEM_P (mem))
3935     {
3936       rtx addr = XEXP (mem, 0);
3937       if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3938 	return XEXP (addr, 0);
3939     }
3940   return NULL;
3941 }
3942 
3943 /* A utility routine used here, in reload, and in try_split.  The insns
3944    after PREV up to and including LAST are known to adjust the stack,
3945    with a final value of END_ARGS_SIZE.  Iterate backward from LAST
3946    placing notes as appropriate.  PREV may be NULL, indicating the
3947    entire insn sequence prior to LAST should be scanned.
3948 
3949    The set of allowed stack pointer modifications is small:
3950      (1) One or more auto-inc style memory references (aka pushes),
3951      (2) One or more addition/subtraction with the SP as destination,
3952      (3) A single move insn with the SP as destination,
3953      (4) A call_pop insn,
3954      (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3955 
3956    Insns in the sequence that do not modify the SP are ignored,
3957    except for noreturn calls.
3958 
3959    The return value is the amount of adjustment that can be trivially
3960    verified, via immediate operand or auto-inc.  If the adjustment
3961    cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN.  */
3962 
3963 poly_int64
3964 find_args_size_adjust (rtx_insn *insn)
3965 {
3966   rtx dest, set, pat;
3967   int i;
3968 
3969   pat = PATTERN (insn);
3970   set = NULL;
3971 
3972   /* Look for a call_pop pattern.  */
3973   if (CALL_P (insn))
3974     {
3975       /* We have to allow non-call_pop patterns for the case
3976 	 of emit_single_push_insn of a TLS address.  */
3977       if (GET_CODE (pat) != PARALLEL)
3978 	return 0;
3979 
3980       /* All call_pop have a stack pointer adjust in the parallel.
3981 	 The call itself is always first, and the stack adjust is
3982 	 usually last, so search from the end.  */
3983       for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3984 	{
3985 	  set = XVECEXP (pat, 0, i);
3986 	  if (GET_CODE (set) != SET)
3987 	    continue;
3988 	  dest = SET_DEST (set);
3989 	  if (dest == stack_pointer_rtx)
3990 	    break;
3991 	}
3992       /* We'd better have found the stack pointer adjust.  */
3993       if (i == 0)
3994 	return 0;
3995       /* Fall through to process the extracted SET and DEST
3996 	 as if it was a standalone insn.  */
3997     }
3998   else if (GET_CODE (pat) == SET)
3999     set = pat;
4000   else if ((set = single_set (insn)) != NULL)
4001     ;
4002   else if (GET_CODE (pat) == PARALLEL)
4003     {
4004       /* ??? Some older ports use a parallel with a stack adjust
4005 	 and a store for a PUSH_ROUNDING pattern, rather than a
4006 	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
4007       /* ??? See h8300 and m68k, pushqi1.  */
4008       for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
4009 	{
4010 	  set = XVECEXP (pat, 0, i);
4011 	  if (GET_CODE (set) != SET)
4012 	    continue;
4013 	  dest = SET_DEST (set);
4014 	  if (dest == stack_pointer_rtx)
4015 	    break;
4016 
4017 	  /* We do not expect an auto-inc of the sp in the parallel.  */
4018 	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
4019 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4020 			       != stack_pointer_rtx);
4021 	}
4022       if (i < 0)
4023 	return 0;
4024     }
4025   else
4026     return 0;
4027 
4028   dest = SET_DEST (set);
4029 
4030   /* Look for direct modifications of the stack pointer.  */
4031   if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
4032     {
4033       /* Look for a trivial adjustment, otherwise assume nothing.  */
4034       /* Note that the SPU restore_stack_block pattern refers to
4035 	 the stack pointer in V4SImode.  Consider that non-trivial.  */
4036       if (SCALAR_INT_MODE_P (GET_MODE (dest))
4037 	  && GET_CODE (SET_SRC (set)) == PLUS
4038 	  && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
4039 	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
4040 	return INTVAL (XEXP (SET_SRC (set), 1));
4041       /* ??? Reload can generate no-op moves, which will be cleaned
4042 	 up later.  Recognize it and continue searching.  */
4043       else if (rtx_equal_p (dest, SET_SRC (set)))
4044 	return 0;
4045       else
4046 	return HOST_WIDE_INT_MIN;
4047     }
4048   else
4049     {
4050       rtx mem, addr;
4051 
4052       /* Otherwise only think about autoinc patterns.  */
4053       if (mem_autoinc_base (dest) == stack_pointer_rtx)
4054 	{
4055 	  mem = dest;
4056 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4057 			       != stack_pointer_rtx);
4058 	}
4059       else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
4060 	mem = SET_SRC (set);
4061       else
4062 	return 0;
4063 
4064       addr = XEXP (mem, 0);
4065       switch (GET_CODE (addr))
4066 	{
4067 	case PRE_INC:
4068 	case POST_INC:
4069 	  return GET_MODE_SIZE (GET_MODE (mem));
4070 	case PRE_DEC:
4071 	case POST_DEC:
4072 	  return -GET_MODE_SIZE (GET_MODE (mem));
4073 	case PRE_MODIFY:
4074 	case POST_MODIFY:
4075 	  addr = XEXP (addr, 1);
4076 	  gcc_assert (GET_CODE (addr) == PLUS);
4077 	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
4078 	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
4079 	  return INTVAL (XEXP (addr, 1));
4080 	default:
4081 	  gcc_unreachable ();
4082 	}
4083     }
4084 }
4085 
4086 poly_int64
4087 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last,
4088 		       poly_int64 end_args_size)
4089 {
4090   poly_int64 args_size = end_args_size;
4091   bool saw_unknown = false;
4092   rtx_insn *insn;
4093 
4094   for (insn = last; insn != prev; insn = PREV_INSN (insn))
4095     {
4096       if (!NONDEBUG_INSN_P (insn))
4097 	continue;
4098 
4099       /* We might have existing REG_ARGS_SIZE notes, e.g. when pushing
4100 	 a call argument containing a TLS address that itself requires
4101 	 a call to __tls_get_addr.  The handling of stack_pointer_delta
4102 	 in emit_single_push_insn is supposed to ensure that any such
4103 	 notes are already correct.  */
4104       rtx note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4105       gcc_assert (!note || known_eq (args_size, get_args_size (note)));
4106 
4107       poly_int64 this_delta = find_args_size_adjust (insn);
4108       if (known_eq (this_delta, 0))
4109 	{
4110 	  if (!CALL_P (insn)
4111 	      || ACCUMULATE_OUTGOING_ARGS
4112 	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
4113 	    continue;
4114 	}
4115 
4116       gcc_assert (!saw_unknown);
4117       if (known_eq (this_delta, HOST_WIDE_INT_MIN))
4118 	saw_unknown = true;
4119 
4120       if (!note)
4121 	add_args_size_note (insn, args_size);
4122       if (STACK_GROWS_DOWNWARD)
4123 	this_delta = -poly_uint64 (this_delta);
4124 
4125       if (saw_unknown)
4126 	args_size = HOST_WIDE_INT_MIN;
4127       else
4128 	args_size -= this_delta;
4129     }
4130 
4131   return args_size;
4132 }
4133 
4134 #ifdef PUSH_ROUNDING
4135 /* Emit single push insn.  */
4136 
4137 static void
4138 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4139 {
4140   rtx dest_addr;
4141   poly_int64 rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4142   rtx dest;
4143   enum insn_code icode;
4144 
4145   /* If there is push pattern, use it.  Otherwise try old way of throwing
4146      MEM representing push operation to move expander.  */
4147   icode = optab_handler (push_optab, mode);
4148   if (icode != CODE_FOR_nothing)
4149     {
4150       struct expand_operand ops[1];
4151 
4152       create_input_operand (&ops[0], x, mode);
4153       if (maybe_expand_insn (icode, 1, ops))
4154 	return;
4155     }
4156   if (known_eq (GET_MODE_SIZE (mode), rounded_size))
4157     dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4158   /* If we are to pad downward, adjust the stack pointer first and
4159      then store X into the stack location using an offset.  This is
4160      because emit_move_insn does not know how to pad; it does not have
4161      access to type.  */
4162   else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD)
4163     {
4164       emit_move_insn (stack_pointer_rtx,
4165 		      expand_binop (Pmode,
4166 				    STACK_GROWS_DOWNWARD ? sub_optab
4167 				    : add_optab,
4168 				    stack_pointer_rtx,
4169 				    gen_int_mode (rounded_size, Pmode),
4170 				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
4171 
4172       poly_int64 offset = rounded_size - GET_MODE_SIZE (mode);
4173       if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4174 	/* We have already decremented the stack pointer, so get the
4175 	   previous value.  */
4176 	offset += rounded_size;
4177 
4178       if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4179 	/* We have already incremented the stack pointer, so get the
4180 	   previous value.  */
4181 	offset -= rounded_size;
4182 
4183       dest_addr = plus_constant (Pmode, stack_pointer_rtx, offset);
4184     }
4185   else
4186     {
4187       if (STACK_GROWS_DOWNWARD)
4188 	/* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
4189 	dest_addr = plus_constant (Pmode, stack_pointer_rtx, -rounded_size);
4190       else
4191 	/* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
4192 	dest_addr = plus_constant (Pmode, stack_pointer_rtx, rounded_size);
4193 
4194       dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4195     }
4196 
4197   dest = gen_rtx_MEM (mode, dest_addr);
4198 
4199   if (type != 0)
4200     {
4201       set_mem_attributes (dest, type, 1);
4202 
4203       if (cfun->tail_call_marked)
4204 	/* Function incoming arguments may overlap with sibling call
4205 	   outgoing arguments and we cannot allow reordering of reads
4206 	   from function arguments with stores to outgoing arguments
4207 	   of sibling calls.  */
4208 	set_mem_alias_set (dest, 0);
4209     }
4210   emit_move_insn (dest, x);
4211 }
4212 
4213 /* Emit and annotate a single push insn.  */
4214 
4215 static void
4216 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4217 {
4218   poly_int64 delta, old_delta = stack_pointer_delta;
4219   rtx_insn *prev = get_last_insn ();
4220   rtx_insn *last;
4221 
4222   emit_single_push_insn_1 (mode, x, type);
4223 
4224   /* Adjust stack_pointer_delta to describe the situation after the push
4225      we just performed.  Note that we must do this after the push rather
4226      than before the push in case calculating X needs pushes and pops of
4227      its own (e.g. if calling __tls_get_addr).  The REG_ARGS_SIZE notes
4228      for such pushes and pops must not include the effect of the future
4229      push of X.  */
4230   stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4231 
4232   last = get_last_insn ();
4233 
4234   /* Notice the common case where we emitted exactly one insn.  */
4235   if (PREV_INSN (last) == prev)
4236     {
4237       add_args_size_note (last, stack_pointer_delta);
4238       return;
4239     }
4240 
4241   delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4242   gcc_assert (known_eq (delta, HOST_WIDE_INT_MIN)
4243 	      || known_eq (delta, old_delta));
4244 }
4245 #endif
4246 
4247 /* If reading SIZE bytes from X will end up reading from
4248    Y return the number of bytes that overlap.  Return -1
4249    if there is no overlap or -2 if we can't determine
4250    (for example when X and Y have different base registers).  */
4251 
4252 static int
4253 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4254 {
4255   rtx tmp = plus_constant (Pmode, x, size);
4256   rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4257 
4258   if (!CONST_INT_P (sub))
4259     return -2;
4260 
4261   HOST_WIDE_INT val = INTVAL (sub);
4262 
4263   return IN_RANGE (val, 1, size) ? val : -1;
4264 }
4265 
4266 /* Generate code to push X onto the stack, assuming it has mode MODE and
4267    type TYPE.
4268    MODE is redundant except when X is a CONST_INT (since they don't
4269    carry mode info).
4270    SIZE is an rtx for the size of data to be copied (in bytes),
4271    needed only if X is BLKmode.
4272    Return true if successful.  May return false if asked to push a
4273    partial argument during a sibcall optimization (as specified by
4274    SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4275    to not overlap.
4276 
4277    ALIGN (in bits) is maximum alignment we can assume.
4278 
4279    If PARTIAL and REG are both nonzero, then copy that many of the first
4280    bytes of X into registers starting with REG, and push the rest of X.
4281    The amount of space pushed is decreased by PARTIAL bytes.
4282    REG must be a hard register in this case.
4283    If REG is zero but PARTIAL is not, take any all others actions for an
4284    argument partially in registers, but do not actually load any
4285    registers.
4286 
4287    EXTRA is the amount in bytes of extra space to leave next to this arg.
4288    This is ignored if an argument block has already been allocated.
4289 
4290    On a machine that lacks real push insns, ARGS_ADDR is the address of
4291    the bottom of the argument block for this call.  We use indexing off there
4292    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when a
4293    argument block has not been preallocated.
4294 
4295    ARGS_SO_FAR is the size of args previously pushed for this call.
4296 
4297    REG_PARM_STACK_SPACE is nonzero if functions require stack space
4298    for arguments passed in registers.  If nonzero, it will be the number
4299    of bytes required.  */
4300 
4301 bool
4302 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4303 		unsigned int align, int partial, rtx reg, poly_int64 extra,
4304 		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4305 		rtx alignment_pad, bool sibcall_p)
4306 {
4307   rtx xinner;
4308   pad_direction stack_direction
4309     = STACK_GROWS_DOWNWARD ? PAD_DOWNWARD : PAD_UPWARD;
4310 
4311   /* Decide where to pad the argument: PAD_DOWNWARD for below,
4312      PAD_UPWARD for above, or PAD_NONE for don't pad it.
4313      Default is below for small data on big-endian machines; else above.  */
4314   pad_direction where_pad = targetm.calls.function_arg_padding (mode, type);
4315 
4316   /* Invert direction if stack is post-decrement.
4317      FIXME: why?  */
4318   if (STACK_PUSH_CODE == POST_DEC)
4319     if (where_pad != PAD_NONE)
4320       where_pad = (where_pad == PAD_DOWNWARD ? PAD_UPWARD : PAD_DOWNWARD);
4321 
4322   xinner = x;
4323 
4324   int nregs = partial / UNITS_PER_WORD;
4325   rtx *tmp_regs = NULL;
4326   int overlapping = 0;
4327 
4328   if (mode == BLKmode
4329       || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4330     {
4331       /* Copy a block into the stack, entirely or partially.  */
4332 
4333       rtx temp;
4334       int used;
4335       int offset;
4336       int skip;
4337 
4338       offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4339       used = partial - offset;
4340 
4341       if (mode != BLKmode)
4342 	{
4343 	  /* A value is to be stored in an insufficiently aligned
4344 	     stack slot; copy via a suitably aligned slot if
4345 	     necessary.  */
4346 	  size = gen_int_mode (GET_MODE_SIZE (mode), Pmode);
4347 	  if (!MEM_P (xinner))
4348 	    {
4349 	      temp = assign_temp (type, 1, 1);
4350 	      emit_move_insn (temp, xinner);
4351 	      xinner = temp;
4352 	    }
4353 	}
4354 
4355       gcc_assert (size);
4356 
4357       /* USED is now the # of bytes we need not copy to the stack
4358 	 because registers will take care of them.  */
4359 
4360       if (partial != 0)
4361 	xinner = adjust_address (xinner, BLKmode, used);
4362 
4363       /* If the partial register-part of the arg counts in its stack size,
4364 	 skip the part of stack space corresponding to the registers.
4365 	 Otherwise, start copying to the beginning of the stack space,
4366 	 by setting SKIP to 0.  */
4367       skip = (reg_parm_stack_space == 0) ? 0 : used;
4368 
4369 #ifdef PUSH_ROUNDING
4370       /* Do it with several push insns if that doesn't take lots of insns
4371 	 and if there is no difficulty with push insns that skip bytes
4372 	 on the stack for alignment purposes.  */
4373       if (args_addr == 0
4374 	  && PUSH_ARGS
4375 	  && CONST_INT_P (size)
4376 	  && skip == 0
4377 	  && MEM_ALIGN (xinner) >= align
4378 	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4379 	  /* Here we avoid the case of a structure whose weak alignment
4380 	     forces many pushes of a small amount of data,
4381 	     and such small pushes do rounding that causes trouble.  */
4382 	  && ((!targetm.slow_unaligned_access (word_mode, align))
4383 	      || align >= BIGGEST_ALIGNMENT
4384 	      || known_eq (PUSH_ROUNDING (align / BITS_PER_UNIT),
4385 			   align / BITS_PER_UNIT))
4386 	  && known_eq (PUSH_ROUNDING (INTVAL (size)), INTVAL (size)))
4387 	{
4388 	  /* Push padding now if padding above and stack grows down,
4389 	     or if padding below and stack grows up.
4390 	     But if space already allocated, this has already been done.  */
4391 	  if (maybe_ne (extra, 0)
4392 	      && args_addr == 0
4393 	      && where_pad != PAD_NONE
4394 	      && where_pad != stack_direction)
4395 	    anti_adjust_stack (gen_int_mode (extra, Pmode));
4396 
4397 	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4398 	}
4399       else
4400 #endif /* PUSH_ROUNDING  */
4401 	{
4402 	  rtx target;
4403 
4404 	  /* Otherwise make space on the stack and copy the data
4405 	     to the address of that space.  */
4406 
4407 	  /* Deduct words put into registers from the size we must copy.  */
4408 	  if (partial != 0)
4409 	    {
4410 	      if (CONST_INT_P (size))
4411 		size = GEN_INT (INTVAL (size) - used);
4412 	      else
4413 		size = expand_binop (GET_MODE (size), sub_optab, size,
4414 				     gen_int_mode (used, GET_MODE (size)),
4415 				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
4416 	    }
4417 
4418 	  /* Get the address of the stack space.
4419 	     In this case, we do not deal with EXTRA separately.
4420 	     A single stack adjust will do.  */
4421 	  if (! args_addr)
4422 	    {
4423 	      temp = push_block (size, extra, where_pad == PAD_DOWNWARD);
4424 	      extra = 0;
4425 	    }
4426 	  else if (CONST_INT_P (args_so_far))
4427 	    temp = memory_address (BLKmode,
4428 				   plus_constant (Pmode, args_addr,
4429 						  skip + INTVAL (args_so_far)));
4430 	  else
4431 	    temp = memory_address (BLKmode,
4432 				   plus_constant (Pmode,
4433 						  gen_rtx_PLUS (Pmode,
4434 								args_addr,
4435 								args_so_far),
4436 						  skip));
4437 
4438 	  if (!ACCUMULATE_OUTGOING_ARGS)
4439 	    {
4440 	      /* If the source is referenced relative to the stack pointer,
4441 		 copy it to another register to stabilize it.  We do not need
4442 		 to do this if we know that we won't be changing sp.  */
4443 
4444 	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4445 		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4446 		temp = copy_to_reg (temp);
4447 	    }
4448 
4449 	  target = gen_rtx_MEM (BLKmode, temp);
4450 
4451 	  /* We do *not* set_mem_attributes here, because incoming arguments
4452 	     may overlap with sibling call outgoing arguments and we cannot
4453 	     allow reordering of reads from function arguments with stores
4454 	     to outgoing arguments of sibling calls.  We do, however, want
4455 	     to record the alignment of the stack slot.  */
4456 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4457 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4458 	  set_mem_align (target, align);
4459 
4460 	  /* If part should go in registers and pushing to that part would
4461 	     overwrite some of the values that need to go into regs, load the
4462 	     overlapping values into temporary pseudos to be moved into the hard
4463 	     regs at the end after the stack pushing has completed.
4464 	     We cannot load them directly into the hard regs here because
4465 	     they can be clobbered by the block move expansions.
4466 	     See PR 65358.  */
4467 
4468 	  if (partial > 0 && reg != 0 && mode == BLKmode
4469 	      && GET_CODE (reg) != PARALLEL)
4470 	    {
4471 	      overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4472 	      if (overlapping > 0)
4473 	        {
4474 		  gcc_assert (overlapping % UNITS_PER_WORD == 0);
4475 		  overlapping /= UNITS_PER_WORD;
4476 
4477 		  tmp_regs = XALLOCAVEC (rtx, overlapping);
4478 
4479 		  for (int i = 0; i < overlapping; i++)
4480 		    tmp_regs[i] = gen_reg_rtx (word_mode);
4481 
4482 		  for (int i = 0; i < overlapping; i++)
4483 		    emit_move_insn (tmp_regs[i],
4484 				    operand_subword_force (target, i, mode));
4485 	        }
4486 	      else if (overlapping == -1)
4487 		overlapping = 0;
4488 	      /* Could not determine whether there is overlap.
4489 	         Fail the sibcall.  */
4490 	      else
4491 		{
4492 		  overlapping = 0;
4493 		  if (sibcall_p)
4494 		    return false;
4495 		}
4496 	    }
4497 	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4498 	}
4499     }
4500   else if (partial > 0)
4501     {
4502       /* Scalar partly in registers.  This case is only supported
4503 	 for fixed-wdth modes.  */
4504       int size = GET_MODE_SIZE (mode).to_constant ();
4505       size /= UNITS_PER_WORD;
4506       int i;
4507       int not_stack;
4508       /* # bytes of start of argument
4509 	 that we must make space for but need not store.  */
4510       int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4511       int args_offset = INTVAL (args_so_far);
4512       int skip;
4513 
4514       /* Push padding now if padding above and stack grows down,
4515 	 or if padding below and stack grows up.
4516 	 But if space already allocated, this has already been done.  */
4517       if (maybe_ne (extra, 0)
4518 	  && args_addr == 0
4519 	  && where_pad != PAD_NONE
4520 	  && where_pad != stack_direction)
4521 	anti_adjust_stack (gen_int_mode (extra, Pmode));
4522 
4523       /* If we make space by pushing it, we might as well push
4524 	 the real data.  Otherwise, we can leave OFFSET nonzero
4525 	 and leave the space uninitialized.  */
4526       if (args_addr == 0)
4527 	offset = 0;
4528 
4529       /* Now NOT_STACK gets the number of words that we don't need to
4530 	 allocate on the stack.  Convert OFFSET to words too.  */
4531       not_stack = (partial - offset) / UNITS_PER_WORD;
4532       offset /= UNITS_PER_WORD;
4533 
4534       /* If the partial register-part of the arg counts in its stack size,
4535 	 skip the part of stack space corresponding to the registers.
4536 	 Otherwise, start copying to the beginning of the stack space,
4537 	 by setting SKIP to 0.  */
4538       skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4539 
4540       if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4541 	x = validize_mem (force_const_mem (mode, x));
4542 
4543       /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4544 	 SUBREGs of such registers are not allowed.  */
4545       if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4546 	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4547 	x = copy_to_reg (x);
4548 
4549       /* Loop over all the words allocated on the stack for this arg.  */
4550       /* We can do it by words, because any scalar bigger than a word
4551 	 has a size a multiple of a word.  */
4552       for (i = size - 1; i >= not_stack; i--)
4553 	if (i >= not_stack + offset)
4554 	  if (!emit_push_insn (operand_subword_force (x, i, mode),
4555 			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4556 			  0, args_addr,
4557 			  GEN_INT (args_offset + ((i - not_stack + skip)
4558 						  * UNITS_PER_WORD)),
4559 			  reg_parm_stack_space, alignment_pad, sibcall_p))
4560 	    return false;
4561     }
4562   else
4563     {
4564       rtx addr;
4565       rtx dest;
4566 
4567       /* Push padding now if padding above and stack grows down,
4568 	 or if padding below and stack grows up.
4569 	 But if space already allocated, this has already been done.  */
4570       if (maybe_ne (extra, 0)
4571 	  && args_addr == 0
4572 	  && where_pad != PAD_NONE
4573 	  && where_pad != stack_direction)
4574 	anti_adjust_stack (gen_int_mode (extra, Pmode));
4575 
4576 #ifdef PUSH_ROUNDING
4577       if (args_addr == 0 && PUSH_ARGS)
4578 	emit_single_push_insn (mode, x, type);
4579       else
4580 #endif
4581 	{
4582 	  addr = simplify_gen_binary (PLUS, Pmode, args_addr, args_so_far);
4583 	  dest = gen_rtx_MEM (mode, memory_address (mode, addr));
4584 
4585 	  /* We do *not* set_mem_attributes here, because incoming arguments
4586 	     may overlap with sibling call outgoing arguments and we cannot
4587 	     allow reordering of reads from function arguments with stores
4588 	     to outgoing arguments of sibling calls.  We do, however, want
4589 	     to record the alignment of the stack slot.  */
4590 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4591 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4592 	  set_mem_align (dest, align);
4593 
4594 	  emit_move_insn (dest, x);
4595 	}
4596     }
4597 
4598   /* Move the partial arguments into the registers and any overlapping
4599      values that we moved into the pseudos in tmp_regs.  */
4600   if (partial > 0 && reg != 0)
4601     {
4602       /* Handle calls that pass values in multiple non-contiguous locations.
4603 	 The Irix 6 ABI has examples of this.  */
4604       if (GET_CODE (reg) == PARALLEL)
4605 	emit_group_load (reg, x, type, -1);
4606       else
4607         {
4608 	  gcc_assert (partial % UNITS_PER_WORD == 0);
4609 	  move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4610 
4611 	  for (int i = 0; i < overlapping; i++)
4612 	    emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4613 						    + nregs - overlapping + i),
4614 			    tmp_regs[i]);
4615 
4616 	}
4617     }
4618 
4619   if (maybe_ne (extra, 0) && args_addr == 0 && where_pad == stack_direction)
4620     anti_adjust_stack (gen_int_mode (extra, Pmode));
4621 
4622   if (alignment_pad && args_addr == 0)
4623     anti_adjust_stack (alignment_pad);
4624 
4625   return true;
4626 }
4627 
4628 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4629    operations.  */
4630 
4631 static rtx
4632 get_subtarget (rtx x)
4633 {
4634   return (optimize
4635           || x == 0
4636 	   /* Only registers can be subtargets.  */
4637 	   || !REG_P (x)
4638 	   /* Don't use hard regs to avoid extending their life.  */
4639 	   || REGNO (x) < FIRST_PSEUDO_REGISTER
4640 	  ? 0 : x);
4641 }
4642 
4643 /* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
4644    FIELD is a bitfield.  Returns true if the optimization was successful,
4645    and there's nothing else to do.  */
4646 
4647 static bool
4648 optimize_bitfield_assignment_op (poly_uint64 pbitsize,
4649 				 poly_uint64 pbitpos,
4650 				 poly_uint64 pbitregion_start,
4651 				 poly_uint64 pbitregion_end,
4652 				 machine_mode mode1, rtx str_rtx,
4653 				 tree to, tree src, bool reverse)
4654 {
4655   /* str_mode is not guaranteed to be a scalar type.  */
4656   machine_mode str_mode = GET_MODE (str_rtx);
4657   unsigned int str_bitsize;
4658   tree op0, op1;
4659   rtx value, result;
4660   optab binop;
4661   gimple *srcstmt;
4662   enum tree_code code;
4663 
4664   unsigned HOST_WIDE_INT bitsize, bitpos, bitregion_start, bitregion_end;
4665   if (mode1 != VOIDmode
4666       || !pbitsize.is_constant (&bitsize)
4667       || !pbitpos.is_constant (&bitpos)
4668       || !pbitregion_start.is_constant (&bitregion_start)
4669       || !pbitregion_end.is_constant (&bitregion_end)
4670       || bitsize >= BITS_PER_WORD
4671       || !GET_MODE_BITSIZE (str_mode).is_constant (&str_bitsize)
4672       || str_bitsize > BITS_PER_WORD
4673       || TREE_SIDE_EFFECTS (to)
4674       || TREE_THIS_VOLATILE (to))
4675     return false;
4676 
4677   STRIP_NOPS (src);
4678   if (TREE_CODE (src) != SSA_NAME)
4679     return false;
4680   if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4681     return false;
4682 
4683   srcstmt = get_gimple_for_ssa_name (src);
4684   if (!srcstmt
4685       || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4686     return false;
4687 
4688   code = gimple_assign_rhs_code (srcstmt);
4689 
4690   op0 = gimple_assign_rhs1 (srcstmt);
4691 
4692   /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4693      to find its initialization.  Hopefully the initialization will
4694      be from a bitfield load.  */
4695   if (TREE_CODE (op0) == SSA_NAME)
4696     {
4697       gimple *op0stmt = get_gimple_for_ssa_name (op0);
4698 
4699       /* We want to eventually have OP0 be the same as TO, which
4700 	 should be a bitfield.  */
4701       if (!op0stmt
4702 	  || !is_gimple_assign (op0stmt)
4703 	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4704 	return false;
4705       op0 = gimple_assign_rhs1 (op0stmt);
4706     }
4707 
4708   op1 = gimple_assign_rhs2 (srcstmt);
4709 
4710   if (!operand_equal_p (to, op0, 0))
4711     return false;
4712 
4713   if (MEM_P (str_rtx))
4714     {
4715       unsigned HOST_WIDE_INT offset1;
4716 
4717       if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4718 	str_bitsize = BITS_PER_WORD;
4719 
4720       scalar_int_mode best_mode;
4721       if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end,
4722 			  MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode))
4723 	return false;
4724       str_mode = best_mode;
4725       str_bitsize = GET_MODE_BITSIZE (best_mode);
4726 
4727       offset1 = bitpos;
4728       bitpos %= str_bitsize;
4729       offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4730       str_rtx = adjust_address (str_rtx, str_mode, offset1);
4731     }
4732   else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4733     return false;
4734 
4735   /* If the bit field covers the whole REG/MEM, store_field
4736      will likely generate better code.  */
4737   if (bitsize >= str_bitsize)
4738     return false;
4739 
4740   /* We can't handle fields split across multiple entities.  */
4741   if (bitpos + bitsize > str_bitsize)
4742     return false;
4743 
4744   if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4745     bitpos = str_bitsize - bitpos - bitsize;
4746 
4747   switch (code)
4748     {
4749     case PLUS_EXPR:
4750     case MINUS_EXPR:
4751       /* For now, just optimize the case of the topmost bitfield
4752 	 where we don't need to do any masking and also
4753 	 1 bit bitfields where xor can be used.
4754 	 We might win by one instruction for the other bitfields
4755 	 too if insv/extv instructions aren't used, so that
4756 	 can be added later.  */
4757       if ((reverse || bitpos + bitsize != str_bitsize)
4758 	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4759 	break;
4760 
4761       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4762       value = convert_modes (str_mode,
4763 			     TYPE_MODE (TREE_TYPE (op1)), value,
4764 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4765 
4766       /* We may be accessing data outside the field, which means
4767 	 we can alias adjacent data.  */
4768       if (MEM_P (str_rtx))
4769 	{
4770 	  str_rtx = shallow_copy_rtx (str_rtx);
4771 	  set_mem_alias_set (str_rtx, 0);
4772 	  set_mem_expr (str_rtx, 0);
4773 	}
4774 
4775       if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4776 	{
4777 	  value = expand_and (str_mode, value, const1_rtx, NULL);
4778 	  binop = xor_optab;
4779 	}
4780       else
4781 	binop = code == PLUS_EXPR ? add_optab : sub_optab;
4782 
4783       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4784       if (reverse)
4785 	value = flip_storage_order (str_mode, value);
4786       result = expand_binop (str_mode, binop, str_rtx,
4787 			     value, str_rtx, 1, OPTAB_WIDEN);
4788       if (result != str_rtx)
4789 	emit_move_insn (str_rtx, result);
4790       return true;
4791 
4792     case BIT_IOR_EXPR:
4793     case BIT_XOR_EXPR:
4794       if (TREE_CODE (op1) != INTEGER_CST)
4795 	break;
4796       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4797       value = convert_modes (str_mode,
4798 			     TYPE_MODE (TREE_TYPE (op1)), value,
4799 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4800 
4801       /* We may be accessing data outside the field, which means
4802 	 we can alias adjacent data.  */
4803       if (MEM_P (str_rtx))
4804 	{
4805 	  str_rtx = shallow_copy_rtx (str_rtx);
4806 	  set_mem_alias_set (str_rtx, 0);
4807 	  set_mem_expr (str_rtx, 0);
4808 	}
4809 
4810       binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4811       if (bitpos + bitsize != str_bitsize)
4812 	{
4813 	  rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
4814 				   str_mode);
4815 	  value = expand_and (str_mode, value, mask, NULL_RTX);
4816 	}
4817       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4818       if (reverse)
4819 	value = flip_storage_order (str_mode, value);
4820       result = expand_binop (str_mode, binop, str_rtx,
4821 			     value, str_rtx, 1, OPTAB_WIDEN);
4822       if (result != str_rtx)
4823 	emit_move_insn (str_rtx, result);
4824       return true;
4825 
4826     default:
4827       break;
4828     }
4829 
4830   return false;
4831 }
4832 
4833 /* In the C++ memory model, consecutive bit fields in a structure are
4834    considered one memory location.
4835 
4836    Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4837    returns the bit range of consecutive bits in which this COMPONENT_REF
4838    belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
4839    and *OFFSET may be adjusted in the process.
4840 
4841    If the access does not need to be restricted, 0 is returned in both
4842    *BITSTART and *BITEND.  */
4843 
4844 void
4845 get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
4846 	       poly_int64_pod *bitpos, tree *offset)
4847 {
4848   poly_int64 bitoffset;
4849   tree field, repr;
4850 
4851   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4852 
4853   field = TREE_OPERAND (exp, 1);
4854   repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4855   /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4856      need to limit the range we can access.  */
4857   if (!repr)
4858     {
4859       *bitstart = *bitend = 0;
4860       return;
4861     }
4862 
4863   /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4864      part of a larger bit field, then the representative does not serve any
4865      useful purpose.  This can occur in Ada.  */
4866   if (handled_component_p (TREE_OPERAND (exp, 0)))
4867     {
4868       machine_mode rmode;
4869       poly_int64 rbitsize, rbitpos;
4870       tree roffset;
4871       int unsignedp, reversep, volatilep = 0;
4872       get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4873 			   &roffset, &rmode, &unsignedp, &reversep,
4874 			   &volatilep);
4875       if (!multiple_p (rbitpos, BITS_PER_UNIT))
4876 	{
4877 	  *bitstart = *bitend = 0;
4878 	  return;
4879 	}
4880     }
4881 
4882   /* Compute the adjustment to bitpos from the offset of the field
4883      relative to the representative.  DECL_FIELD_OFFSET of field and
4884      repr are the same by construction if they are not constants,
4885      see finish_bitfield_layout.  */
4886   poly_uint64 field_offset, repr_offset;
4887   if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
4888       && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
4889     bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
4890   else
4891     bitoffset = 0;
4892   bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4893 		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4894 
4895   /* If the adjustment is larger than bitpos, we would have a negative bit
4896      position for the lower bound and this may wreak havoc later.  Adjust
4897      offset and bitpos to make the lower bound non-negative in that case.  */
4898   if (maybe_gt (bitoffset, *bitpos))
4899     {
4900       poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos;
4901       poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT);
4902 
4903       *bitpos += adjust_bits;
4904       if (*offset == NULL_TREE)
4905 	*offset = size_int (-adjust_bytes);
4906       else
4907 	*offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes));
4908       *bitstart = 0;
4909     }
4910   else
4911     *bitstart = *bitpos - bitoffset;
4912 
4913   *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4914 }
4915 
4916 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4917    in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
4918    DECL_RTL was not set yet, return NORTL.  */
4919 
4920 static inline bool
4921 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4922 {
4923   if (TREE_CODE (addr) != ADDR_EXPR)
4924     return false;
4925 
4926   tree base = TREE_OPERAND (addr, 0);
4927 
4928   if (!DECL_P (base)
4929       || TREE_ADDRESSABLE (base)
4930       || DECL_MODE (base) == BLKmode)
4931     return false;
4932 
4933   if (!DECL_RTL_SET_P (base))
4934     return nortl;
4935 
4936   return (!MEM_P (DECL_RTL (base)));
4937 }
4938 
4939 /* Returns true if the MEM_REF REF refers to an object that does not
4940    reside in memory and has non-BLKmode.  */
4941 
4942 static inline bool
4943 mem_ref_refers_to_non_mem_p (tree ref)
4944 {
4945   tree base = TREE_OPERAND (ref, 0);
4946   return addr_expr_of_non_mem_decl_p_1 (base, false);
4947 }
4948 
4949 /* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
4950    is true, try generating a nontemporal store.  */
4951 
4952 void
4953 expand_assignment (tree to, tree from, bool nontemporal)
4954 {
4955   rtx to_rtx = 0;
4956   rtx result;
4957   machine_mode mode;
4958   unsigned int align;
4959   enum insn_code icode;
4960 
4961   /* Don't crash if the lhs of the assignment was erroneous.  */
4962   if (TREE_CODE (to) == ERROR_MARK)
4963     {
4964       expand_normal (from);
4965       return;
4966     }
4967 
4968   /* Optimize away no-op moves without side-effects.  */
4969   if (operand_equal_p (to, from, 0))
4970     return;
4971 
4972   /* Handle misaligned stores.  */
4973   mode = TYPE_MODE (TREE_TYPE (to));
4974   if ((TREE_CODE (to) == MEM_REF
4975        || TREE_CODE (to) == TARGET_MEM_REF)
4976       && mode != BLKmode
4977       && !mem_ref_refers_to_non_mem_p (to)
4978       && ((align = get_object_alignment (to))
4979 	  < GET_MODE_ALIGNMENT (mode))
4980       && (((icode = optab_handler (movmisalign_optab, mode))
4981 	   != CODE_FOR_nothing)
4982 	  || targetm.slow_unaligned_access (mode, align)))
4983     {
4984       rtx reg, mem;
4985 
4986       reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4987       reg = force_not_mem (reg);
4988       mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4989       if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4990 	reg = flip_storage_order (mode, reg);
4991 
4992       if (icode != CODE_FOR_nothing)
4993 	{
4994 	  struct expand_operand ops[2];
4995 
4996 	  create_fixed_operand (&ops[0], mem);
4997 	  create_input_operand (&ops[1], reg, mode);
4998 	  /* The movmisalign<mode> pattern cannot fail, else the assignment
4999 	     would silently be omitted.  */
5000 	  expand_insn (icode, 2, ops);
5001 	}
5002       else
5003 	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
5004 			 false);
5005       return;
5006     }
5007 
5008   /* Assignment of a structure component needs special treatment
5009      if the structure component's rtx is not simply a MEM.
5010      Assignment of an array element at a constant index, and assignment of
5011      an array element in an unaligned packed structure field, has the same
5012      problem.  Same for (partially) storing into a non-memory object.  */
5013   if (handled_component_p (to)
5014       || (TREE_CODE (to) == MEM_REF
5015 	  && (REF_REVERSE_STORAGE_ORDER (to)
5016 	      || mem_ref_refers_to_non_mem_p (to)))
5017       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
5018     {
5019       machine_mode mode1;
5020       poly_int64 bitsize, bitpos;
5021       poly_uint64 bitregion_start = 0;
5022       poly_uint64 bitregion_end = 0;
5023       tree offset;
5024       int unsignedp, reversep, volatilep = 0;
5025       tree tem;
5026 
5027       push_temp_slots ();
5028       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
5029 				 &unsignedp, &reversep, &volatilep);
5030 
5031       /* Make sure bitpos is not negative, it can wreak havoc later.  */
5032       if (maybe_lt (bitpos, 0))
5033 	{
5034 	  gcc_assert (offset == NULL_TREE);
5035 	  offset = size_int (bits_to_bytes_round_down (bitpos));
5036 	  bitpos = num_trailing_bits (bitpos);
5037 	}
5038 
5039       if (TREE_CODE (to) == COMPONENT_REF
5040 	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
5041 	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
5042       /* The C++ memory model naturally applies to byte-aligned fields.
5043 	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
5044 	 BITSIZE are not byte-aligned, there is no need to limit the range
5045 	 we can access.  This can occur with packed structures in Ada.  */
5046       else if (maybe_gt (bitsize, 0)
5047 	       && multiple_p (bitsize, BITS_PER_UNIT)
5048 	       && multiple_p (bitpos, BITS_PER_UNIT))
5049 	{
5050 	  bitregion_start = bitpos;
5051 	  bitregion_end = bitpos + bitsize - 1;
5052 	}
5053 
5054       to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
5055 
5056       /* If the field has a mode, we want to access it in the
5057 	 field's mode, not the computed mode.
5058 	 If a MEM has VOIDmode (external with incomplete type),
5059 	 use BLKmode for it instead.  */
5060       if (MEM_P (to_rtx))
5061 	{
5062 	  if (mode1 != VOIDmode)
5063 	    to_rtx = adjust_address (to_rtx, mode1, 0);
5064 	  else if (GET_MODE (to_rtx) == VOIDmode)
5065 	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
5066 	}
5067 
5068       if (offset != 0)
5069 	{
5070 	  machine_mode address_mode;
5071 	  rtx offset_rtx;
5072 
5073 	  if (!MEM_P (to_rtx))
5074 	    {
5075 	      /* We can get constant negative offsets into arrays with broken
5076 		 user code.  Translate this to a trap instead of ICEing.  */
5077 	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
5078 	      expand_builtin_trap ();
5079 	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
5080 	    }
5081 
5082 	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
5083 	  address_mode = get_address_mode (to_rtx);
5084 	  if (GET_MODE (offset_rtx) != address_mode)
5085 	    {
5086 		/* We cannot be sure that the RTL in offset_rtx is valid outside
5087 		   of a memory address context, so force it into a register
5088 		   before attempting to convert it to the desired mode.  */
5089 	      offset_rtx = force_operand (offset_rtx, NULL_RTX);
5090 	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5091 	    }
5092 
5093 	  /* If we have an expression in OFFSET_RTX and a non-zero
5094 	     byte offset in BITPOS, adding the byte offset before the
5095 	     OFFSET_RTX results in better intermediate code, which makes
5096 	     later rtl optimization passes perform better.
5097 
5098 	     We prefer intermediate code like this:
5099 
5100 	     r124:DI=r123:DI+0x18
5101 	     [r124:DI]=r121:DI
5102 
5103 	     ... instead of ...
5104 
5105 	     r124:DI=r123:DI+0x10
5106 	     [r124:DI+0x8]=r121:DI
5107 
5108 	     This is only done for aligned data values, as these can
5109 	     be expected to result in single move instructions.  */
5110 	  poly_int64 bytepos;
5111 	  if (mode1 != VOIDmode
5112 	      && maybe_ne (bitpos, 0)
5113 	      && maybe_gt (bitsize, 0)
5114 	      && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
5115 	      && multiple_p (bitpos, bitsize)
5116 	      && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
5117 	      && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
5118 	    {
5119 	      to_rtx = adjust_address (to_rtx, mode1, bytepos);
5120 	      bitregion_start = 0;
5121 	      if (known_ge (bitregion_end, poly_uint64 (bitpos)))
5122 		bitregion_end -= bitpos;
5123 	      bitpos = 0;
5124 	    }
5125 
5126 	  to_rtx = offset_address (to_rtx, offset_rtx,
5127 				   highest_pow2_factor_for_target (to,
5128 				   				   offset));
5129 	}
5130 
5131       /* No action is needed if the target is not a memory and the field
5132 	 lies completely outside that target.  This can occur if the source
5133 	 code contains an out-of-bounds access to a small array.  */
5134       if (!MEM_P (to_rtx)
5135 	  && GET_MODE (to_rtx) != BLKmode
5136 	  && known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (to_rtx))))
5137 	{
5138 	  expand_normal (from);
5139 	  result = NULL;
5140 	}
5141       /* Handle expand_expr of a complex value returning a CONCAT.  */
5142       else if (GET_CODE (to_rtx) == CONCAT)
5143 	{
5144 	  machine_mode to_mode = GET_MODE (to_rtx);
5145 	  gcc_checking_assert (COMPLEX_MODE_P (to_mode));
5146 	  poly_int64 mode_bitsize = GET_MODE_BITSIZE (to_mode);
5147 	  unsigned short inner_bitsize = GET_MODE_UNIT_BITSIZE (to_mode);
5148 	  if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE (to_rtx)
5149 	      && COMPLEX_MODE_P (GET_MODE (to_rtx))
5150 	      && known_eq (bitpos, 0)
5151 	      && known_eq (bitsize, mode_bitsize))
5152 	    result = store_expr (from, to_rtx, false, nontemporal, reversep);
5153 	  else if (known_eq (bitsize, inner_bitsize)
5154 		   && (known_eq (bitpos, 0)
5155 		       || known_eq (bitpos, inner_bitsize)))
5156 	    result = store_expr (from, XEXP (to_rtx, maybe_ne (bitpos, 0)),
5157 				 false, nontemporal, reversep);
5158 	  else if (known_le (bitpos + bitsize, inner_bitsize))
5159 	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
5160 				  bitregion_start, bitregion_end,
5161 				  mode1, from, get_alias_set (to),
5162 				  nontemporal, reversep);
5163 	  else if (known_ge (bitpos, inner_bitsize))
5164 	    result = store_field (XEXP (to_rtx, 1), bitsize,
5165 				  bitpos - inner_bitsize,
5166 				  bitregion_start, bitregion_end,
5167 				  mode1, from, get_alias_set (to),
5168 				  nontemporal, reversep);
5169 	  else if (known_eq (bitpos, 0) && known_eq (bitsize, mode_bitsize))
5170 	    {
5171 	      result = expand_normal (from);
5172 	      if (GET_CODE (result) == CONCAT)
5173 		{
5174 		  to_mode = GET_MODE_INNER (to_mode);
5175 		  machine_mode from_mode = GET_MODE_INNER (GET_MODE (result));
5176 		  rtx from_real
5177 		    = simplify_gen_subreg (to_mode, XEXP (result, 0),
5178 					   from_mode, 0);
5179 		  rtx from_imag
5180 		    = simplify_gen_subreg (to_mode, XEXP (result, 1),
5181 					   from_mode, 0);
5182 		  if (!from_real || !from_imag)
5183 		    goto concat_store_slow;
5184 		  emit_move_insn (XEXP (to_rtx, 0), from_real);
5185 		  emit_move_insn (XEXP (to_rtx, 1), from_imag);
5186 		}
5187 	      else
5188 		{
5189 		  rtx from_rtx
5190 		    = simplify_gen_subreg (to_mode, result,
5191 					   TYPE_MODE (TREE_TYPE (from)), 0);
5192 		  if (from_rtx)
5193 		    {
5194 		      emit_move_insn (XEXP (to_rtx, 0),
5195 				      read_complex_part (from_rtx, false));
5196 		      emit_move_insn (XEXP (to_rtx, 1),
5197 				      read_complex_part (from_rtx, true));
5198 		    }
5199 		  else
5200 		    {
5201 		      machine_mode to_mode
5202 			= GET_MODE_INNER (GET_MODE (to_rtx));
5203 		      rtx from_real
5204 			= simplify_gen_subreg (to_mode, result,
5205 					       TYPE_MODE (TREE_TYPE (from)),
5206 					       0);
5207 		      rtx from_imag
5208 			= simplify_gen_subreg (to_mode, result,
5209 					       TYPE_MODE (TREE_TYPE (from)),
5210 					       GET_MODE_SIZE (to_mode));
5211 		      if (!from_real || !from_imag)
5212 			goto concat_store_slow;
5213 		      emit_move_insn (XEXP (to_rtx, 0), from_real);
5214 		      emit_move_insn (XEXP (to_rtx, 1), from_imag);
5215 		    }
5216 		}
5217 	    }
5218 	  else
5219 	    {
5220 	    concat_store_slow:;
5221 	      rtx temp = assign_stack_temp (to_mode,
5222 					    GET_MODE_SIZE (GET_MODE (to_rtx)));
5223 	      write_complex_part (temp, XEXP (to_rtx, 0), false);
5224 	      write_complex_part (temp, XEXP (to_rtx, 1), true);
5225 	      result = store_field (temp, bitsize, bitpos,
5226 				    bitregion_start, bitregion_end,
5227 				    mode1, from, get_alias_set (to),
5228 				    nontemporal, reversep);
5229 	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5230 	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5231 	    }
5232 	}
5233       else
5234 	{
5235 	  if (MEM_P (to_rtx))
5236 	    {
5237 	      /* If the field is at offset zero, we could have been given the
5238 		 DECL_RTX of the parent struct.  Don't munge it.  */
5239 	      to_rtx = shallow_copy_rtx (to_rtx);
5240 	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5241 	      if (volatilep)
5242 		MEM_VOLATILE_P (to_rtx) = 1;
5243 	    }
5244 
5245 	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
5246 					       bitregion_start, bitregion_end,
5247 					       mode1, to_rtx, to, from,
5248 					       reversep))
5249 	    result = NULL;
5250 	  else
5251 	    result = store_field (to_rtx, bitsize, bitpos,
5252 				  bitregion_start, bitregion_end,
5253 				  mode1, from, get_alias_set (to),
5254 				  nontemporal, reversep);
5255 	}
5256 
5257       if (result)
5258 	preserve_temp_slots (result);
5259       pop_temp_slots ();
5260       return;
5261     }
5262 
5263   /* If the rhs is a function call and its value is not an aggregate,
5264      call the function before we start to compute the lhs.
5265      This is needed for correct code for cases such as
5266      val = setjmp (buf) on machines where reference to val
5267      requires loading up part of an address in a separate insn.
5268 
5269      Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5270      since it might be a promoted variable where the zero- or sign- extension
5271      needs to be done.  Handling this in the normal way is safe because no
5272      computation is done before the call.  The same is true for SSA names.  */
5273   if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5274       && COMPLETE_TYPE_P (TREE_TYPE (from))
5275       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5276       && ! (((VAR_P (to)
5277 	      || TREE_CODE (to) == PARM_DECL
5278 	      || TREE_CODE (to) == RESULT_DECL)
5279 	     && REG_P (DECL_RTL (to)))
5280 	    || TREE_CODE (to) == SSA_NAME))
5281     {
5282       rtx value;
5283       rtx bounds;
5284 
5285       push_temp_slots ();
5286       value = expand_normal (from);
5287 
5288       /* Split value and bounds to store them separately.  */
5289       chkp_split_slot (value, &value, &bounds);
5290 
5291       if (to_rtx == 0)
5292 	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5293 
5294       /* Handle calls that return values in multiple non-contiguous locations.
5295 	 The Irix 6 ABI has examples of this.  */
5296       if (GET_CODE (to_rtx) == PARALLEL)
5297 	{
5298 	  if (GET_CODE (value) == PARALLEL)
5299 	    emit_group_move (to_rtx, value);
5300 	  else
5301 	    emit_group_load (to_rtx, value, TREE_TYPE (from),
5302 			     int_size_in_bytes (TREE_TYPE (from)));
5303 	}
5304       else if (GET_CODE (value) == PARALLEL)
5305 	emit_group_store (to_rtx, value, TREE_TYPE (from),
5306 			  int_size_in_bytes (TREE_TYPE (from)));
5307       else if (GET_MODE (to_rtx) == BLKmode)
5308 	{
5309 	  /* Handle calls that return BLKmode values in registers.  */
5310 	  if (REG_P (value))
5311 	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5312 	  else
5313 	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5314 	}
5315       else
5316 	{
5317 	  if (POINTER_TYPE_P (TREE_TYPE (to)))
5318 	    value = convert_memory_address_addr_space
5319 	      (as_a <scalar_int_mode> (GET_MODE (to_rtx)), value,
5320 	       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5321 
5322 	  emit_move_insn (to_rtx, value);
5323 	}
5324 
5325       /* Store bounds if required.  */
5326       if (bounds
5327 	  && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5328 	{
5329 	  gcc_assert (MEM_P (to_rtx));
5330 	  chkp_emit_bounds_store (bounds, value, to_rtx);
5331 	}
5332 
5333       preserve_temp_slots (to_rtx);
5334       pop_temp_slots ();
5335       return;
5336     }
5337 
5338   /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
5339   to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5340 
5341   /* Don't move directly into a return register.  */
5342   if (TREE_CODE (to) == RESULT_DECL
5343       && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5344     {
5345       rtx temp;
5346 
5347       push_temp_slots ();
5348 
5349       /* If the source is itself a return value, it still is in a pseudo at
5350 	 this point so we can move it back to the return register directly.  */
5351       if (REG_P (to_rtx)
5352 	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5353 	  && TREE_CODE (from) != CALL_EXPR)
5354 	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5355       else
5356 	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5357 
5358       /* Handle calls that return values in multiple non-contiguous locations.
5359 	 The Irix 6 ABI has examples of this.  */
5360       if (GET_CODE (to_rtx) == PARALLEL)
5361 	{
5362 	  if (GET_CODE (temp) == PARALLEL)
5363 	    emit_group_move (to_rtx, temp);
5364 	  else
5365 	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
5366 			     int_size_in_bytes (TREE_TYPE (from)));
5367 	}
5368       else if (temp)
5369 	emit_move_insn (to_rtx, temp);
5370 
5371       preserve_temp_slots (to_rtx);
5372       pop_temp_slots ();
5373       return;
5374     }
5375 
5376   /* In case we are returning the contents of an object which overlaps
5377      the place the value is being stored, use a safe function when copying
5378      a value through a pointer into a structure value return block.  */
5379   if (TREE_CODE (to) == RESULT_DECL
5380       && TREE_CODE (from) == INDIRECT_REF
5381       && ADDR_SPACE_GENERIC_P
5382 	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5383       && refs_may_alias_p (to, from)
5384       && cfun->returns_struct
5385       && !cfun->returns_pcc_struct)
5386     {
5387       rtx from_rtx, size;
5388 
5389       push_temp_slots ();
5390       size = expr_size (from);
5391       from_rtx = expand_normal (from);
5392 
5393       emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);
5394 
5395       preserve_temp_slots (to_rtx);
5396       pop_temp_slots ();
5397       return;
5398     }
5399 
5400   /* Compute FROM and store the value in the rtx we got.  */
5401 
5402   push_temp_slots ();
5403   result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5404   preserve_temp_slots (result);
5405   pop_temp_slots ();
5406   return;
5407 }
5408 
5409 /* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
5410    succeeded, false otherwise.  */
5411 
5412 bool
5413 emit_storent_insn (rtx to, rtx from)
5414 {
5415   struct expand_operand ops[2];
5416   machine_mode mode = GET_MODE (to);
5417   enum insn_code code = optab_handler (storent_optab, mode);
5418 
5419   if (code == CODE_FOR_nothing)
5420     return false;
5421 
5422   create_fixed_operand (&ops[0], to);
5423   create_input_operand (&ops[1], from, mode);
5424   return maybe_expand_insn (code, 2, ops);
5425 }
5426 
5427 /* Generate code for computing expression EXP,
5428    and storing the value into TARGET.
5429 
5430    If the mode is BLKmode then we may return TARGET itself.
5431    It turns out that in BLKmode it doesn't cause a problem.
5432    because C has no operators that could combine two different
5433    assignments into the same BLKmode object with different values
5434    with no sequence point.  Will other languages need this to
5435    be more thorough?
5436 
5437    If CALL_PARAM_P is nonzero, this is a store into a call param on the
5438    stack, and block moves may need to be treated specially.
5439 
5440    If NONTEMPORAL is true, try using a nontemporal store instruction.
5441 
5442    If REVERSE is true, the store is to be done in reverse order.
5443 
5444    If BTARGET is not NULL then computed bounds of EXP are
5445    associated with BTARGET.  */
5446 
5447 rtx
5448 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5449 			bool nontemporal, bool reverse, tree btarget)
5450 {
5451   rtx temp;
5452   rtx alt_rtl = NULL_RTX;
5453   location_t loc = curr_insn_location ();
5454 
5455   if (VOID_TYPE_P (TREE_TYPE (exp)))
5456     {
5457       /* C++ can generate ?: expressions with a throw expression in one
5458 	 branch and an rvalue in the other. Here, we resolve attempts to
5459 	 store the throw expression's nonexistent result.  */
5460       gcc_assert (!call_param_p);
5461       expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5462       return NULL_RTX;
5463     }
5464   if (TREE_CODE (exp) == COMPOUND_EXPR)
5465     {
5466       /* Perform first part of compound expression, then assign from second
5467 	 part.  */
5468       expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5469 		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5470       return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5471 				     call_param_p, nontemporal, reverse,
5472 				     btarget);
5473     }
5474   else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5475     {
5476       /* For conditional expression, get safe form of the target.  Then
5477 	 test the condition, doing the appropriate assignment on either
5478 	 side.  This avoids the creation of unnecessary temporaries.
5479 	 For non-BLKmode, it is more efficient not to do this.  */
5480 
5481       rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5482 
5483       do_pending_stack_adjust ();
5484       NO_DEFER_POP;
5485       jumpifnot (TREE_OPERAND (exp, 0), lab1,
5486 		 profile_probability::uninitialized ());
5487       store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5488 			      nontemporal, reverse, btarget);
5489       emit_jump_insn (targetm.gen_jump (lab2));
5490       emit_barrier ();
5491       emit_label (lab1);
5492       store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5493 			      nontemporal, reverse, btarget);
5494       emit_label (lab2);
5495       OK_DEFER_POP;
5496 
5497       return NULL_RTX;
5498     }
5499   else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5500     /* If this is a scalar in a register that is stored in a wider mode
5501        than the declared mode, compute the result into its declared mode
5502        and then convert to the wider mode.  Our value is the computed
5503        expression.  */
5504     {
5505       rtx inner_target = 0;
5506       scalar_int_mode outer_mode = subreg_unpromoted_mode (target);
5507       scalar_int_mode inner_mode = subreg_promoted_mode (target);
5508 
5509       /* We can do the conversion inside EXP, which will often result
5510 	 in some optimizations.  Do the conversion in two steps: first
5511 	 change the signedness, if needed, then the extend.  But don't
5512 	 do this if the type of EXP is a subtype of something else
5513 	 since then the conversion might involve more than just
5514 	 converting modes.  */
5515       if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5516 	  && TREE_TYPE (TREE_TYPE (exp)) == 0
5517 	  && GET_MODE_PRECISION (outer_mode)
5518 	     == TYPE_PRECISION (TREE_TYPE (exp)))
5519 	{
5520 	  if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5521 					  TYPE_UNSIGNED (TREE_TYPE (exp))))
5522 	    {
5523 	      /* Some types, e.g. Fortran's logical*4, won't have a signed
5524 		 version, so use the mode instead.  */
5525 	      tree ntype
5526 		= (signed_or_unsigned_type_for
5527 		   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5528 	      if (ntype == NULL)
5529 		ntype = lang_hooks.types.type_for_mode
5530 		  (TYPE_MODE (TREE_TYPE (exp)),
5531 		   SUBREG_PROMOTED_SIGN (target));
5532 
5533 	      exp = fold_convert_loc (loc, ntype, exp);
5534 	    }
5535 
5536 	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5537 				  (inner_mode, SUBREG_PROMOTED_SIGN (target)),
5538 				  exp);
5539 
5540 	  inner_target = SUBREG_REG (target);
5541 	}
5542 
5543       temp = expand_expr (exp, inner_target, VOIDmode,
5544 			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5545 
5546       /* Handle bounds returned by call.  */
5547       if (TREE_CODE (exp) == CALL_EXPR)
5548 	{
5549 	  rtx bounds;
5550 	  chkp_split_slot (temp, &temp, &bounds);
5551 	  if (bounds && btarget)
5552 	    {
5553 	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5554 	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
5555 	      chkp_set_rtl_bounds (btarget, tmp);
5556 	    }
5557 	}
5558 
5559       /* If TEMP is a VOIDmode constant, use convert_modes to make
5560 	 sure that we properly convert it.  */
5561       if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5562 	{
5563 	  temp = convert_modes (outer_mode, TYPE_MODE (TREE_TYPE (exp)),
5564 				temp, SUBREG_PROMOTED_SIGN (target));
5565 	  temp = convert_modes (inner_mode, outer_mode, temp,
5566 				SUBREG_PROMOTED_SIGN (target));
5567 	}
5568 
5569       convert_move (SUBREG_REG (target), temp,
5570 		    SUBREG_PROMOTED_SIGN (target));
5571 
5572       return NULL_RTX;
5573     }
5574   else if ((TREE_CODE (exp) == STRING_CST
5575 	    || (TREE_CODE (exp) == MEM_REF
5576 		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5577 		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5578 		   == STRING_CST
5579 		&& integer_zerop (TREE_OPERAND (exp, 1))))
5580 	   && !nontemporal && !call_param_p
5581 	   && MEM_P (target))
5582     {
5583       /* Optimize initialization of an array with a STRING_CST.  */
5584       HOST_WIDE_INT exp_len, str_copy_len;
5585       rtx dest_mem;
5586       tree str = TREE_CODE (exp) == STRING_CST
5587 		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5588 
5589       exp_len = int_expr_size (exp);
5590       if (exp_len <= 0)
5591 	goto normal_expr;
5592 
5593       if (TREE_STRING_LENGTH (str) <= 0)
5594 	goto normal_expr;
5595 
5596       str_copy_len = strlen (TREE_STRING_POINTER (str));
5597       if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5598 	goto normal_expr;
5599 
5600       str_copy_len = TREE_STRING_LENGTH (str);
5601       if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5602 	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5603 	{
5604 	  str_copy_len += STORE_MAX_PIECES - 1;
5605 	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
5606 	}
5607       str_copy_len = MIN (str_copy_len, exp_len);
5608       if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5609 				CONST_CAST (char *, TREE_STRING_POINTER (str)),
5610 				MEM_ALIGN (target), false))
5611 	goto normal_expr;
5612 
5613       dest_mem = target;
5614 
5615       dest_mem = store_by_pieces (dest_mem,
5616 				  str_copy_len, builtin_strncpy_read_str,
5617 				  CONST_CAST (char *,
5618 					      TREE_STRING_POINTER (str)),
5619 				  MEM_ALIGN (target), false,
5620 				  exp_len > str_copy_len ? 1 : 0);
5621       if (exp_len > str_copy_len)
5622 	clear_storage (adjust_address (dest_mem, BLKmode, 0),
5623 		       GEN_INT (exp_len - str_copy_len),
5624 		       BLOCK_OP_NORMAL);
5625       return NULL_RTX;
5626     }
5627   else
5628     {
5629       rtx tmp_target;
5630 
5631   normal_expr:
5632       /* If we want to use a nontemporal or a reverse order store, force the
5633 	 value into a register first.  */
5634       tmp_target = nontemporal || reverse ? NULL_RTX : target;
5635       temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5636 			       (call_param_p
5637 				? EXPAND_STACK_PARM : EXPAND_NORMAL),
5638 			       &alt_rtl, false);
5639 
5640       /* Handle bounds returned by call.  */
5641       if (TREE_CODE (exp) == CALL_EXPR)
5642 	{
5643 	  rtx bounds;
5644 	  chkp_split_slot (temp, &temp, &bounds);
5645 	  if (bounds && btarget)
5646 	    {
5647 	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5648 	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
5649 	      chkp_set_rtl_bounds (btarget, tmp);
5650 	    }
5651 	}
5652     }
5653 
5654   /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5655      the same as that of TARGET, adjust the constant.  This is needed, for
5656      example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5657      only a word-sized value.  */
5658   if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5659       && TREE_CODE (exp) != ERROR_MARK
5660       && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5661     {
5662       if (GET_MODE_CLASS (GET_MODE (target))
5663 	  != GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp)))
5664 	  && known_eq (GET_MODE_BITSIZE (GET_MODE (target)),
5665 		       GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)))))
5666 	{
5667 	  rtx t = simplify_gen_subreg (GET_MODE (target), temp,
5668 				       TYPE_MODE (TREE_TYPE (exp)), 0);
5669 	  if (t)
5670 	    temp = t;
5671 	}
5672       if (GET_MODE (temp) == VOIDmode)
5673 	temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5674 			      temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5675     }
5676 
5677   /* If value was not generated in the target, store it there.
5678      Convert the value to TARGET's type first if necessary and emit the
5679      pending incrementations that have been queued when expanding EXP.
5680      Note that we cannot emit the whole queue blindly because this will
5681      effectively disable the POST_INC optimization later.
5682 
5683      If TEMP and TARGET compare equal according to rtx_equal_p, but
5684      one or both of them are volatile memory refs, we have to distinguish
5685      two cases:
5686      - expand_expr has used TARGET.  In this case, we must not generate
5687        another copy.  This can be detected by TARGET being equal according
5688        to == .
5689      - expand_expr has not used TARGET - that means that the source just
5690        happens to have the same RTX form.  Since temp will have been created
5691        by expand_expr, it will compare unequal according to == .
5692        We must generate a copy in this case, to reach the correct number
5693        of volatile memory references.  */
5694 
5695   if ((! rtx_equal_p (temp, target)
5696        || (temp != target && (side_effects_p (temp)
5697 			      || side_effects_p (target))))
5698       && TREE_CODE (exp) != ERROR_MARK
5699       /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5700 	 but TARGET is not valid memory reference, TEMP will differ
5701 	 from TARGET although it is really the same location.  */
5702       && !(alt_rtl
5703 	   && rtx_equal_p (alt_rtl, target)
5704 	   && !side_effects_p (alt_rtl)
5705 	   && !side_effects_p (target))
5706       /* If there's nothing to copy, don't bother.  Don't call
5707 	 expr_size unless necessary, because some front-ends (C++)
5708 	 expr_size-hook must not be given objects that are not
5709 	 supposed to be bit-copied or bit-initialized.  */
5710       && expr_size (exp) != const0_rtx)
5711     {
5712       if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5713 	{
5714 	  if (GET_MODE (target) == BLKmode)
5715 	    {
5716 	      /* Handle calls that return BLKmode values in registers.  */
5717 	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5718 		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5719 	      else
5720 		store_bit_field (target,
5721 				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5722 				 0, 0, 0, GET_MODE (temp), temp, reverse);
5723 	    }
5724 	  else
5725 	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5726 	}
5727 
5728       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5729 	{
5730 	  /* Handle copying a string constant into an array.  The string
5731 	     constant may be shorter than the array.  So copy just the string's
5732 	     actual length, and clear the rest.  First get the size of the data
5733 	     type of the string, which is actually the size of the target.  */
5734 	  rtx size = expr_size (exp);
5735 
5736 	  if (CONST_INT_P (size)
5737 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
5738 	    emit_block_move (target, temp, size,
5739 			     (call_param_p
5740 			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5741 	  else
5742 	    {
5743 	      machine_mode pointer_mode
5744 		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5745 	      machine_mode address_mode = get_address_mode (target);
5746 
5747 	      /* Compute the size of the data to copy from the string.  */
5748 	      tree copy_size
5749 		= size_binop_loc (loc, MIN_EXPR,
5750 				  make_tree (sizetype, size),
5751 				  size_int (TREE_STRING_LENGTH (exp)));
5752 	      rtx copy_size_rtx
5753 		= expand_expr (copy_size, NULL_RTX, VOIDmode,
5754 			       (call_param_p
5755 				? EXPAND_STACK_PARM : EXPAND_NORMAL));
5756 	      rtx_code_label *label = 0;
5757 
5758 	      /* Copy that much.  */
5759 	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5760 					       TYPE_UNSIGNED (sizetype));
5761 	      emit_block_move (target, temp, copy_size_rtx,
5762 			       (call_param_p
5763 				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5764 
5765 	      /* Figure out how much is left in TARGET that we have to clear.
5766 		 Do all calculations in pointer_mode.  */
5767 	      if (CONST_INT_P (copy_size_rtx))
5768 		{
5769 		  size = plus_constant (address_mode, size,
5770 					-INTVAL (copy_size_rtx));
5771 		  target = adjust_address (target, BLKmode,
5772 					   INTVAL (copy_size_rtx));
5773 		}
5774 	      else
5775 		{
5776 		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5777 				       copy_size_rtx, NULL_RTX, 0,
5778 				       OPTAB_LIB_WIDEN);
5779 
5780 		  if (GET_MODE (copy_size_rtx) != address_mode)
5781 		    copy_size_rtx = convert_to_mode (address_mode,
5782 						     copy_size_rtx,
5783 						     TYPE_UNSIGNED (sizetype));
5784 
5785 		  target = offset_address (target, copy_size_rtx,
5786 					   highest_pow2_factor (copy_size));
5787 		  label = gen_label_rtx ();
5788 		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5789 					   GET_MODE (size), 0, label);
5790 		}
5791 
5792 	      if (size != const0_rtx)
5793 		clear_storage (target, size, BLOCK_OP_NORMAL);
5794 
5795 	      if (label)
5796 		emit_label (label);
5797 	    }
5798 	}
5799       /* Handle calls that return values in multiple non-contiguous locations.
5800 	 The Irix 6 ABI has examples of this.  */
5801       else if (GET_CODE (target) == PARALLEL)
5802 	{
5803 	  if (GET_CODE (temp) == PARALLEL)
5804 	    emit_group_move (target, temp);
5805 	  else
5806 	    emit_group_load (target, temp, TREE_TYPE (exp),
5807 			     int_size_in_bytes (TREE_TYPE (exp)));
5808 	}
5809       else if (GET_CODE (temp) == PARALLEL)
5810 	emit_group_store (target, temp, TREE_TYPE (exp),
5811 			  int_size_in_bytes (TREE_TYPE (exp)));
5812       else if (GET_MODE (temp) == BLKmode)
5813 	emit_block_move (target, temp, expr_size (exp),
5814 			 (call_param_p
5815 			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5816       /* If we emit a nontemporal store, there is nothing else to do.  */
5817       else if (nontemporal && emit_storent_insn (target, temp))
5818 	;
5819       else
5820 	{
5821 	  if (reverse)
5822 	    temp = flip_storage_order (GET_MODE (target), temp);
5823 	  temp = force_operand (temp, target);
5824 	  if (temp != target)
5825 	    emit_move_insn (target, temp);
5826 	}
5827     }
5828 
5829   return NULL_RTX;
5830 }
5831 
5832 /* Same as store_expr_with_bounds but ignoring bounds of EXP.  */
5833 rtx
5834 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5835 	    bool reverse)
5836 {
5837   return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5838 				 reverse, NULL);
5839 }
5840 
5841 /* Return true if field F of structure TYPE is a flexible array.  */
5842 
5843 static bool
5844 flexible_array_member_p (const_tree f, const_tree type)
5845 {
5846   const_tree tf;
5847 
5848   tf = TREE_TYPE (f);
5849   return (DECL_CHAIN (f) == NULL
5850 	  && TREE_CODE (tf) == ARRAY_TYPE
5851 	  && TYPE_DOMAIN (tf)
5852 	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5853 	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5854 	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5855 	  && int_size_in_bytes (type) >= 0);
5856 }
5857 
5858 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5859    must have in order for it to completely initialize a value of type TYPE.
5860    Return -1 if the number isn't known.
5861 
5862    If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
5863 
5864 static HOST_WIDE_INT
5865 count_type_elements (const_tree type, bool for_ctor_p)
5866 {
5867   switch (TREE_CODE (type))
5868     {
5869     case ARRAY_TYPE:
5870       {
5871 	tree nelts;
5872 
5873 	nelts = array_type_nelts (type);
5874 	if (nelts && tree_fits_uhwi_p (nelts))
5875 	  {
5876 	    unsigned HOST_WIDE_INT n;
5877 
5878 	    n = tree_to_uhwi (nelts) + 1;
5879 	    if (n == 0 || for_ctor_p)
5880 	      return n;
5881 	    else
5882 	      return n * count_type_elements (TREE_TYPE (type), false);
5883 	  }
5884 	return for_ctor_p ? -1 : 1;
5885       }
5886 
5887     case RECORD_TYPE:
5888       {
5889 	unsigned HOST_WIDE_INT n;
5890 	tree f;
5891 
5892 	n = 0;
5893 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5894 	  if (TREE_CODE (f) == FIELD_DECL)
5895 	    {
5896 	      if (!for_ctor_p)
5897 		n += count_type_elements (TREE_TYPE (f), false);
5898 	      else if (!flexible_array_member_p (f, type))
5899 		/* Don't count flexible arrays, which are not supposed
5900 		   to be initialized.  */
5901 		n += 1;
5902 	    }
5903 
5904 	return n;
5905       }
5906 
5907     case UNION_TYPE:
5908     case QUAL_UNION_TYPE:
5909       {
5910 	tree f;
5911 	HOST_WIDE_INT n, m;
5912 
5913 	gcc_assert (!for_ctor_p);
5914 	/* Estimate the number of scalars in each field and pick the
5915 	   maximum.  Other estimates would do instead; the idea is simply
5916 	   to make sure that the estimate is not sensitive to the ordering
5917 	   of the fields.  */
5918 	n = 1;
5919 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5920 	  if (TREE_CODE (f) == FIELD_DECL)
5921 	    {
5922 	      m = count_type_elements (TREE_TYPE (f), false);
5923 	      /* If the field doesn't span the whole union, add an extra
5924 		 scalar for the rest.  */
5925 	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5926 				    TYPE_SIZE (type)) != 1)
5927 		m++;
5928 	      if (n < m)
5929 		n = m;
5930 	    }
5931 	return n;
5932       }
5933 
5934     case COMPLEX_TYPE:
5935       return 2;
5936 
5937     case VECTOR_TYPE:
5938       {
5939 	unsigned HOST_WIDE_INT nelts;
5940 	if (TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
5941 	  return nelts;
5942 	else
5943 	  return -1;
5944       }
5945 
5946     case INTEGER_TYPE:
5947     case REAL_TYPE:
5948     case FIXED_POINT_TYPE:
5949     case ENUMERAL_TYPE:
5950     case BOOLEAN_TYPE:
5951     case POINTER_TYPE:
5952     case OFFSET_TYPE:
5953     case REFERENCE_TYPE:
5954     case NULLPTR_TYPE:
5955       return 1;
5956 
5957     case ERROR_MARK:
5958       return 0;
5959 
5960     case VOID_TYPE:
5961     case METHOD_TYPE:
5962     case FUNCTION_TYPE:
5963     case LANG_TYPE:
5964     default:
5965       gcc_unreachable ();
5966     }
5967 }
5968 
5969 /* Helper for categorize_ctor_elements.  Identical interface.  */
5970 
5971 static bool
5972 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5973 			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
5974 {
5975   unsigned HOST_WIDE_INT idx;
5976   HOST_WIDE_INT nz_elts, init_elts, num_fields;
5977   tree value, purpose, elt_type;
5978 
5979   /* Whether CTOR is a valid constant initializer, in accordance with what
5980      initializer_constant_valid_p does.  If inferred from the constructor
5981      elements, true until proven otherwise.  */
5982   bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5983   bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5984 
5985   nz_elts = 0;
5986   init_elts = 0;
5987   num_fields = 0;
5988   elt_type = NULL_TREE;
5989 
5990   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5991     {
5992       HOST_WIDE_INT mult = 1;
5993 
5994       if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5995 	{
5996 	  tree lo_index = TREE_OPERAND (purpose, 0);
5997 	  tree hi_index = TREE_OPERAND (purpose, 1);
5998 
5999 	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
6000 	    mult = (tree_to_uhwi (hi_index)
6001 		    - tree_to_uhwi (lo_index) + 1);
6002 	}
6003       num_fields += mult;
6004       elt_type = TREE_TYPE (value);
6005 
6006       switch (TREE_CODE (value))
6007 	{
6008 	case CONSTRUCTOR:
6009 	  {
6010 	    HOST_WIDE_INT nz = 0, ic = 0;
6011 
6012 	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
6013 							   p_complete);
6014 
6015 	    nz_elts += mult * nz;
6016  	    init_elts += mult * ic;
6017 
6018 	    if (const_from_elts_p && const_p)
6019 	      const_p = const_elt_p;
6020 	  }
6021 	  break;
6022 
6023 	case INTEGER_CST:
6024 	case REAL_CST:
6025 	case FIXED_CST:
6026 	  if (!initializer_zerop (value))
6027 	    nz_elts += mult;
6028 	  init_elts += mult;
6029 	  break;
6030 
6031 	case STRING_CST:
6032 	  nz_elts += mult * TREE_STRING_LENGTH (value);
6033 	  init_elts += mult * TREE_STRING_LENGTH (value);
6034 	  break;
6035 
6036 	case COMPLEX_CST:
6037 	  if (!initializer_zerop (TREE_REALPART (value)))
6038 	    nz_elts += mult;
6039 	  if (!initializer_zerop (TREE_IMAGPART (value)))
6040 	    nz_elts += mult;
6041 	  init_elts += mult;
6042 	  break;
6043 
6044 	case VECTOR_CST:
6045 	  {
6046 	    /* We can only construct constant-length vectors using
6047 	       CONSTRUCTOR.  */
6048 	    unsigned int nunits = VECTOR_CST_NELTS (value).to_constant ();
6049 	    for (unsigned int i = 0; i < nunits; ++i)
6050 	      {
6051 		tree v = VECTOR_CST_ELT (value, i);
6052 		if (!initializer_zerop (v))
6053 		  nz_elts += mult;
6054 		init_elts += mult;
6055 	      }
6056 	  }
6057 	  break;
6058 
6059 	default:
6060 	  {
6061 	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
6062 	    nz_elts += mult * tc;
6063 	    init_elts += mult * tc;
6064 
6065 	    if (const_from_elts_p && const_p)
6066 	      const_p
6067 		= initializer_constant_valid_p (value,
6068 						elt_type,
6069 						TYPE_REVERSE_STORAGE_ORDER
6070 						(TREE_TYPE (ctor)))
6071 		  != NULL_TREE;
6072 	  }
6073 	  break;
6074 	}
6075     }
6076 
6077   if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
6078 						num_fields, elt_type))
6079     *p_complete = false;
6080 
6081   *p_nz_elts += nz_elts;
6082   *p_init_elts += init_elts;
6083 
6084   return const_p;
6085 }
6086 
6087 /* Examine CTOR to discover:
6088    * how many scalar fields are set to nonzero values,
6089      and place it in *P_NZ_ELTS;
6090    * how many scalar fields in total are in CTOR,
6091      and place it in *P_ELT_COUNT.
6092    * whether the constructor is complete -- in the sense that every
6093      meaningful byte is explicitly given a value --
6094      and place it in *P_COMPLETE.
6095 
6096    Return whether or not CTOR is a valid static constant initializer, the same
6097    as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
6098 
6099 bool
6100 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
6101 			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
6102 {
6103   *p_nz_elts = 0;
6104   *p_init_elts = 0;
6105   *p_complete = true;
6106 
6107   return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
6108 }
6109 
6110 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
6111    of which had type LAST_TYPE.  Each element was itself a complete
6112    initializer, in the sense that every meaningful byte was explicitly
6113    given a value.  Return true if the same is true for the constructor
6114    as a whole.  */
6115 
6116 bool
6117 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
6118 			  const_tree last_type)
6119 {
6120   if (TREE_CODE (type) == UNION_TYPE
6121       || TREE_CODE (type) == QUAL_UNION_TYPE)
6122     {
6123       if (num_elts == 0)
6124 	return false;
6125 
6126       gcc_assert (num_elts == 1 && last_type);
6127 
6128       /* ??? We could look at each element of the union, and find the
6129 	 largest element.  Which would avoid comparing the size of the
6130 	 initialized element against any tail padding in the union.
6131 	 Doesn't seem worth the effort...  */
6132       return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
6133     }
6134 
6135   return count_type_elements (type, true) == num_elts;
6136 }
6137 
6138 /* Return 1 if EXP contains mostly (3/4)  zeros.  */
6139 
6140 static int
6141 mostly_zeros_p (const_tree exp)
6142 {
6143   if (TREE_CODE (exp) == CONSTRUCTOR)
6144     {
6145       HOST_WIDE_INT nz_elts, init_elts;
6146       bool complete_p;
6147 
6148       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
6149       return !complete_p || nz_elts < init_elts / 4;
6150     }
6151 
6152   return initializer_zerop (exp);
6153 }
6154 
6155 /* Return 1 if EXP contains all zeros.  */
6156 
6157 static int
6158 all_zeros_p (const_tree exp)
6159 {
6160   if (TREE_CODE (exp) == CONSTRUCTOR)
6161     {
6162       HOST_WIDE_INT nz_elts, init_elts;
6163       bool complete_p;
6164 
6165       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
6166       return nz_elts == 0;
6167     }
6168 
6169   return initializer_zerop (exp);
6170 }
6171 
6172 /* Helper function for store_constructor.
6173    TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
6174    CLEARED is as for store_constructor.
6175    ALIAS_SET is the alias set to use for any stores.
6176    If REVERSE is true, the store is to be done in reverse order.
6177 
6178    This provides a recursive shortcut back to store_constructor when it isn't
6179    necessary to go through store_field.  This is so that we can pass through
6180    the cleared field to let store_constructor know that we may not have to
6181    clear a substructure if the outer structure has already been cleared.  */
6182 
6183 static void
6184 store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos,
6185 			 poly_uint64 bitregion_start,
6186 			 poly_uint64 bitregion_end,
6187 			 machine_mode mode,
6188 			 tree exp, int cleared,
6189 			 alias_set_type alias_set, bool reverse)
6190 {
6191   poly_int64 bytepos;
6192   poly_uint64 bytesize;
6193   if (TREE_CODE (exp) == CONSTRUCTOR
6194       /* We can only call store_constructor recursively if the size and
6195 	 bit position are on a byte boundary.  */
6196       && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
6197       && maybe_ne (bitsize, 0U)
6198       && multiple_p (bitsize, BITS_PER_UNIT, &bytesize)
6199       /* If we have a nonzero bitpos for a register target, then we just
6200 	 let store_field do the bitfield handling.  This is unlikely to
6201 	 generate unnecessary clear instructions anyways.  */
6202       && (known_eq (bitpos, 0) || MEM_P (target)))
6203     {
6204       if (MEM_P (target))
6205 	{
6206 	  machine_mode target_mode = GET_MODE (target);
6207 	  if (target_mode != BLKmode
6208 	      && !multiple_p (bitpos, GET_MODE_ALIGNMENT (target_mode)))
6209 	    target_mode = BLKmode;
6210 	  target = adjust_address (target, target_mode, bytepos);
6211 	}
6212 
6213 
6214       /* Update the alias set, if required.  */
6215       if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
6216 	  && MEM_ALIAS_SET (target) != 0)
6217 	{
6218 	  target = copy_rtx (target);
6219 	  set_mem_alias_set (target, alias_set);
6220 	}
6221 
6222       store_constructor (exp, target, cleared, bytesize, reverse);
6223     }
6224   else
6225     store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
6226 		 exp, alias_set, false, reverse);
6227 }
6228 
6229 
6230 /* Returns the number of FIELD_DECLs in TYPE.  */
6231 
6232 static int
6233 fields_length (const_tree type)
6234 {
6235   tree t = TYPE_FIELDS (type);
6236   int count = 0;
6237 
6238   for (; t; t = DECL_CHAIN (t))
6239     if (TREE_CODE (t) == FIELD_DECL)
6240       ++count;
6241 
6242   return count;
6243 }
6244 
6245 
6246 /* Store the value of constructor EXP into the rtx TARGET.
6247    TARGET is either a REG or a MEM; we know it cannot conflict, since
6248    safe_from_p has been called.
6249    CLEARED is true if TARGET is known to have been zero'd.
6250    SIZE is the number of bytes of TARGET we are allowed to modify: this
6251    may not be the same as the size of EXP if we are assigning to a field
6252    which has been packed to exclude padding bits.
6253    If REVERSE is true, the store is to be done in reverse order.  */
6254 
6255 static void
6256 store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
6257 		   bool reverse)
6258 {
6259   tree type = TREE_TYPE (exp);
6260   HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6261   poly_int64 bitregion_end = known_gt (size, 0) ? size * BITS_PER_UNIT - 1 : 0;
6262 
6263   switch (TREE_CODE (type))
6264     {
6265     case RECORD_TYPE:
6266     case UNION_TYPE:
6267     case QUAL_UNION_TYPE:
6268       {
6269 	unsigned HOST_WIDE_INT idx;
6270 	tree field, value;
6271 
6272 	/* The storage order is specified for every aggregate type.  */
6273 	reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6274 
6275 	/* If size is zero or the target is already cleared, do nothing.  */
6276 	if (known_eq (size, 0) || cleared)
6277 	  cleared = 1;
6278 	/* We either clear the aggregate or indicate the value is dead.  */
6279 	else if ((TREE_CODE (type) == UNION_TYPE
6280 		  || TREE_CODE (type) == QUAL_UNION_TYPE)
6281 		 && ! CONSTRUCTOR_ELTS (exp))
6282 	  /* If the constructor is empty, clear the union.  */
6283 	  {
6284 	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6285 	    cleared = 1;
6286 	  }
6287 
6288 	/* If we are building a static constructor into a register,
6289 	   set the initial value as zero so we can fold the value into
6290 	   a constant.  But if more than one register is involved,
6291 	   this probably loses.  */
6292 	else if (REG_P (target) && TREE_STATIC (exp)
6293 		 && known_le (GET_MODE_SIZE (GET_MODE (target)),
6294 			      REGMODE_NATURAL_SIZE (GET_MODE (target))))
6295 	  {
6296 	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6297 	    cleared = 1;
6298 	  }
6299 
6300         /* If the constructor has fewer fields than the structure or
6301 	   if we are initializing the structure to mostly zeros, clear
6302 	   the whole structure first.  Don't do this if TARGET is a
6303 	   register whose mode size isn't equal to SIZE since
6304 	   clear_storage can't handle this case.  */
6305 	else if (known_size_p (size)
6306 		 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
6307 		     || mostly_zeros_p (exp))
6308 		 && (!REG_P (target)
6309 		     || known_eq (GET_MODE_SIZE (GET_MODE (target)), size)))
6310 	  {
6311 	    clear_storage (target, gen_int_mode (size, Pmode),
6312 			   BLOCK_OP_NORMAL);
6313 	    cleared = 1;
6314 	  }
6315 
6316 	if (REG_P (target) && !cleared)
6317 	  emit_clobber (target);
6318 
6319 	/* Store each element of the constructor into the
6320 	   corresponding field of TARGET.  */
6321 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6322 	  {
6323 	    machine_mode mode;
6324 	    HOST_WIDE_INT bitsize;
6325 	    HOST_WIDE_INT bitpos = 0;
6326 	    tree offset;
6327 	    rtx to_rtx = target;
6328 
6329 	    /* Just ignore missing fields.  We cleared the whole
6330 	       structure, above, if any fields are missing.  */
6331 	    if (field == 0)
6332 	      continue;
6333 
6334 	    if (cleared && initializer_zerop (value))
6335 	      continue;
6336 
6337 	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
6338 	      bitsize = tree_to_uhwi (DECL_SIZE (field));
6339 	    else
6340 	      gcc_unreachable ();
6341 
6342 	    mode = DECL_MODE (field);
6343 	    if (DECL_BIT_FIELD (field))
6344 	      mode = VOIDmode;
6345 
6346 	    offset = DECL_FIELD_OFFSET (field);
6347 	    if (tree_fits_shwi_p (offset)
6348 		&& tree_fits_shwi_p (bit_position (field)))
6349 	      {
6350 		bitpos = int_bit_position (field);
6351 		offset = NULL_TREE;
6352 	      }
6353 	    else
6354 	      gcc_unreachable ();
6355 
6356 	    /* If this initializes a field that is smaller than a
6357 	       word, at the start of a word, try to widen it to a full
6358 	       word.  This special case allows us to output C++ member
6359 	       function initializations in a form that the optimizers
6360 	       can understand.  */
6361 	    if (WORD_REGISTER_OPERATIONS
6362 		&& REG_P (target)
6363 		&& bitsize < BITS_PER_WORD
6364 		&& bitpos % BITS_PER_WORD == 0
6365 		&& GET_MODE_CLASS (mode) == MODE_INT
6366 		&& TREE_CODE (value) == INTEGER_CST
6367 		&& exp_size >= 0
6368 		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6369 	      {
6370 		tree type = TREE_TYPE (value);
6371 
6372 		if (TYPE_PRECISION (type) < BITS_PER_WORD)
6373 		  {
6374 		    type = lang_hooks.types.type_for_mode
6375 		      (word_mode, TYPE_UNSIGNED (type));
6376 		    value = fold_convert (type, value);
6377 		    /* Make sure the bits beyond the original bitsize are zero
6378 		       so that we can correctly avoid extra zeroing stores in
6379 		       later constructor elements.  */
6380 		    tree bitsize_mask
6381 		      = wide_int_to_tree (type, wi::mask (bitsize, false,
6382 							   BITS_PER_WORD));
6383 		    value = fold_build2 (BIT_AND_EXPR, type, value, bitsize_mask);
6384 		  }
6385 
6386 		if (BYTES_BIG_ENDIAN)
6387 		  value
6388 		   = fold_build2 (LSHIFT_EXPR, type, value,
6389 				   build_int_cst (type,
6390 						  BITS_PER_WORD - bitsize));
6391 		bitsize = BITS_PER_WORD;
6392 		mode = word_mode;
6393 	      }
6394 
6395 	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6396 		&& DECL_NONADDRESSABLE_P (field))
6397 	      {
6398 		to_rtx = copy_rtx (to_rtx);
6399 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6400 	      }
6401 
6402 	    store_constructor_field (to_rtx, bitsize, bitpos,
6403 				     0, bitregion_end, mode,
6404 				     value, cleared,
6405 				     get_alias_set (TREE_TYPE (field)),
6406 				     reverse);
6407 	  }
6408 	break;
6409       }
6410     case ARRAY_TYPE:
6411       {
6412 	tree value, index;
6413 	unsigned HOST_WIDE_INT i;
6414 	int need_to_clear;
6415 	tree domain;
6416 	tree elttype = TREE_TYPE (type);
6417 	int const_bounds_p;
6418 	HOST_WIDE_INT minelt = 0;
6419 	HOST_WIDE_INT maxelt = 0;
6420 
6421 	/* The storage order is specified for every aggregate type.  */
6422 	reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6423 
6424 	domain = TYPE_DOMAIN (type);
6425 	const_bounds_p = (TYPE_MIN_VALUE (domain)
6426 			  && TYPE_MAX_VALUE (domain)
6427 			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6428 			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6429 
6430 	/* If we have constant bounds for the range of the type, get them.  */
6431 	if (const_bounds_p)
6432 	  {
6433 	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6434 	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6435 	  }
6436 
6437 	/* If the constructor has fewer elements than the array, clear
6438            the whole array first.  Similarly if this is static
6439            constructor of a non-BLKmode object.  */
6440 	if (cleared)
6441 	  need_to_clear = 0;
6442 	else if (REG_P (target) && TREE_STATIC (exp))
6443 	  need_to_clear = 1;
6444 	else
6445 	  {
6446 	    unsigned HOST_WIDE_INT idx;
6447 	    tree index, value;
6448 	    HOST_WIDE_INT count = 0, zero_count = 0;
6449 	    need_to_clear = ! const_bounds_p;
6450 
6451 	    /* This loop is a more accurate version of the loop in
6452 	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
6453 	       is also needed to check for missing elements.  */
6454 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6455 	      {
6456 		HOST_WIDE_INT this_node_count;
6457 
6458 		if (need_to_clear)
6459 		  break;
6460 
6461 		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6462 		  {
6463 		    tree lo_index = TREE_OPERAND (index, 0);
6464 		    tree hi_index = TREE_OPERAND (index, 1);
6465 
6466 		    if (! tree_fits_uhwi_p (lo_index)
6467 			|| ! tree_fits_uhwi_p (hi_index))
6468 		      {
6469 			need_to_clear = 1;
6470 			break;
6471 		      }
6472 
6473 		    this_node_count = (tree_to_uhwi (hi_index)
6474 				       - tree_to_uhwi (lo_index) + 1);
6475 		  }
6476 		else
6477 		  this_node_count = 1;
6478 
6479 		count += this_node_count;
6480 		if (mostly_zeros_p (value))
6481 		  zero_count += this_node_count;
6482 	      }
6483 
6484 	    /* Clear the entire array first if there are any missing
6485 	       elements, or if the incidence of zero elements is >=
6486 	       75%.  */
6487 	    if (! need_to_clear
6488 		&& (count < maxelt - minelt + 1
6489 		    || 4 * zero_count >= 3 * count))
6490 	      need_to_clear = 1;
6491 	  }
6492 
6493 	if (need_to_clear && maybe_gt (size, 0))
6494 	  {
6495 	    if (REG_P (target))
6496 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6497 	    else
6498 	      clear_storage (target, gen_int_mode (size, Pmode),
6499 			     BLOCK_OP_NORMAL);
6500 	    cleared = 1;
6501 	  }
6502 
6503 	if (!cleared && REG_P (target))
6504 	  /* Inform later passes that the old value is dead.  */
6505 	  emit_clobber (target);
6506 
6507 	/* Store each element of the constructor into the
6508 	   corresponding element of TARGET, determined by counting the
6509 	   elements.  */
6510 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6511 	  {
6512 	    machine_mode mode;
6513 	    poly_int64 bitsize;
6514 	    HOST_WIDE_INT bitpos;
6515 	    rtx xtarget = target;
6516 
6517 	    if (cleared && initializer_zerop (value))
6518 	      continue;
6519 
6520 	    mode = TYPE_MODE (elttype);
6521 	    if (mode == BLKmode)
6522 	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6523 			 ? tree_to_uhwi (TYPE_SIZE (elttype))
6524 			 : -1);
6525 	    else
6526 	      bitsize = GET_MODE_BITSIZE (mode);
6527 
6528 	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6529 	      {
6530 		tree lo_index = TREE_OPERAND (index, 0);
6531 		tree hi_index = TREE_OPERAND (index, 1);
6532 		rtx index_r, pos_rtx;
6533 		HOST_WIDE_INT lo, hi, count;
6534 		tree position;
6535 
6536 		/* If the range is constant and "small", unroll the loop.  */
6537 		if (const_bounds_p
6538 		    && tree_fits_shwi_p (lo_index)
6539 		    && tree_fits_shwi_p (hi_index)
6540 		    && (lo = tree_to_shwi (lo_index),
6541 			hi = tree_to_shwi (hi_index),
6542 			count = hi - lo + 1,
6543 			(!MEM_P (target)
6544 			 || count <= 2
6545 			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6546 			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6547 				 <= 40 * 8)))))
6548 		  {
6549 		    lo -= minelt;  hi -= minelt;
6550 		    for (; lo <= hi; lo++)
6551 		      {
6552 			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6553 
6554 			if (MEM_P (target)
6555 			    && !MEM_KEEP_ALIAS_SET_P (target)
6556 			    && TREE_CODE (type) == ARRAY_TYPE
6557 			    && TYPE_NONALIASED_COMPONENT (type))
6558 			  {
6559 			    target = copy_rtx (target);
6560 			    MEM_KEEP_ALIAS_SET_P (target) = 1;
6561 			  }
6562 
6563 			store_constructor_field
6564 			  (target, bitsize, bitpos, 0, bitregion_end,
6565 			   mode, value, cleared,
6566 			   get_alias_set (elttype), reverse);
6567 		      }
6568 		  }
6569 		else
6570 		  {
6571 		    rtx_code_label *loop_start = gen_label_rtx ();
6572 		    rtx_code_label *loop_end = gen_label_rtx ();
6573 		    tree exit_cond;
6574 
6575 		    expand_normal (hi_index);
6576 
6577 		    index = build_decl (EXPR_LOCATION (exp),
6578 					VAR_DECL, NULL_TREE, domain);
6579 		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6580 		    SET_DECL_RTL (index, index_r);
6581 		    store_expr (lo_index, index_r, 0, false, reverse);
6582 
6583 		    /* Build the head of the loop.  */
6584 		    do_pending_stack_adjust ();
6585 		    emit_label (loop_start);
6586 
6587 		    /* Assign value to element index.  */
6588 		    position =
6589 		      fold_convert (ssizetype,
6590 				    fold_build2 (MINUS_EXPR,
6591 						 TREE_TYPE (index),
6592 						 index,
6593 						 TYPE_MIN_VALUE (domain)));
6594 
6595 		    position =
6596 			size_binop (MULT_EXPR, position,
6597 				    fold_convert (ssizetype,
6598 						  TYPE_SIZE_UNIT (elttype)));
6599 
6600 		    pos_rtx = expand_normal (position);
6601 		    xtarget = offset_address (target, pos_rtx,
6602 					      highest_pow2_factor (position));
6603 		    xtarget = adjust_address (xtarget, mode, 0);
6604 		    if (TREE_CODE (value) == CONSTRUCTOR)
6605 		      store_constructor (value, xtarget, cleared,
6606 					 exact_div (bitsize, BITS_PER_UNIT),
6607 					 reverse);
6608 		    else
6609 		      store_expr (value, xtarget, 0, false, reverse);
6610 
6611 		    /* Generate a conditional jump to exit the loop.  */
6612 		    exit_cond = build2 (LT_EXPR, integer_type_node,
6613 					index, hi_index);
6614 		    jumpif (exit_cond, loop_end,
6615 			    profile_probability::uninitialized ());
6616 
6617 		    /* Update the loop counter, and jump to the head of
6618 		       the loop.  */
6619 		    expand_assignment (index,
6620 				       build2 (PLUS_EXPR, TREE_TYPE (index),
6621 					       index, integer_one_node),
6622 				       false);
6623 
6624 		    emit_jump (loop_start);
6625 
6626 		    /* Build the end of the loop.  */
6627 		    emit_label (loop_end);
6628 		  }
6629 	      }
6630 	    else if ((index != 0 && ! tree_fits_shwi_p (index))
6631 		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6632 	      {
6633 		tree position;
6634 
6635 		if (index == 0)
6636 		  index = ssize_int (1);
6637 
6638 		if (minelt)
6639 		  index = fold_convert (ssizetype,
6640 					fold_build2 (MINUS_EXPR,
6641 						     TREE_TYPE (index),
6642 						     index,
6643 						     TYPE_MIN_VALUE (domain)));
6644 
6645 		position =
6646 		  size_binop (MULT_EXPR, index,
6647 			      fold_convert (ssizetype,
6648 					    TYPE_SIZE_UNIT (elttype)));
6649 		xtarget = offset_address (target,
6650 					  expand_normal (position),
6651 					  highest_pow2_factor (position));
6652 		xtarget = adjust_address (xtarget, mode, 0);
6653 		store_expr (value, xtarget, 0, false, reverse);
6654 	      }
6655 	    else
6656 	      {
6657 		if (index != 0)
6658 		  bitpos = ((tree_to_shwi (index) - minelt)
6659 			    * tree_to_uhwi (TYPE_SIZE (elttype)));
6660 		else
6661 		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6662 
6663 		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6664 		    && TREE_CODE (type) == ARRAY_TYPE
6665 		    && TYPE_NONALIASED_COMPONENT (type))
6666 		  {
6667 		    target = copy_rtx (target);
6668 		    MEM_KEEP_ALIAS_SET_P (target) = 1;
6669 		  }
6670 		store_constructor_field (target, bitsize, bitpos, 0,
6671 					 bitregion_end, mode, value,
6672 					 cleared, get_alias_set (elttype),
6673 					 reverse);
6674 	      }
6675 	  }
6676 	break;
6677       }
6678 
6679     case VECTOR_TYPE:
6680       {
6681 	unsigned HOST_WIDE_INT idx;
6682 	constructor_elt *ce;
6683 	int i;
6684 	int need_to_clear;
6685 	insn_code icode = CODE_FOR_nothing;
6686 	tree elt;
6687 	tree elttype = TREE_TYPE (type);
6688 	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6689 	machine_mode eltmode = TYPE_MODE (elttype);
6690 	HOST_WIDE_INT bitsize;
6691 	HOST_WIDE_INT bitpos;
6692 	rtvec vector = NULL;
6693 	poly_uint64 n_elts;
6694 	unsigned HOST_WIDE_INT const_n_elts;
6695 	alias_set_type alias;
6696 	bool vec_vec_init_p = false;
6697 	machine_mode mode = GET_MODE (target);
6698 
6699 	gcc_assert (eltmode != BLKmode);
6700 
6701 	/* Try using vec_duplicate_optab for uniform vectors.  */
6702 	if (!TREE_SIDE_EFFECTS (exp)
6703 	    && VECTOR_MODE_P (mode)
6704 	    && eltmode == GET_MODE_INNER (mode)
6705 	    && ((icode = optab_handler (vec_duplicate_optab, mode))
6706 		!= CODE_FOR_nothing)
6707 	    && (elt = uniform_vector_p (exp)))
6708 	  {
6709 	    struct expand_operand ops[2];
6710 	    create_output_operand (&ops[0], target, mode);
6711 	    create_input_operand (&ops[1], expand_normal (elt), eltmode);
6712 	    expand_insn (icode, 2, ops);
6713 	    if (!rtx_equal_p (target, ops[0].value))
6714 	      emit_move_insn (target, ops[0].value);
6715 	    break;
6716 	  }
6717 
6718 	n_elts = TYPE_VECTOR_SUBPARTS (type);
6719 	if (REG_P (target)
6720 	    && VECTOR_MODE_P (mode)
6721 	    && n_elts.is_constant (&const_n_elts))
6722 	  {
6723 	    machine_mode emode = eltmode;
6724 
6725 	    if (CONSTRUCTOR_NELTS (exp)
6726 		&& (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
6727 		    == VECTOR_TYPE))
6728 	      {
6729 		tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
6730 		gcc_assert (known_eq (CONSTRUCTOR_NELTS (exp)
6731 				      * TYPE_VECTOR_SUBPARTS (etype),
6732 				      n_elts));
6733 		emode = TYPE_MODE (etype);
6734 	      }
6735 	    icode = convert_optab_handler (vec_init_optab, mode, emode);
6736 	    if (icode != CODE_FOR_nothing)
6737 	      {
6738 		unsigned int i, n = const_n_elts;
6739 
6740 		if (emode != eltmode)
6741 		  {
6742 		    n = CONSTRUCTOR_NELTS (exp);
6743 		    vec_vec_init_p = true;
6744 		  }
6745 		vector = rtvec_alloc (n);
6746 		for (i = 0; i < n; i++)
6747 		  RTVEC_ELT (vector, i) = CONST0_RTX (emode);
6748 	      }
6749 	  }
6750 
6751 	/* If the constructor has fewer elements than the vector,
6752 	   clear the whole array first.  Similarly if this is static
6753 	   constructor of a non-BLKmode object.  */
6754 	if (cleared)
6755 	  need_to_clear = 0;
6756 	else if (REG_P (target) && TREE_STATIC (exp))
6757 	  need_to_clear = 1;
6758 	else
6759 	  {
6760 	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6761 	    tree value;
6762 
6763 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6764 	      {
6765 		tree sz = TYPE_SIZE (TREE_TYPE (value));
6766 		int n_elts_here
6767 		  = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
6768 						   TYPE_SIZE (elttype)));
6769 
6770 		count += n_elts_here;
6771 		if (mostly_zeros_p (value))
6772 		  zero_count += n_elts_here;
6773 	      }
6774 
6775 	    /* Clear the entire vector first if there are any missing elements,
6776 	       or if the incidence of zero elements is >= 75%.  */
6777 	    need_to_clear = (maybe_lt (count, n_elts)
6778 			     || 4 * zero_count >= 3 * count);
6779 	  }
6780 
6781 	if (need_to_clear && maybe_gt (size, 0) && !vector)
6782 	  {
6783 	    if (REG_P (target))
6784 	      emit_move_insn (target, CONST0_RTX (mode));
6785 	    else
6786 	      clear_storage (target, gen_int_mode (size, Pmode),
6787 			     BLOCK_OP_NORMAL);
6788 	    cleared = 1;
6789 	  }
6790 
6791 	/* Inform later passes that the old value is dead.  */
6792 	if (!cleared && !vector && REG_P (target))
6793 	  emit_move_insn (target, CONST0_RTX (mode));
6794 
6795         if (MEM_P (target))
6796 	  alias = MEM_ALIAS_SET (target);
6797 	else
6798 	  alias = get_alias_set (elttype);
6799 
6800         /* Store each element of the constructor into the corresponding
6801 	   element of TARGET, determined by counting the elements.  */
6802 	for (idx = 0, i = 0;
6803 	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6804 	     idx++, i += bitsize / elt_size)
6805 	  {
6806 	    HOST_WIDE_INT eltpos;
6807 	    tree value = ce->value;
6808 
6809 	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6810 	    if (cleared && initializer_zerop (value))
6811 	      continue;
6812 
6813 	    if (ce->index)
6814 	      eltpos = tree_to_uhwi (ce->index);
6815 	    else
6816 	      eltpos = i;
6817 
6818 	    if (vector)
6819 	      {
6820 		if (vec_vec_init_p)
6821 		  {
6822 		    gcc_assert (ce->index == NULL_TREE);
6823 		    gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
6824 		    eltpos = idx;
6825 		  }
6826 		else
6827 		  gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6828 		RTVEC_ELT (vector, eltpos) = expand_normal (value);
6829 	      }
6830 	    else
6831 	      {
6832 		machine_mode value_mode
6833 		  = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6834 		     ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
6835 		bitpos = eltpos * elt_size;
6836 		store_constructor_field (target, bitsize, bitpos, 0,
6837 					 bitregion_end, value_mode,
6838 					 value, cleared, alias, reverse);
6839 	      }
6840 	  }
6841 
6842 	if (vector)
6843 	  emit_insn (GEN_FCN (icode) (target,
6844 				      gen_rtx_PARALLEL (mode, vector)));
6845 	break;
6846       }
6847 
6848     default:
6849       gcc_unreachable ();
6850     }
6851 }
6852 
6853 /* Store the value of EXP (an expression tree)
6854    into a subfield of TARGET which has mode MODE and occupies
6855    BITSIZE bits, starting BITPOS bits from the start of TARGET.
6856    If MODE is VOIDmode, it means that we are storing into a bit-field.
6857 
6858    BITREGION_START is bitpos of the first bitfield in this region.
6859    BITREGION_END is the bitpos of the ending bitfield in this region.
6860    These two fields are 0, if the C++ memory model does not apply,
6861    or we are not interested in keeping track of bitfield regions.
6862 
6863    Always return const0_rtx unless we have something particular to
6864    return.
6865 
6866    ALIAS_SET is the alias set for the destination.  This value will
6867    (in general) be different from that for TARGET, since TARGET is a
6868    reference to the containing structure.
6869 
6870    If NONTEMPORAL is true, try generating a nontemporal store.
6871 
6872    If REVERSE is true, the store is to be done in reverse order.  */
6873 
6874 static rtx
6875 store_field (rtx target, poly_int64 bitsize, poly_int64 bitpos,
6876 	     poly_uint64 bitregion_start, poly_uint64 bitregion_end,
6877 	     machine_mode mode, tree exp,
6878 	     alias_set_type alias_set, bool nontemporal,  bool reverse)
6879 {
6880   if (TREE_CODE (exp) == ERROR_MARK)
6881     return const0_rtx;
6882 
6883   /* If we have nothing to store, do nothing unless the expression has
6884      side-effects.  Don't do that for zero sized addressable lhs of
6885      calls.  */
6886   if (known_eq (bitsize, 0)
6887       && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6888 	  || TREE_CODE (exp) != CALL_EXPR))
6889     return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6890 
6891   if (GET_CODE (target) == CONCAT)
6892     {
6893       /* We're storing into a struct containing a single __complex.  */
6894 
6895       gcc_assert (known_eq (bitpos, 0));
6896       return store_expr (exp, target, 0, nontemporal, reverse);
6897     }
6898 
6899   /* If the structure is in a register or if the component
6900      is a bit field, we cannot use addressing to access it.
6901      Use bit-field techniques or SUBREG to store in it.  */
6902 
6903   poly_int64 decl_bitsize;
6904   if (mode == VOIDmode
6905       || (mode != BLKmode && ! direct_store[(int) mode]
6906 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6907 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6908       || REG_P (target)
6909       || GET_CODE (target) == SUBREG
6910       /* If the field isn't aligned enough to store as an ordinary memref,
6911 	 store it as a bit field.  */
6912       || (mode != BLKmode
6913 	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6914 		|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
6915 	       && targetm.slow_unaligned_access (mode, MEM_ALIGN (target)))
6916 	      || !multiple_p (bitpos, BITS_PER_UNIT)))
6917       || (known_size_p (bitsize)
6918 	  && mode != BLKmode
6919 	  && maybe_gt (GET_MODE_BITSIZE (mode), bitsize))
6920       /* If the RHS and field are a constant size and the size of the
6921 	 RHS isn't the same size as the bitfield, we must use bitfield
6922 	 operations.  */
6923       || (known_size_p (bitsize)
6924 	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
6925 	  && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
6926 		       bitsize)
6927 	  /* Except for initialization of full bytes from a CONSTRUCTOR, which
6928 	     we will handle specially below.  */
6929 	  && !(TREE_CODE (exp) == CONSTRUCTOR
6930 	       && multiple_p (bitsize, BITS_PER_UNIT))
6931 	  /* And except for bitwise copying of TREE_ADDRESSABLE types,
6932 	     where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
6933 	     includes some extra padding.  store_expr / expand_expr will in
6934 	     that case call get_inner_reference that will have the bitsize
6935 	     we check here and thus the block move will not clobber the
6936 	     padding that shouldn't be clobbered.  In the future we could
6937 	     replace the TREE_ADDRESSABLE check with a check that
6938 	     get_base_address needs to live in memory.  */
6939 	  && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6940 	      || TREE_CODE (exp) != COMPONENT_REF
6941 	      || !multiple_p (bitsize, BITS_PER_UNIT)
6942 	      || !multiple_p (bitpos, BITS_PER_UNIT)
6943 	      || !poly_int_tree_p (DECL_SIZE (TREE_OPERAND (exp, 1)),
6944 				   &decl_bitsize)
6945 	      || maybe_ne (decl_bitsize, bitsize)))
6946       /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6947          decl we must use bitfield operations.  */
6948       || (known_size_p (bitsize)
6949 	  && TREE_CODE (exp) == MEM_REF
6950 	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6951 	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6952 	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6953 	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6954     {
6955       rtx temp;
6956       gimple *nop_def;
6957 
6958       /* If EXP is a NOP_EXPR of precision less than its mode, then that
6959 	 implies a mask operation.  If the precision is the same size as
6960 	 the field we're storing into, that mask is redundant.  This is
6961 	 particularly common with bit field assignments generated by the
6962 	 C front end.  */
6963       nop_def = get_def_for_expr (exp, NOP_EXPR);
6964       if (nop_def)
6965 	{
6966 	  tree type = TREE_TYPE (exp);
6967 	  if (INTEGRAL_TYPE_P (type)
6968 	      && maybe_ne (TYPE_PRECISION (type),
6969 			   GET_MODE_BITSIZE (TYPE_MODE (type)))
6970 	      && known_eq (bitsize, TYPE_PRECISION (type)))
6971 	    {
6972 	      tree op = gimple_assign_rhs1 (nop_def);
6973 	      type = TREE_TYPE (op);
6974 	      if (INTEGRAL_TYPE_P (type)
6975 		  && known_ge (TYPE_PRECISION (type), bitsize))
6976 		exp = op;
6977 	    }
6978 	}
6979 
6980       temp = expand_normal (exp);
6981 
6982       /* We don't support variable-sized BLKmode bitfields, since our
6983 	 handling of BLKmode is bound up with the ability to break
6984 	 things into words.  */
6985       gcc_assert (mode != BLKmode || bitsize.is_constant ());
6986 
6987       /* Handle calls that return values in multiple non-contiguous locations.
6988 	 The Irix 6 ABI has examples of this.  */
6989       if (GET_CODE (temp) == PARALLEL)
6990 	{
6991 	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6992 	  machine_mode temp_mode = GET_MODE (temp);
6993 	  if (temp_mode == BLKmode || temp_mode == VOIDmode)
6994 	    temp_mode = smallest_int_mode_for_size (size * BITS_PER_UNIT);
6995 	  rtx temp_target = gen_reg_rtx (temp_mode);
6996 	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6997 	  temp = temp_target;
6998 	}
6999 
7000       /* Handle calls that return BLKmode values in registers.  */
7001       else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
7002 	{
7003 	  rtx temp_target = gen_reg_rtx (GET_MODE (temp));
7004 	  copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
7005 	  temp = temp_target;
7006 	}
7007 
7008       /* If the value has aggregate type and an integral mode then, if BITSIZE
7009 	 is narrower than this mode and this is for big-endian data, we first
7010 	 need to put the value into the low-order bits for store_bit_field,
7011 	 except when MODE is BLKmode and BITSIZE larger than the word size
7012 	 (see the handling of fields larger than a word in store_bit_field).
7013 	 Moreover, the field may be not aligned on a byte boundary; in this
7014 	 case, if it has reverse storage order, it needs to be accessed as a
7015 	 scalar field with reverse storage order and we must first put the
7016 	 value into target order.  */
7017       scalar_int_mode temp_mode;
7018       if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
7019 	  && is_int_mode (GET_MODE (temp), &temp_mode))
7020 	{
7021 	  HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);
7022 
7023 	  reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
7024 
7025 	  if (reverse)
7026 	    temp = flip_storage_order (temp_mode, temp);
7027 
7028 	  gcc_checking_assert (known_le (bitsize, size));
7029 	  if (maybe_lt (bitsize, size)
7030 	      && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
7031 	      /* Use of to_constant for BLKmode was checked above.  */
7032 	      && !(mode == BLKmode && bitsize.to_constant () > BITS_PER_WORD))
7033 	    temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
7034 				 size - bitsize, NULL_RTX, 1);
7035 	}
7036 
7037       /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
7038       if (mode != VOIDmode && mode != BLKmode
7039 	  && mode != TYPE_MODE (TREE_TYPE (exp)))
7040 	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
7041 
7042       /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
7043 	 and BITPOS must be aligned on a byte boundary.  If so, we simply do
7044 	 a block copy.  Likewise for a BLKmode-like TARGET.  */
7045       if (GET_MODE (temp) == BLKmode
7046 	  && (GET_MODE (target) == BLKmode
7047 	      || (MEM_P (target)
7048 		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
7049 		  && multiple_p (bitpos, BITS_PER_UNIT)
7050 		  && multiple_p (bitsize, BITS_PER_UNIT))))
7051 	{
7052 	  gcc_assert (MEM_P (target) && MEM_P (temp));
7053 	  poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
7054 	  poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
7055 
7056 	  target = adjust_address (target, VOIDmode, bytepos);
7057 	  emit_block_move (target, temp,
7058 			   gen_int_mode (bytesize, Pmode),
7059 			   BLOCK_OP_NORMAL);
7060 
7061 	  return const0_rtx;
7062 	}
7063 
7064       /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
7065 	 word size, we need to load the value (see again store_bit_field).  */
7066       if (GET_MODE (temp) == BLKmode && known_le (bitsize, BITS_PER_WORD))
7067 	{
7068 	  scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
7069 	  temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
7070 				    temp_mode, false, NULL);
7071 	}
7072 
7073       /* Store the value in the bitfield.  */
7074       store_bit_field (target, bitsize, bitpos,
7075 		       bitregion_start, bitregion_end,
7076 		       mode, temp, reverse);
7077 
7078       return const0_rtx;
7079     }
7080   else
7081     {
7082       /* Now build a reference to just the desired component.  */
7083       rtx to_rtx = adjust_address (target, mode,
7084 				   exact_div (bitpos, BITS_PER_UNIT));
7085 
7086       if (to_rtx == target)
7087 	to_rtx = copy_rtx (to_rtx);
7088 
7089       if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
7090 	set_mem_alias_set (to_rtx, alias_set);
7091 
7092       /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
7093 	 into a target smaller than its type; handle that case now.  */
7094       if (TREE_CODE (exp) == CONSTRUCTOR && known_size_p (bitsize))
7095 	{
7096 	  poly_int64 bytesize = exact_div (bitsize, BITS_PER_UNIT);
7097 	  store_constructor (exp, to_rtx, 0, bytesize, reverse);
7098 	  return to_rtx;
7099 	}
7100 
7101       return store_expr (exp, to_rtx, 0, nontemporal, reverse);
7102     }
7103 }
7104 
7105 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
7106    an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
7107    codes and find the ultimate containing object, which we return.
7108 
7109    We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
7110    bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
7111    storage order of the field.
7112    If the position of the field is variable, we store a tree
7113    giving the variable offset (in units) in *POFFSET.
7114    This offset is in addition to the bit position.
7115    If the position is not variable, we store 0 in *POFFSET.
7116 
7117    If any of the extraction expressions is volatile,
7118    we store 1 in *PVOLATILEP.  Otherwise we don't change that.
7119 
7120    If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
7121    Otherwise, it is a mode that can be used to access the field.
7122 
7123    If the field describes a variable-sized object, *PMODE is set to
7124    BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
7125    this case, but the address of the object can be found.  */
7126 
7127 tree
7128 get_inner_reference (tree exp, poly_int64_pod *pbitsize,
7129 		     poly_int64_pod *pbitpos, tree *poffset,
7130 		     machine_mode *pmode, int *punsignedp,
7131 		     int *preversep, int *pvolatilep)
7132 {
7133   tree size_tree = 0;
7134   machine_mode mode = VOIDmode;
7135   bool blkmode_bitfield = false;
7136   tree offset = size_zero_node;
7137   poly_offset_int bit_offset = 0;
7138 
7139   /* First get the mode, signedness, storage order and size.  We do this from
7140      just the outermost expression.  */
7141   *pbitsize = -1;
7142   if (TREE_CODE (exp) == COMPONENT_REF)
7143     {
7144       tree field = TREE_OPERAND (exp, 1);
7145       size_tree = DECL_SIZE (field);
7146       if (flag_strict_volatile_bitfields > 0
7147 	  && TREE_THIS_VOLATILE (exp)
7148 	  && DECL_BIT_FIELD_TYPE (field)
7149 	  && DECL_MODE (field) != BLKmode)
7150 	/* Volatile bitfields should be accessed in the mode of the
7151 	     field's type, not the mode computed based on the bit
7152 	     size.  */
7153 	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
7154       else if (!DECL_BIT_FIELD (field))
7155 	{
7156 	  mode = DECL_MODE (field);
7157 	  /* For vector fields re-check the target flags, as DECL_MODE
7158 	     could have been set with different target flags than
7159 	     the current function has.  */
7160 	  if (mode == BLKmode
7161 	      && VECTOR_TYPE_P (TREE_TYPE (field))
7162 	      && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field))))
7163 	    mode = TYPE_MODE (TREE_TYPE (field));
7164 	}
7165       else if (DECL_MODE (field) == BLKmode)
7166 	blkmode_bitfield = true;
7167 
7168       *punsignedp = DECL_UNSIGNED (field);
7169     }
7170   else if (TREE_CODE (exp) == BIT_FIELD_REF)
7171     {
7172       size_tree = TREE_OPERAND (exp, 1);
7173       *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
7174 		     || TYPE_UNSIGNED (TREE_TYPE (exp)));
7175 
7176       /* For vector types, with the correct size of access, use the mode of
7177 	 inner type.  */
7178       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
7179 	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
7180 	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
7181         mode = TYPE_MODE (TREE_TYPE (exp));
7182     }
7183   else
7184     {
7185       mode = TYPE_MODE (TREE_TYPE (exp));
7186       *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
7187 
7188       if (mode == BLKmode)
7189 	size_tree = TYPE_SIZE (TREE_TYPE (exp));
7190       else
7191 	*pbitsize = GET_MODE_BITSIZE (mode);
7192     }
7193 
7194   if (size_tree != 0)
7195     {
7196       if (! tree_fits_uhwi_p (size_tree))
7197 	mode = BLKmode, *pbitsize = -1;
7198       else
7199 	*pbitsize = tree_to_uhwi (size_tree);
7200     }
7201 
7202   *preversep = reverse_storage_order_for_component_p (exp);
7203 
7204   /* Compute cumulative bit-offset for nested component-refs and array-refs,
7205      and find the ultimate containing object.  */
7206   while (1)
7207     {
7208       switch (TREE_CODE (exp))
7209 	{
7210 	case BIT_FIELD_REF:
7211 	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
7212 	  break;
7213 
7214 	case COMPONENT_REF:
7215 	  {
7216 	    tree field = TREE_OPERAND (exp, 1);
7217 	    tree this_offset = component_ref_field_offset (exp);
7218 
7219 	    /* If this field hasn't been filled in yet, don't go past it.
7220 	       This should only happen when folding expressions made during
7221 	       type construction.  */
7222 	    if (this_offset == 0)
7223 	      break;
7224 
7225 	    offset = size_binop (PLUS_EXPR, offset, this_offset);
7226 	    bit_offset += wi::to_poly_offset (DECL_FIELD_BIT_OFFSET (field));
7227 
7228 	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
7229 	  }
7230 	  break;
7231 
7232 	case ARRAY_REF:
7233 	case ARRAY_RANGE_REF:
7234 	  {
7235 	    tree index = TREE_OPERAND (exp, 1);
7236 	    tree low_bound = array_ref_low_bound (exp);
7237 	    tree unit_size = array_ref_element_size (exp);
7238 
7239 	    /* We assume all arrays have sizes that are a multiple of a byte.
7240 	       First subtract the lower bound, if any, in the type of the
7241 	       index, then convert to sizetype and multiply by the size of
7242 	       the array element.  */
7243 	    if (! integer_zerop (low_bound))
7244 	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
7245 				   index, low_bound);
7246 
7247 	    offset = size_binop (PLUS_EXPR, offset,
7248 			         size_binop (MULT_EXPR,
7249 					     fold_convert (sizetype, index),
7250 					     unit_size));
7251 	  }
7252 	  break;
7253 
7254 	case REALPART_EXPR:
7255 	  break;
7256 
7257 	case IMAGPART_EXPR:
7258 	  bit_offset += *pbitsize;
7259 	  break;
7260 
7261 	case VIEW_CONVERT_EXPR:
7262 	  break;
7263 
7264 	case MEM_REF:
7265 	  /* Hand back the decl for MEM[&decl, off].  */
7266 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
7267 	    {
7268 	      tree off = TREE_OPERAND (exp, 1);
7269 	      if (!integer_zerop (off))
7270 		{
7271 		  poly_offset_int boff = mem_ref_offset (exp);
7272 		  boff <<= LOG2_BITS_PER_UNIT;
7273 		  bit_offset += boff;
7274 		}
7275 	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7276 	    }
7277 	  goto done;
7278 
7279 	default:
7280 	  goto done;
7281 	}
7282 
7283       /* If any reference in the chain is volatile, the effect is volatile.  */
7284       if (TREE_THIS_VOLATILE (exp))
7285 	*pvolatilep = 1;
7286 
7287       exp = TREE_OPERAND (exp, 0);
7288     }
7289  done:
7290 
7291   /* If OFFSET is constant, see if we can return the whole thing as a
7292      constant bit position.  Make sure to handle overflow during
7293      this conversion.  */
7294   if (poly_int_tree_p (offset))
7295     {
7296       poly_offset_int tem = wi::sext (wi::to_poly_offset (offset),
7297 				      TYPE_PRECISION (sizetype));
7298       tem <<= LOG2_BITS_PER_UNIT;
7299       tem += bit_offset;
7300       if (tem.to_shwi (pbitpos))
7301 	*poffset = offset = NULL_TREE;
7302     }
7303 
7304   /* Otherwise, split it up.  */
7305   if (offset)
7306     {
7307       /* Avoid returning a negative bitpos as this may wreak havoc later.  */
7308       if (!bit_offset.to_shwi (pbitpos) || maybe_lt (*pbitpos, 0))
7309         {
7310 	  *pbitpos = num_trailing_bits (bit_offset.force_shwi ());
7311 	  poly_offset_int bytes = bits_to_bytes_round_down (bit_offset);
7312 	  offset = size_binop (PLUS_EXPR, offset,
7313 			       build_int_cst (sizetype, bytes.force_shwi ()));
7314 	}
7315 
7316       *poffset = offset;
7317     }
7318 
7319   /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
7320   if (mode == VOIDmode
7321       && blkmode_bitfield
7322       && multiple_p (*pbitpos, BITS_PER_UNIT)
7323       && multiple_p (*pbitsize, BITS_PER_UNIT))
7324     *pmode = BLKmode;
7325   else
7326     *pmode = mode;
7327 
7328   return exp;
7329 }
7330 
7331 /* Alignment in bits the TARGET of an assignment may be assumed to have.  */
7332 
7333 static unsigned HOST_WIDE_INT
7334 target_align (const_tree target)
7335 {
7336   /* We might have a chain of nested references with intermediate misaligning
7337      bitfields components, so need to recurse to find out.  */
7338 
7339   unsigned HOST_WIDE_INT this_align, outer_align;
7340 
7341   switch (TREE_CODE (target))
7342     {
7343     case BIT_FIELD_REF:
7344       return 1;
7345 
7346     case COMPONENT_REF:
7347       this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7348       outer_align = target_align (TREE_OPERAND (target, 0));
7349       return MIN (this_align, outer_align);
7350 
7351     case ARRAY_REF:
7352     case ARRAY_RANGE_REF:
7353       this_align = TYPE_ALIGN (TREE_TYPE (target));
7354       outer_align = target_align (TREE_OPERAND (target, 0));
7355       return MIN (this_align, outer_align);
7356 
7357     CASE_CONVERT:
7358     case NON_LVALUE_EXPR:
7359     case VIEW_CONVERT_EXPR:
7360       this_align = TYPE_ALIGN (TREE_TYPE (target));
7361       outer_align = target_align (TREE_OPERAND (target, 0));
7362       return MAX (this_align, outer_align);
7363 
7364     default:
7365       return TYPE_ALIGN (TREE_TYPE (target));
7366     }
7367 }
7368 
7369 
7370 /* Given an rtx VALUE that may contain additions and multiplications, return
7371    an equivalent value that just refers to a register, memory, or constant.
7372    This is done by generating instructions to perform the arithmetic and
7373    returning a pseudo-register containing the value.
7374 
7375    The returned value may be a REG, SUBREG, MEM or constant.  */
7376 
7377 rtx
7378 force_operand (rtx value, rtx target)
7379 {
7380   rtx op1, op2;
7381   /* Use subtarget as the target for operand 0 of a binary operation.  */
7382   rtx subtarget = get_subtarget (target);
7383   enum rtx_code code = GET_CODE (value);
7384 
7385   /* Check for subreg applied to an expression produced by loop optimizer.  */
7386   if (code == SUBREG
7387       && !REG_P (SUBREG_REG (value))
7388       && !MEM_P (SUBREG_REG (value)))
7389     {
7390       value
7391 	= simplify_gen_subreg (GET_MODE (value),
7392 			       force_reg (GET_MODE (SUBREG_REG (value)),
7393 					  force_operand (SUBREG_REG (value),
7394 							 NULL_RTX)),
7395 			       GET_MODE (SUBREG_REG (value)),
7396 			       SUBREG_BYTE (value));
7397       code = GET_CODE (value);
7398     }
7399 
7400   /* Check for a PIC address load.  */
7401   if ((code == PLUS || code == MINUS)
7402       && XEXP (value, 0) == pic_offset_table_rtx
7403       && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7404 	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
7405 	  || GET_CODE (XEXP (value, 1)) == CONST))
7406     {
7407       if (!subtarget)
7408 	subtarget = gen_reg_rtx (GET_MODE (value));
7409       emit_move_insn (subtarget, value);
7410       return subtarget;
7411     }
7412 
7413   if (ARITHMETIC_P (value))
7414     {
7415       op2 = XEXP (value, 1);
7416       if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7417 	subtarget = 0;
7418       if (code == MINUS && CONST_INT_P (op2))
7419 	{
7420 	  code = PLUS;
7421 	  op2 = negate_rtx (GET_MODE (value), op2);
7422 	}
7423 
7424       /* Check for an addition with OP2 a constant integer and our first
7425          operand a PLUS of a virtual register and something else.  In that
7426          case, we want to emit the sum of the virtual register and the
7427          constant first and then add the other value.  This allows virtual
7428          register instantiation to simply modify the constant rather than
7429          creating another one around this addition.  */
7430       if (code == PLUS && CONST_INT_P (op2)
7431 	  && GET_CODE (XEXP (value, 0)) == PLUS
7432 	  && REG_P (XEXP (XEXP (value, 0), 0))
7433 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7434 	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7435 	{
7436 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
7437 					  XEXP (XEXP (value, 0), 0), op2,
7438 					  subtarget, 0, OPTAB_LIB_WIDEN);
7439 	  return expand_simple_binop (GET_MODE (value), code, temp,
7440 				      force_operand (XEXP (XEXP (value,
7441 								 0), 1), 0),
7442 				      target, 0, OPTAB_LIB_WIDEN);
7443 	}
7444 
7445       op1 = force_operand (XEXP (value, 0), subtarget);
7446       op2 = force_operand (op2, NULL_RTX);
7447       switch (code)
7448 	{
7449 	case MULT:
7450 	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
7451 	case DIV:
7452 	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
7453 	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
7454 					target, 1, OPTAB_LIB_WIDEN);
7455 	  else
7456 	    return expand_divmod (0,
7457 				  FLOAT_MODE_P (GET_MODE (value))
7458 				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
7459 				  GET_MODE (value), op1, op2, target, 0);
7460 	case MOD:
7461 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7462 				target, 0);
7463 	case UDIV:
7464 	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7465 				target, 1);
7466 	case UMOD:
7467 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7468 				target, 1);
7469 	case ASHIFTRT:
7470 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7471 				      target, 0, OPTAB_LIB_WIDEN);
7472 	default:
7473 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7474 				      target, 1, OPTAB_LIB_WIDEN);
7475 	}
7476     }
7477   if (UNARY_P (value))
7478     {
7479       if (!target)
7480 	target = gen_reg_rtx (GET_MODE (value));
7481       op1 = force_operand (XEXP (value, 0), NULL_RTX);
7482       switch (code)
7483 	{
7484 	case ZERO_EXTEND:
7485 	case SIGN_EXTEND:
7486 	case TRUNCATE:
7487 	case FLOAT_EXTEND:
7488 	case FLOAT_TRUNCATE:
7489 	  convert_move (target, op1, code == ZERO_EXTEND);
7490 	  return target;
7491 
7492 	case FIX:
7493 	case UNSIGNED_FIX:
7494 	  expand_fix (target, op1, code == UNSIGNED_FIX);
7495 	  return target;
7496 
7497 	case FLOAT:
7498 	case UNSIGNED_FLOAT:
7499 	  expand_float (target, op1, code == UNSIGNED_FLOAT);
7500 	  return target;
7501 
7502 	default:
7503 	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7504 	}
7505     }
7506 
7507 #ifdef INSN_SCHEDULING
7508   /* On machines that have insn scheduling, we want all memory reference to be
7509      explicit, so we need to deal with such paradoxical SUBREGs.  */
7510   if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7511     value
7512       = simplify_gen_subreg (GET_MODE (value),
7513 			     force_reg (GET_MODE (SUBREG_REG (value)),
7514 					force_operand (SUBREG_REG (value),
7515 						       NULL_RTX)),
7516 			     GET_MODE (SUBREG_REG (value)),
7517 			     SUBREG_BYTE (value));
7518 #endif
7519 
7520   return value;
7521 }
7522 
7523 /* Subroutine of expand_expr: return nonzero iff there is no way that
7524    EXP can reference X, which is being modified.  TOP_P is nonzero if this
7525    call is going to be used to determine whether we need a temporary
7526    for EXP, as opposed to a recursive call to this function.
7527 
7528    It is always safe for this routine to return zero since it merely
7529    searches for optimization opportunities.  */
7530 
7531 int
7532 safe_from_p (const_rtx x, tree exp, int top_p)
7533 {
7534   rtx exp_rtl = 0;
7535   int i, nops;
7536 
7537   if (x == 0
7538       /* If EXP has varying size, we MUST use a target since we currently
7539 	 have no way of allocating temporaries of variable size
7540 	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7541 	 So we assume here that something at a higher level has prevented a
7542 	 clash.  This is somewhat bogus, but the best we can do.  Only
7543 	 do this when X is BLKmode and when we are at the top level.  */
7544       || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7545 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7546 	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7547 	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7548 	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7549 	      != INTEGER_CST)
7550 	  && GET_MODE (x) == BLKmode)
7551       /* If X is in the outgoing argument area, it is always safe.  */
7552       || (MEM_P (x)
7553 	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
7554 	      || (GET_CODE (XEXP (x, 0)) == PLUS
7555 		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7556     return 1;
7557 
7558   /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7559      find the underlying pseudo.  */
7560   if (GET_CODE (x) == SUBREG)
7561     {
7562       x = SUBREG_REG (x);
7563       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7564 	return 0;
7565     }
7566 
7567   /* Now look at our tree code and possibly recurse.  */
7568   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7569     {
7570     case tcc_declaration:
7571       exp_rtl = DECL_RTL_IF_SET (exp);
7572       break;
7573 
7574     case tcc_constant:
7575       return 1;
7576 
7577     case tcc_exceptional:
7578       if (TREE_CODE (exp) == TREE_LIST)
7579 	{
7580 	  while (1)
7581 	    {
7582 	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7583 		return 0;
7584 	      exp = TREE_CHAIN (exp);
7585 	      if (!exp)
7586 		return 1;
7587 	      if (TREE_CODE (exp) != TREE_LIST)
7588 		return safe_from_p (x, exp, 0);
7589 	    }
7590 	}
7591       else if (TREE_CODE (exp) == CONSTRUCTOR)
7592 	{
7593 	  constructor_elt *ce;
7594 	  unsigned HOST_WIDE_INT idx;
7595 
7596 	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7597 	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7598 		|| !safe_from_p (x, ce->value, 0))
7599 	      return 0;
7600 	  return 1;
7601 	}
7602       else if (TREE_CODE (exp) == ERROR_MARK)
7603 	return 1;	/* An already-visited SAVE_EXPR? */
7604       else
7605 	return 0;
7606 
7607     case tcc_statement:
7608       /* The only case we look at here is the DECL_INITIAL inside a
7609 	 DECL_EXPR.  */
7610       return (TREE_CODE (exp) != DECL_EXPR
7611 	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7612 	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7613 	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7614 
7615     case tcc_binary:
7616     case tcc_comparison:
7617       if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7618 	return 0;
7619       /* Fall through.  */
7620 
7621     case tcc_unary:
7622       return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7623 
7624     case tcc_expression:
7625     case tcc_reference:
7626     case tcc_vl_exp:
7627       /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
7628 	 the expression.  If it is set, we conflict iff we are that rtx or
7629 	 both are in memory.  Otherwise, we check all operands of the
7630 	 expression recursively.  */
7631 
7632       switch (TREE_CODE (exp))
7633 	{
7634 	case ADDR_EXPR:
7635 	  /* If the operand is static or we are static, we can't conflict.
7636 	     Likewise if we don't conflict with the operand at all.  */
7637 	  if (staticp (TREE_OPERAND (exp, 0))
7638 	      || TREE_STATIC (exp)
7639 	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7640 	    return 1;
7641 
7642 	  /* Otherwise, the only way this can conflict is if we are taking
7643 	     the address of a DECL a that address if part of X, which is
7644 	     very rare.  */
7645 	  exp = TREE_OPERAND (exp, 0);
7646 	  if (DECL_P (exp))
7647 	    {
7648 	      if (!DECL_RTL_SET_P (exp)
7649 		  || !MEM_P (DECL_RTL (exp)))
7650 		return 0;
7651 	      else
7652 		exp_rtl = XEXP (DECL_RTL (exp), 0);
7653 	    }
7654 	  break;
7655 
7656 	case MEM_REF:
7657 	  if (MEM_P (x)
7658 	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7659 					get_alias_set (exp)))
7660 	    return 0;
7661 	  break;
7662 
7663 	case CALL_EXPR:
7664 	  /* Assume that the call will clobber all hard registers and
7665 	     all of memory.  */
7666 	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7667 	      || MEM_P (x))
7668 	    return 0;
7669 	  break;
7670 
7671 	case WITH_CLEANUP_EXPR:
7672 	case CLEANUP_POINT_EXPR:
7673 	  /* Lowered by gimplify.c.  */
7674 	  gcc_unreachable ();
7675 
7676 	case SAVE_EXPR:
7677 	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7678 
7679 	default:
7680 	  break;
7681 	}
7682 
7683       /* If we have an rtx, we do not need to scan our operands.  */
7684       if (exp_rtl)
7685 	break;
7686 
7687       nops = TREE_OPERAND_LENGTH (exp);
7688       for (i = 0; i < nops; i++)
7689 	if (TREE_OPERAND (exp, i) != 0
7690 	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7691 	  return 0;
7692 
7693       break;
7694 
7695     case tcc_type:
7696       /* Should never get a type here.  */
7697       gcc_unreachable ();
7698     }
7699 
7700   /* If we have an rtl, find any enclosed object.  Then see if we conflict
7701      with it.  */
7702   if (exp_rtl)
7703     {
7704       if (GET_CODE (exp_rtl) == SUBREG)
7705 	{
7706 	  exp_rtl = SUBREG_REG (exp_rtl);
7707 	  if (REG_P (exp_rtl)
7708 	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7709 	    return 0;
7710 	}
7711 
7712       /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
7713 	 are memory and they conflict.  */
7714       return ! (rtx_equal_p (x, exp_rtl)
7715 		|| (MEM_P (x) && MEM_P (exp_rtl)
7716 		    && true_dependence (exp_rtl, VOIDmode, x)));
7717     }
7718 
7719   /* If we reach here, it is safe.  */
7720   return 1;
7721 }
7722 
7723 
7724 /* Return the highest power of two that EXP is known to be a multiple of.
7725    This is used in updating alignment of MEMs in array references.  */
7726 
7727 unsigned HOST_WIDE_INT
7728 highest_pow2_factor (const_tree exp)
7729 {
7730   unsigned HOST_WIDE_INT ret;
7731   int trailing_zeros = tree_ctz (exp);
7732   if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7733     return BIGGEST_ALIGNMENT;
7734   ret = HOST_WIDE_INT_1U << trailing_zeros;
7735   if (ret > BIGGEST_ALIGNMENT)
7736     return BIGGEST_ALIGNMENT;
7737   return ret;
7738 }
7739 
7740 /* Similar, except that the alignment requirements of TARGET are
7741    taken into account.  Assume it is at least as aligned as its
7742    type, unless it is a COMPONENT_REF in which case the layout of
7743    the structure gives the alignment.  */
7744 
7745 static unsigned HOST_WIDE_INT
7746 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7747 {
7748   unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7749   unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7750 
7751   return MAX (factor, talign);
7752 }
7753 
7754 /* Convert the tree comparison code TCODE to the rtl one where the
7755    signedness is UNSIGNEDP.  */
7756 
7757 static enum rtx_code
7758 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7759 {
7760   enum rtx_code code;
7761   switch (tcode)
7762     {
7763     case EQ_EXPR:
7764       code = EQ;
7765       break;
7766     case NE_EXPR:
7767       code = NE;
7768       break;
7769     case LT_EXPR:
7770       code = unsignedp ? LTU : LT;
7771       break;
7772     case LE_EXPR:
7773       code = unsignedp ? LEU : LE;
7774       break;
7775     case GT_EXPR:
7776       code = unsignedp ? GTU : GT;
7777       break;
7778     case GE_EXPR:
7779       code = unsignedp ? GEU : GE;
7780       break;
7781     case UNORDERED_EXPR:
7782       code = UNORDERED;
7783       break;
7784     case ORDERED_EXPR:
7785       code = ORDERED;
7786       break;
7787     case UNLT_EXPR:
7788       code = UNLT;
7789       break;
7790     case UNLE_EXPR:
7791       code = UNLE;
7792       break;
7793     case UNGT_EXPR:
7794       code = UNGT;
7795       break;
7796     case UNGE_EXPR:
7797       code = UNGE;
7798       break;
7799     case UNEQ_EXPR:
7800       code = UNEQ;
7801       break;
7802     case LTGT_EXPR:
7803       code = LTGT;
7804       break;
7805 
7806     default:
7807       gcc_unreachable ();
7808     }
7809   return code;
7810 }
7811 
7812 /* Subroutine of expand_expr.  Expand the two operands of a binary
7813    expression EXP0 and EXP1 placing the results in OP0 and OP1.
7814    The value may be stored in TARGET if TARGET is nonzero.  The
7815    MODIFIER argument is as documented by expand_expr.  */
7816 
7817 void
7818 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7819 		 enum expand_modifier modifier)
7820 {
7821   if (! safe_from_p (target, exp1, 1))
7822     target = 0;
7823   if (operand_equal_p (exp0, exp1, 0))
7824     {
7825       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7826       *op1 = copy_rtx (*op0);
7827     }
7828   else
7829     {
7830       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7831       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7832     }
7833 }
7834 
7835 
7836 /* Return a MEM that contains constant EXP.  DEFER is as for
7837    output_constant_def and MODIFIER is as for expand_expr.  */
7838 
7839 static rtx
7840 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7841 {
7842   rtx mem;
7843 
7844   mem = output_constant_def (exp, defer);
7845   if (modifier != EXPAND_INITIALIZER)
7846     mem = use_anchored_address (mem);
7847   return mem;
7848 }
7849 
7850 /* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
7851    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7852 
7853 static rtx
7854 expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode,
7855 		         enum expand_modifier modifier, addr_space_t as)
7856 {
7857   rtx result, subtarget;
7858   tree inner, offset;
7859   poly_int64 bitsize, bitpos;
7860   int unsignedp, reversep, volatilep = 0;
7861   machine_mode mode1;
7862 
7863   /* If we are taking the address of a constant and are at the top level,
7864      we have to use output_constant_def since we can't call force_const_mem
7865      at top level.  */
7866   /* ??? This should be considered a front-end bug.  We should not be
7867      generating ADDR_EXPR of something that isn't an LVALUE.  The only
7868      exception here is STRING_CST.  */
7869   if (CONSTANT_CLASS_P (exp))
7870     {
7871       result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7872       if (modifier < EXPAND_SUM)
7873 	result = force_operand (result, target);
7874       return result;
7875     }
7876 
7877   /* Everything must be something allowed by is_gimple_addressable.  */
7878   switch (TREE_CODE (exp))
7879     {
7880     case INDIRECT_REF:
7881       /* This case will happen via recursion for &a->b.  */
7882       return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7883 
7884     case MEM_REF:
7885       {
7886 	tree tem = TREE_OPERAND (exp, 0);
7887 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
7888 	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7889 	return expand_expr (tem, target, tmode, modifier);
7890       }
7891 
7892     case TARGET_MEM_REF:
7893       return addr_for_mem_ref (exp, as, true);
7894 
7895     case CONST_DECL:
7896       /* Expand the initializer like constants above.  */
7897       result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7898 					   0, modifier), 0);
7899       if (modifier < EXPAND_SUM)
7900 	result = force_operand (result, target);
7901       return result;
7902 
7903     case REALPART_EXPR:
7904       /* The real part of the complex number is always first, therefore
7905 	 the address is the same as the address of the parent object.  */
7906       offset = 0;
7907       bitpos = 0;
7908       inner = TREE_OPERAND (exp, 0);
7909       break;
7910 
7911     case IMAGPART_EXPR:
7912       /* The imaginary part of the complex number is always second.
7913 	 The expression is therefore always offset by the size of the
7914 	 scalar type.  */
7915       offset = 0;
7916       bitpos = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (exp)));
7917       inner = TREE_OPERAND (exp, 0);
7918       break;
7919 
7920     case COMPOUND_LITERAL_EXPR:
7921       /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7922 	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7923 	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7924 	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7925 	 the initializers aren't gimplified.  */
7926       if (COMPOUND_LITERAL_EXPR_DECL (exp)
7927 	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7928 	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7929 					target, tmode, modifier, as);
7930       /* FALLTHRU */
7931     default:
7932       /* If the object is a DECL, then expand it for its rtl.  Don't bypass
7933 	 expand_expr, as that can have various side effects; LABEL_DECLs for
7934 	 example, may not have their DECL_RTL set yet.  Expand the rtl of
7935 	 CONSTRUCTORs too, which should yield a memory reference for the
7936 	 constructor's contents.  Assume language specific tree nodes can
7937 	 be expanded in some interesting way.  */
7938       gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7939       if (DECL_P (exp)
7940 	  || TREE_CODE (exp) == CONSTRUCTOR
7941 	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7942 	{
7943 	  result = expand_expr (exp, target, tmode,
7944 				modifier == EXPAND_INITIALIZER
7945 				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7946 
7947 	  /* If the DECL isn't in memory, then the DECL wasn't properly
7948 	     marked TREE_ADDRESSABLE, which will be either a front-end
7949 	     or a tree optimizer bug.  */
7950 
7951 	  gcc_assert (MEM_P (result));
7952 	  result = XEXP (result, 0);
7953 
7954 	  /* ??? Is this needed anymore?  */
7955 	  if (DECL_P (exp))
7956 	    TREE_USED (exp) = 1;
7957 
7958 	  if (modifier != EXPAND_INITIALIZER
7959 	      && modifier != EXPAND_CONST_ADDRESS
7960 	      && modifier != EXPAND_SUM)
7961 	    result = force_operand (result, target);
7962 	  return result;
7963 	}
7964 
7965       /* Pass FALSE as the last argument to get_inner_reference although
7966 	 we are expanding to RTL.  The rationale is that we know how to
7967 	 handle "aligning nodes" here: we can just bypass them because
7968 	 they won't change the final object whose address will be returned
7969 	 (they actually exist only for that purpose).  */
7970       inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
7971 				   &unsignedp, &reversep, &volatilep);
7972       break;
7973     }
7974 
7975   /* We must have made progress.  */
7976   gcc_assert (inner != exp);
7977 
7978   subtarget = offset || maybe_ne (bitpos, 0) ? NULL_RTX : target;
7979   /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7980      inner alignment, force the inner to be sufficiently aligned.  */
7981   if (CONSTANT_CLASS_P (inner)
7982       && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7983     {
7984       inner = copy_node (inner);
7985       TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7986       SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
7987       TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7988     }
7989   result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7990 
7991   if (offset)
7992     {
7993       rtx tmp;
7994 
7995       if (modifier != EXPAND_NORMAL)
7996 	result = force_operand (result, NULL);
7997       tmp = expand_expr (offset, NULL_RTX, tmode,
7998 			 modifier == EXPAND_INITIALIZER
7999 			  ? EXPAND_INITIALIZER : EXPAND_NORMAL);
8000 
8001       /* expand_expr is allowed to return an object in a mode other
8002 	 than TMODE.  If it did, we need to convert.  */
8003       if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
8004 	tmp = convert_modes (tmode, GET_MODE (tmp),
8005 			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
8006       result = convert_memory_address_addr_space (tmode, result, as);
8007       tmp = convert_memory_address_addr_space (tmode, tmp, as);
8008 
8009       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8010 	result = simplify_gen_binary (PLUS, tmode, result, tmp);
8011       else
8012 	{
8013 	  subtarget = maybe_ne (bitpos, 0) ? NULL_RTX : target;
8014 	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
8015 					1, OPTAB_LIB_WIDEN);
8016 	}
8017     }
8018 
8019   if (maybe_ne (bitpos, 0))
8020     {
8021       /* Someone beforehand should have rejected taking the address
8022 	 of an object that isn't byte-aligned.  */
8023       poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
8024       result = convert_memory_address_addr_space (tmode, result, as);
8025       result = plus_constant (tmode, result, bytepos);
8026       if (modifier < EXPAND_SUM)
8027 	result = force_operand (result, target);
8028     }
8029 
8030   return result;
8031 }
8032 
8033 /* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
8034    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
8035 
8036 static rtx
8037 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
8038 		       enum expand_modifier modifier)
8039 {
8040   addr_space_t as = ADDR_SPACE_GENERIC;
8041   scalar_int_mode address_mode = Pmode;
8042   scalar_int_mode pointer_mode = ptr_mode;
8043   machine_mode rmode;
8044   rtx result;
8045 
8046   /* Target mode of VOIDmode says "whatever's natural".  */
8047   if (tmode == VOIDmode)
8048     tmode = TYPE_MODE (TREE_TYPE (exp));
8049 
8050   if (POINTER_TYPE_P (TREE_TYPE (exp)))
8051     {
8052       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
8053       address_mode = targetm.addr_space.address_mode (as);
8054       pointer_mode = targetm.addr_space.pointer_mode (as);
8055     }
8056 
8057   /* We can get called with some Weird Things if the user does silliness
8058      like "(short) &a".  In that case, convert_memory_address won't do
8059      the right thing, so ignore the given target mode.  */
8060   scalar_int_mode new_tmode = (tmode == pointer_mode
8061 			       ? pointer_mode
8062 			       : address_mode);
8063 
8064   result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
8065 				    new_tmode, modifier, as);
8066 
8067   /* Despite expand_expr claims concerning ignoring TMODE when not
8068      strictly convenient, stuff breaks if we don't honor it.  Note
8069      that combined with the above, we only do this for pointer modes.  */
8070   rmode = GET_MODE (result);
8071   if (rmode == VOIDmode)
8072     rmode = new_tmode;
8073   if (rmode != new_tmode)
8074     result = convert_memory_address_addr_space (new_tmode, result, as);
8075 
8076   return result;
8077 }
8078 
8079 /* Generate code for computing CONSTRUCTOR EXP.
8080    An rtx for the computed value is returned.  If AVOID_TEMP_MEM
8081    is TRUE, instead of creating a temporary variable in memory
8082    NULL is returned and the caller needs to handle it differently.  */
8083 
8084 static rtx
8085 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
8086 		    bool avoid_temp_mem)
8087 {
8088   tree type = TREE_TYPE (exp);
8089   machine_mode mode = TYPE_MODE (type);
8090 
8091   /* Try to avoid creating a temporary at all.  This is possible
8092      if all of the initializer is zero.
8093      FIXME: try to handle all [0..255] initializers we can handle
8094      with memset.  */
8095   if (TREE_STATIC (exp)
8096       && !TREE_ADDRESSABLE (exp)
8097       && target != 0 && mode == BLKmode
8098       && all_zeros_p (exp))
8099     {
8100       clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
8101       return target;
8102     }
8103 
8104   /* All elts simple constants => refer to a constant in memory.  But
8105      if this is a non-BLKmode mode, let it store a field at a time
8106      since that should make a CONST_INT, CONST_WIDE_INT or
8107      CONST_DOUBLE when we fold.  Likewise, if we have a target we can
8108      use, it is best to store directly into the target unless the type
8109      is large enough that memcpy will be used.  If we are making an
8110      initializer and all operands are constant, put it in memory as
8111      well.
8112 
8113      FIXME: Avoid trying to fill vector constructors piece-meal.
8114      Output them with output_constant_def below unless we're sure
8115      they're zeros.  This should go away when vector initializers
8116      are treated like VECTOR_CST instead of arrays.  */
8117   if ((TREE_STATIC (exp)
8118        && ((mode == BLKmode
8119 	    && ! (target != 0 && safe_from_p (target, exp, 1)))
8120 		  || TREE_ADDRESSABLE (exp)
8121 		  || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
8122 		      && (! can_move_by_pieces
8123 				     (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
8124 				      TYPE_ALIGN (type)))
8125 		      && ! mostly_zeros_p (exp))))
8126       || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
8127 	  && TREE_CONSTANT (exp)))
8128     {
8129       rtx constructor;
8130 
8131       if (avoid_temp_mem)
8132 	return NULL_RTX;
8133 
8134       constructor = expand_expr_constant (exp, 1, modifier);
8135 
8136       if (modifier != EXPAND_CONST_ADDRESS
8137 	  && modifier != EXPAND_INITIALIZER
8138 	  && modifier != EXPAND_SUM)
8139 	constructor = validize_mem (constructor);
8140 
8141       return constructor;
8142     }
8143 
8144   /* Handle calls that pass values in multiple non-contiguous
8145      locations.  The Irix 6 ABI has examples of this.  */
8146   if (target == 0 || ! safe_from_p (target, exp, 1)
8147       || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
8148     {
8149       if (avoid_temp_mem)
8150 	return NULL_RTX;
8151 
8152       target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
8153     }
8154 
8155   store_constructor (exp, target, 0, int_expr_size (exp), false);
8156   return target;
8157 }
8158 
8159 
8160 /* expand_expr: generate code for computing expression EXP.
8161    An rtx for the computed value is returned.  The value is never null.
8162    In the case of a void EXP, const0_rtx is returned.
8163 
8164    The value may be stored in TARGET if TARGET is nonzero.
8165    TARGET is just a suggestion; callers must assume that
8166    the rtx returned may not be the same as TARGET.
8167 
8168    If TARGET is CONST0_RTX, it means that the value will be ignored.
8169 
8170    If TMODE is not VOIDmode, it suggests generating the
8171    result in mode TMODE.  But this is done only when convenient.
8172    Otherwise, TMODE is ignored and the value generated in its natural mode.
8173    TMODE is just a suggestion; callers must assume that
8174    the rtx returned may not have mode TMODE.
8175 
8176    Note that TARGET may have neither TMODE nor MODE.  In that case, it
8177    probably will not be used.
8178 
8179    If MODIFIER is EXPAND_SUM then when EXP is an addition
8180    we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
8181    or a nest of (PLUS ...) and (MINUS ...) where the terms are
8182    products as above, or REG or MEM, or constant.
8183    Ordinarily in such cases we would output mul or add instructions
8184    and then return a pseudo reg containing the sum.
8185 
8186    EXPAND_INITIALIZER is much like EXPAND_SUM except that
8187    it also marks a label as absolutely required (it can't be dead).
8188    It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
8189    This is used for outputting expressions used in initializers.
8190 
8191    EXPAND_CONST_ADDRESS says that it is okay to return a MEM
8192    with a constant address even if that address is not normally legitimate.
8193    EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
8194 
8195    EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
8196    a call parameter.  Such targets require special care as we haven't yet
8197    marked TARGET so that it's safe from being trashed by libcalls.  We
8198    don't want to use TARGET for anything but the final result;
8199    Intermediate values must go elsewhere.   Additionally, calls to
8200    emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
8201 
8202    If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
8203    address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
8204    DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
8205    COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
8206    recursively.
8207 
8208    If INNER_REFERENCE_P is true, we are expanding an inner reference.
8209    In this case, we don't adjust a returned MEM rtx that wouldn't be
8210    sufficiently aligned for its mode; instead, it's up to the caller
8211    to deal with it afterwards.  This is used to make sure that unaligned
8212    base objects for which out-of-bounds accesses are supported, for
8213    example record types with trailing arrays, aren't realigned behind
8214    the back of the caller.
8215    The normal operating mode is to pass FALSE for this parameter.  */
8216 
8217 rtx
8218 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8219 		  enum expand_modifier modifier, rtx *alt_rtl,
8220 		  bool inner_reference_p)
8221 {
8222   rtx ret;
8223 
8224   /* Handle ERROR_MARK before anybody tries to access its type.  */
8225   if (TREE_CODE (exp) == ERROR_MARK
8226       || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8227     {
8228       ret = CONST0_RTX (tmode);
8229       return ret ? ret : const0_rtx;
8230     }
8231 
8232   ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8233 			    inner_reference_p);
8234   return ret;
8235 }
8236 
8237 /* Try to expand the conditional expression which is represented by
8238    TREEOP0 ? TREEOP1 : TREEOP2 using conditonal moves.  If it succeeds
8239    return the rtl reg which represents the result.  Otherwise return
8240    NULL_RTX.  */
8241 
8242 static rtx
8243 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8244 			      tree treeop1 ATTRIBUTE_UNUSED,
8245 			      tree treeop2 ATTRIBUTE_UNUSED)
8246 {
8247   rtx insn;
8248   rtx op00, op01, op1, op2;
8249   enum rtx_code comparison_code;
8250   machine_mode comparison_mode;
8251   gimple *srcstmt;
8252   rtx temp;
8253   tree type = TREE_TYPE (treeop1);
8254   int unsignedp = TYPE_UNSIGNED (type);
8255   machine_mode mode = TYPE_MODE (type);
8256   machine_mode orig_mode = mode;
8257   static bool expanding_cond_expr_using_cmove = false;
8258 
8259   /* Conditional move expansion can end up TERing two operands which,
8260      when recursively hitting conditional expressions can result in
8261      exponential behavior if the cmove expansion ultimatively fails.
8262      It's hardly profitable to TER a cmove into a cmove so avoid doing
8263      that by failing early if we end up recursing.  */
8264   if (expanding_cond_expr_using_cmove)
8265     return NULL_RTX;
8266 
8267   /* If we cannot do a conditional move on the mode, try doing it
8268      with the promoted mode. */
8269   if (!can_conditionally_move_p (mode))
8270     {
8271       mode = promote_mode (type, mode, &unsignedp);
8272       if (!can_conditionally_move_p (mode))
8273 	return NULL_RTX;
8274       temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
8275     }
8276   else
8277     temp = assign_temp (type, 0, 1);
8278 
8279   expanding_cond_expr_using_cmove = true;
8280   start_sequence ();
8281   expand_operands (treeop1, treeop2,
8282 		   temp, &op1, &op2, EXPAND_NORMAL);
8283 
8284   if (TREE_CODE (treeop0) == SSA_NAME
8285       && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8286     {
8287       tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8288       enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8289       op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8290       op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8291       comparison_mode = TYPE_MODE (type);
8292       unsignedp = TYPE_UNSIGNED (type);
8293       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8294     }
8295   else if (COMPARISON_CLASS_P (treeop0))
8296     {
8297       tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8298       enum tree_code cmpcode = TREE_CODE (treeop0);
8299       op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8300       op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8301       unsignedp = TYPE_UNSIGNED (type);
8302       comparison_mode = TYPE_MODE (type);
8303       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8304     }
8305   else
8306     {
8307       op00 = expand_normal (treeop0);
8308       op01 = const0_rtx;
8309       comparison_code = NE;
8310       comparison_mode = GET_MODE (op00);
8311       if (comparison_mode == VOIDmode)
8312 	comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8313     }
8314   expanding_cond_expr_using_cmove = false;
8315 
8316   if (GET_MODE (op1) != mode)
8317     op1 = gen_lowpart (mode, op1);
8318 
8319   if (GET_MODE (op2) != mode)
8320     op2 = gen_lowpart (mode, op2);
8321 
8322   /* Try to emit the conditional move.  */
8323   insn = emit_conditional_move (temp, comparison_code,
8324 				op00, op01, comparison_mode,
8325 				op1, op2, mode,
8326 				unsignedp);
8327 
8328   /* If we could do the conditional move, emit the sequence,
8329      and return.  */
8330   if (insn)
8331     {
8332       rtx_insn *seq = get_insns ();
8333       end_sequence ();
8334       emit_insn (seq);
8335       return convert_modes (orig_mode, mode, temp, 0);
8336     }
8337 
8338   /* Otherwise discard the sequence and fall back to code with
8339      branches.  */
8340   end_sequence ();
8341   return NULL_RTX;
8342 }
8343 
8344 rtx
8345 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8346 		    enum expand_modifier modifier)
8347 {
8348   rtx op0, op1, op2, temp;
8349   rtx_code_label *lab;
8350   tree type;
8351   int unsignedp;
8352   machine_mode mode;
8353   scalar_int_mode int_mode;
8354   enum tree_code code = ops->code;
8355   optab this_optab;
8356   rtx subtarget, original_target;
8357   int ignore;
8358   bool reduce_bit_field;
8359   location_t loc = ops->location;
8360   tree treeop0, treeop1, treeop2;
8361 #define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
8362 				 ? reduce_to_bit_field_precision ((expr), \
8363 								  target, \
8364 								  type)	  \
8365 				 : (expr))
8366 
8367   type = ops->type;
8368   mode = TYPE_MODE (type);
8369   unsignedp = TYPE_UNSIGNED (type);
8370 
8371   treeop0 = ops->op0;
8372   treeop1 = ops->op1;
8373   treeop2 = ops->op2;
8374 
8375   /* We should be called only on simple (binary or unary) expressions,
8376      exactly those that are valid in gimple expressions that aren't
8377      GIMPLE_SINGLE_RHS (or invalid).  */
8378   gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8379 	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8380 	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8381 
8382   ignore = (target == const0_rtx
8383 	    || ((CONVERT_EXPR_CODE_P (code)
8384 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8385 		&& TREE_CODE (type) == VOID_TYPE));
8386 
8387   /* We should be called only if we need the result.  */
8388   gcc_assert (!ignore);
8389 
8390   /* An operation in what may be a bit-field type needs the
8391      result to be reduced to the precision of the bit-field type,
8392      which is narrower than that of the type's mode.  */
8393   reduce_bit_field = (INTEGRAL_TYPE_P (type)
8394 		      && !type_has_mode_precision_p (type));
8395 
8396   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8397     target = 0;
8398 
8399   /* Use subtarget as the target for operand 0 of a binary operation.  */
8400   subtarget = get_subtarget (target);
8401   original_target = target;
8402 
8403   switch (code)
8404     {
8405     case NON_LVALUE_EXPR:
8406     case PAREN_EXPR:
8407     CASE_CONVERT:
8408       if (treeop0 == error_mark_node)
8409 	return const0_rtx;
8410 
8411       if (TREE_CODE (type) == UNION_TYPE)
8412 	{
8413 	  tree valtype = TREE_TYPE (treeop0);
8414 
8415 	  /* If both input and output are BLKmode, this conversion isn't doing
8416 	     anything except possibly changing memory attribute.  */
8417 	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8418 	    {
8419 	      rtx result = expand_expr (treeop0, target, tmode,
8420 					modifier);
8421 
8422 	      result = copy_rtx (result);
8423 	      set_mem_attributes (result, type, 0);
8424 	      return result;
8425 	    }
8426 
8427 	  if (target == 0)
8428 	    {
8429 	      if (TYPE_MODE (type) != BLKmode)
8430 		target = gen_reg_rtx (TYPE_MODE (type));
8431 	      else
8432 		target = assign_temp (type, 1, 1);
8433 	    }
8434 
8435 	  if (MEM_P (target))
8436 	    /* Store data into beginning of memory target.  */
8437 	    store_expr (treeop0,
8438 			adjust_address (target, TYPE_MODE (valtype), 0),
8439 			modifier == EXPAND_STACK_PARM,
8440 			false, TYPE_REVERSE_STORAGE_ORDER (type));
8441 
8442 	  else
8443 	    {
8444 	      gcc_assert (REG_P (target)
8445 			  && !TYPE_REVERSE_STORAGE_ORDER (type));
8446 
8447 	      /* Store this field into a union of the proper type.  */
8448 	      poly_uint64 op0_size
8449 		= tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (treeop0)));
8450 	      poly_uint64 union_size = GET_MODE_BITSIZE (mode);
8451 	      store_field (target,
8452 			   /* The conversion must be constructed so that
8453 			      we know at compile time how many bits
8454 			      to preserve.  */
8455 			   ordered_min (op0_size, union_size),
8456 			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8457 			   false, false);
8458 	    }
8459 
8460 	  /* Return the entire union.  */
8461 	  return target;
8462 	}
8463 
8464       if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8465 	{
8466 	  op0 = expand_expr (treeop0, target, VOIDmode,
8467 			     modifier);
8468 
8469 	  /* If the signedness of the conversion differs and OP0 is
8470 	     a promoted SUBREG, clear that indication since we now
8471 	     have to do the proper extension.  */
8472 	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8473 	      && GET_CODE (op0) == SUBREG)
8474 	    SUBREG_PROMOTED_VAR_P (op0) = 0;
8475 
8476 	  return REDUCE_BIT_FIELD (op0);
8477 	}
8478 
8479       op0 = expand_expr (treeop0, NULL_RTX, mode,
8480 			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8481       if (GET_MODE (op0) == mode)
8482 	;
8483 
8484       /* If OP0 is a constant, just convert it into the proper mode.  */
8485       else if (CONSTANT_P (op0))
8486 	{
8487 	  tree inner_type = TREE_TYPE (treeop0);
8488 	  machine_mode inner_mode = GET_MODE (op0);
8489 
8490 	  if (inner_mode == VOIDmode)
8491 	    inner_mode = TYPE_MODE (inner_type);
8492 
8493 	  if (modifier == EXPAND_INITIALIZER)
8494 	    op0 = lowpart_subreg (mode, op0, inner_mode);
8495 	  else
8496 	    op0=  convert_modes (mode, inner_mode, op0,
8497 				 TYPE_UNSIGNED (inner_type));
8498 	}
8499 
8500       else if (modifier == EXPAND_INITIALIZER)
8501 	op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8502 			     ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8503 
8504       else if (target == 0)
8505 	op0 = convert_to_mode (mode, op0,
8506 			       TYPE_UNSIGNED (TREE_TYPE
8507 					      (treeop0)));
8508       else
8509 	{
8510 	  convert_move (target, op0,
8511 			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8512 	  op0 = target;
8513 	}
8514 
8515       return REDUCE_BIT_FIELD (op0);
8516 
8517     case ADDR_SPACE_CONVERT_EXPR:
8518       {
8519 	tree treeop0_type = TREE_TYPE (treeop0);
8520 
8521 	gcc_assert (POINTER_TYPE_P (type));
8522 	gcc_assert (POINTER_TYPE_P (treeop0_type));
8523 
8524 	addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8525 	addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8526 
8527         /* Conversions between pointers to the same address space should
8528 	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
8529 	gcc_assert (as_to != as_from);
8530 
8531 	op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8532 
8533         /* Ask target code to handle conversion between pointers
8534 	   to overlapping address spaces.  */
8535 	if (targetm.addr_space.subset_p (as_to, as_from)
8536 	    || targetm.addr_space.subset_p (as_from, as_to))
8537 	  {
8538 	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8539 	  }
8540         else
8541           {
8542 	    /* For disjoint address spaces, converting anything but a null
8543 	       pointer invokes undefined behavior.  We truncate or extend the
8544 	       value as if we'd converted via integers, which handles 0 as
8545 	       required, and all others as the programmer likely expects.  */
8546 #ifndef POINTERS_EXTEND_UNSIGNED
8547 	    const int POINTERS_EXTEND_UNSIGNED = 1;
8548 #endif
8549 	    op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8550 				 op0, POINTERS_EXTEND_UNSIGNED);
8551 	  }
8552 	gcc_assert (op0);
8553 	return op0;
8554       }
8555 
8556     case POINTER_PLUS_EXPR:
8557       /* Even though the sizetype mode and the pointer's mode can be different
8558          expand is able to handle this correctly and get the correct result out
8559          of the PLUS_EXPR code.  */
8560       /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8561          if sizetype precision is smaller than pointer precision.  */
8562       if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8563 	treeop1 = fold_convert_loc (loc, type,
8564 				    fold_convert_loc (loc, ssizetype,
8565 						      treeop1));
8566       /* If sizetype precision is larger than pointer precision, truncate the
8567 	 offset to have matching modes.  */
8568       else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8569 	treeop1 = fold_convert_loc (loc, type, treeop1);
8570       /* FALLTHRU */
8571 
8572     case PLUS_EXPR:
8573       /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8574 	 something else, make sure we add the register to the constant and
8575 	 then to the other thing.  This case can occur during strength
8576 	 reduction and doing it this way will produce better code if the
8577 	 frame pointer or argument pointer is eliminated.
8578 
8579 	 fold-const.c will ensure that the constant is always in the inner
8580 	 PLUS_EXPR, so the only case we need to do anything about is if
8581 	 sp, ap, or fp is our second argument, in which case we must swap
8582 	 the innermost first argument and our second argument.  */
8583 
8584       if (TREE_CODE (treeop0) == PLUS_EXPR
8585 	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8586 	  && VAR_P (treeop1)
8587 	  && (DECL_RTL (treeop1) == frame_pointer_rtx
8588 	      || DECL_RTL (treeop1) == stack_pointer_rtx
8589 	      || DECL_RTL (treeop1) == arg_pointer_rtx))
8590 	{
8591 	  gcc_unreachable ();
8592 	}
8593 
8594       /* If the result is to be ptr_mode and we are adding an integer to
8595 	 something, we might be forming a constant.  So try to use
8596 	 plus_constant.  If it produces a sum and we can't accept it,
8597 	 use force_operand.  This allows P = &ARR[const] to generate
8598 	 efficient code on machines where a SYMBOL_REF is not a valid
8599 	 address.
8600 
8601 	 If this is an EXPAND_SUM call, always return the sum.  */
8602       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8603 	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8604 	{
8605 	  if (modifier == EXPAND_STACK_PARM)
8606 	    target = 0;
8607 	  if (TREE_CODE (treeop0) == INTEGER_CST
8608 	      && HWI_COMPUTABLE_MODE_P (mode)
8609 	      && TREE_CONSTANT (treeop1))
8610 	    {
8611 	      rtx constant_part;
8612 	      HOST_WIDE_INT wc;
8613 	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8614 
8615 	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
8616 				 EXPAND_SUM);
8617 	      /* Use wi::shwi to ensure that the constant is
8618 		 truncated according to the mode of OP1, then sign extended
8619 		 to a HOST_WIDE_INT.  Using the constant directly can result
8620 		 in non-canonical RTL in a 64x32 cross compile.  */
8621 	      wc = TREE_INT_CST_LOW (treeop0);
8622 	      constant_part =
8623 		immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8624 	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
8625 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8626 		op1 = force_operand (op1, target);
8627 	      return REDUCE_BIT_FIELD (op1);
8628 	    }
8629 
8630 	  else if (TREE_CODE (treeop1) == INTEGER_CST
8631 		   && HWI_COMPUTABLE_MODE_P (mode)
8632 		   && TREE_CONSTANT (treeop0))
8633 	    {
8634 	      rtx constant_part;
8635 	      HOST_WIDE_INT wc;
8636 	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8637 
8638 	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
8639 				 (modifier == EXPAND_INITIALIZER
8640 				 ? EXPAND_INITIALIZER : EXPAND_SUM));
8641 	      if (! CONSTANT_P (op0))
8642 		{
8643 		  op1 = expand_expr (treeop1, NULL_RTX,
8644 				     VOIDmode, modifier);
8645 		  /* Return a PLUS if modifier says it's OK.  */
8646 		  if (modifier == EXPAND_SUM
8647 		      || modifier == EXPAND_INITIALIZER)
8648 		    return simplify_gen_binary (PLUS, mode, op0, op1);
8649 		  goto binop2;
8650 		}
8651 	      /* Use wi::shwi to ensure that the constant is
8652 		 truncated according to the mode of OP1, then sign extended
8653 		 to a HOST_WIDE_INT.  Using the constant directly can result
8654 		 in non-canonical RTL in a 64x32 cross compile.  */
8655 	      wc = TREE_INT_CST_LOW (treeop1);
8656 	      constant_part
8657 		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8658 	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
8659 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8660 		op0 = force_operand (op0, target);
8661 	      return REDUCE_BIT_FIELD (op0);
8662 	    }
8663 	}
8664 
8665       /* Use TER to expand pointer addition of a negated value
8666 	 as pointer subtraction.  */
8667       if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8668 	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8669 	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8670 	  && TREE_CODE (treeop1) == SSA_NAME
8671 	  && TYPE_MODE (TREE_TYPE (treeop0))
8672 	     == TYPE_MODE (TREE_TYPE (treeop1)))
8673 	{
8674 	  gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8675 	  if (def)
8676 	    {
8677 	      treeop1 = gimple_assign_rhs1 (def);
8678 	      code = MINUS_EXPR;
8679 	      goto do_minus;
8680 	    }
8681 	}
8682 
8683       /* No sense saving up arithmetic to be done
8684 	 if it's all in the wrong mode to form part of an address.
8685 	 And force_operand won't know whether to sign-extend or
8686 	 zero-extend.  */
8687       if (modifier != EXPAND_INITIALIZER
8688 	  && (modifier != EXPAND_SUM || mode != ptr_mode))
8689 	{
8690 	  expand_operands (treeop0, treeop1,
8691 			   subtarget, &op0, &op1, modifier);
8692 	  if (op0 == const0_rtx)
8693 	    return op1;
8694 	  if (op1 == const0_rtx)
8695 	    return op0;
8696 	  goto binop2;
8697 	}
8698 
8699       expand_operands (treeop0, treeop1,
8700 		       subtarget, &op0, &op1, modifier);
8701       return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8702 
8703     case MINUS_EXPR:
8704     case POINTER_DIFF_EXPR:
8705     do_minus:
8706       /* For initializers, we are allowed to return a MINUS of two
8707 	 symbolic constants.  Here we handle all cases when both operands
8708 	 are constant.  */
8709       /* Handle difference of two symbolic constants,
8710 	 for the sake of an initializer.  */
8711       if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8712 	  && really_constant_p (treeop0)
8713 	  && really_constant_p (treeop1))
8714 	{
8715 	  expand_operands (treeop0, treeop1,
8716 			   NULL_RTX, &op0, &op1, modifier);
8717 	  return simplify_gen_binary (MINUS, mode, op0, op1);
8718 	}
8719 
8720       /* No sense saving up arithmetic to be done
8721 	 if it's all in the wrong mode to form part of an address.
8722 	 And force_operand won't know whether to sign-extend or
8723 	 zero-extend.  */
8724       if (modifier != EXPAND_INITIALIZER
8725 	  && (modifier != EXPAND_SUM || mode != ptr_mode))
8726 	goto binop;
8727 
8728       expand_operands (treeop0, treeop1,
8729 		       subtarget, &op0, &op1, modifier);
8730 
8731       /* Convert A - const to A + (-const).  */
8732       if (CONST_INT_P (op1))
8733 	{
8734 	  op1 = negate_rtx (mode, op1);
8735 	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8736 	}
8737 
8738       goto binop2;
8739 
8740     case WIDEN_MULT_PLUS_EXPR:
8741     case WIDEN_MULT_MINUS_EXPR:
8742       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8743       op2 = expand_normal (treeop2);
8744       target = expand_widen_pattern_expr (ops, op0, op1, op2,
8745 					  target, unsignedp);
8746       return target;
8747 
8748     case WIDEN_MULT_EXPR:
8749       /* If first operand is constant, swap them.
8750 	 Thus the following special case checks need only
8751 	 check the second operand.  */
8752       if (TREE_CODE (treeop0) == INTEGER_CST)
8753 	std::swap (treeop0, treeop1);
8754 
8755       /* First, check if we have a multiplication of one signed and one
8756 	 unsigned operand.  */
8757       if (TREE_CODE (treeop1) != INTEGER_CST
8758 	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8759 	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8760 	{
8761 	  machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8762 	  this_optab = usmul_widen_optab;
8763 	  if (find_widening_optab_handler (this_optab, mode, innermode)
8764 		!= CODE_FOR_nothing)
8765 	    {
8766 	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8767 		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8768 				 EXPAND_NORMAL);
8769 	      else
8770 		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8771 				 EXPAND_NORMAL);
8772 	      /* op0 and op1 might still be constant, despite the above
8773 		 != INTEGER_CST check.  Handle it.  */
8774 	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8775 		{
8776 		  op0 = convert_modes (innermode, mode, op0, true);
8777 		  op1 = convert_modes (innermode, mode, op1, false);
8778 		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8779 							target, unsignedp));
8780 		}
8781 	      goto binop3;
8782 	    }
8783 	}
8784       /* Check for a multiplication with matching signedness.  */
8785       else if ((TREE_CODE (treeop1) == INTEGER_CST
8786 		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8787 	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8788 		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8789 	{
8790 	  tree op0type = TREE_TYPE (treeop0);
8791 	  machine_mode innermode = TYPE_MODE (op0type);
8792 	  bool zextend_p = TYPE_UNSIGNED (op0type);
8793 	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8794 	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8795 
8796 	  if (TREE_CODE (treeop0) != INTEGER_CST)
8797 	    {
8798 	      if (find_widening_optab_handler (this_optab, mode, innermode)
8799 		    != CODE_FOR_nothing)
8800 		{
8801 		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8802 				   EXPAND_NORMAL);
8803 		  /* op0 and op1 might still be constant, despite the above
8804 		     != INTEGER_CST check.  Handle it.  */
8805 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8806 		    {
8807 		     widen_mult_const:
8808 		      op0 = convert_modes (innermode, mode, op0, zextend_p);
8809 		      op1
8810 			= convert_modes (innermode, mode, op1,
8811 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8812 		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8813 							    target,
8814 							    unsignedp));
8815 		    }
8816 		  temp = expand_widening_mult (mode, op0, op1, target,
8817 					       unsignedp, this_optab);
8818 		  return REDUCE_BIT_FIELD (temp);
8819 		}
8820 	      if (find_widening_optab_handler (other_optab, mode, innermode)
8821 		    != CODE_FOR_nothing
8822 		  && innermode == word_mode)
8823 		{
8824 		  rtx htem, hipart;
8825 		  op0 = expand_normal (treeop0);
8826 		  if (TREE_CODE (treeop1) == INTEGER_CST)
8827 		    op1 = convert_modes (word_mode, mode,
8828 					 expand_normal (treeop1),
8829 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8830 		  else
8831 		    op1 = expand_normal (treeop1);
8832 		  /* op0 and op1 might still be constant, despite the above
8833 		     != INTEGER_CST check.  Handle it.  */
8834 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8835 		    goto widen_mult_const;
8836 		  temp = expand_binop (mode, other_optab, op0, op1, target,
8837 				       unsignedp, OPTAB_LIB_WIDEN);
8838 		  hipart = gen_highpart (word_mode, temp);
8839 		  htem = expand_mult_highpart_adjust (word_mode, hipart,
8840 						      op0, op1, hipart,
8841 						      zextend_p);
8842 		  if (htem != hipart)
8843 		    emit_move_insn (hipart, htem);
8844 		  return REDUCE_BIT_FIELD (temp);
8845 		}
8846 	    }
8847 	}
8848       treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8849       treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8850       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8851       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8852 
8853     case FMA_EXPR:
8854       {
8855 	optab opt = fma_optab;
8856 	gimple *def0, *def2;
8857 
8858 	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8859 	   call.  */
8860 	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8861 	  {
8862 	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8863 	    tree call_expr;
8864 
8865 	    gcc_assert (fn != NULL_TREE);
8866 	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8867 	    return expand_builtin (call_expr, target, subtarget, mode, false);
8868 	  }
8869 
8870 	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8871 	/* The multiplication is commutative - look at its 2nd operand
8872 	   if the first isn't fed by a negate.  */
8873 	if (!def0)
8874 	  {
8875 	    def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8876 	    /* Swap operands if the 2nd operand is fed by a negate.  */
8877 	    if (def0)
8878 	      std::swap (treeop0, treeop1);
8879 	  }
8880 	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8881 
8882 	op0 = op2 = NULL;
8883 
8884 	if (def0 && def2
8885 	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8886 	  {
8887 	    opt = fnms_optab;
8888 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8889 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8890 	  }
8891 	else if (def0
8892 		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8893 	  {
8894 	    opt = fnma_optab;
8895 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8896 	  }
8897 	else if (def2
8898 		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8899 	  {
8900 	    opt = fms_optab;
8901 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8902 	  }
8903 
8904 	if (op0 == NULL)
8905 	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8906 	if (op2 == NULL)
8907 	  op2 = expand_normal (treeop2);
8908 	op1 = expand_normal (treeop1);
8909 
8910 	return expand_ternary_op (TYPE_MODE (type), opt,
8911 				  op0, op1, op2, target, 0);
8912       }
8913 
8914     case MULT_EXPR:
8915       /* If this is a fixed-point operation, then we cannot use the code
8916 	 below because "expand_mult" doesn't support sat/no-sat fixed-point
8917          multiplications.   */
8918       if (ALL_FIXED_POINT_MODE_P (mode))
8919 	goto binop;
8920 
8921       /* If first operand is constant, swap them.
8922 	 Thus the following special case checks need only
8923 	 check the second operand.  */
8924       if (TREE_CODE (treeop0) == INTEGER_CST)
8925 	std::swap (treeop0, treeop1);
8926 
8927       /* Attempt to return something suitable for generating an
8928 	 indexed address, for machines that support that.  */
8929 
8930       if (modifier == EXPAND_SUM && mode == ptr_mode
8931 	  && tree_fits_shwi_p (treeop1))
8932 	{
8933 	  tree exp1 = treeop1;
8934 
8935 	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
8936 			     EXPAND_SUM);
8937 
8938 	  if (!REG_P (op0))
8939 	    op0 = force_operand (op0, NULL_RTX);
8940 	  if (!REG_P (op0))
8941 	    op0 = copy_to_mode_reg (mode, op0);
8942 
8943 	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8944 			       gen_int_mode (tree_to_shwi (exp1),
8945 					     TYPE_MODE (TREE_TYPE (exp1)))));
8946 	}
8947 
8948       if (modifier == EXPAND_STACK_PARM)
8949 	target = 0;
8950 
8951       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8952       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8953 
8954     case TRUNC_MOD_EXPR:
8955     case FLOOR_MOD_EXPR:
8956     case CEIL_MOD_EXPR:
8957     case ROUND_MOD_EXPR:
8958 
8959     case TRUNC_DIV_EXPR:
8960     case FLOOR_DIV_EXPR:
8961     case CEIL_DIV_EXPR:
8962     case ROUND_DIV_EXPR:
8963     case EXACT_DIV_EXPR:
8964      {
8965        /* If this is a fixed-point operation, then we cannot use the code
8966 	  below because "expand_divmod" doesn't support sat/no-sat fixed-point
8967 	  divisions.   */
8968        if (ALL_FIXED_POINT_MODE_P (mode))
8969 	 goto binop;
8970 
8971        if (modifier == EXPAND_STACK_PARM)
8972 	 target = 0;
8973        /* Possible optimization: compute the dividend with EXPAND_SUM
8974 	  then if the divisor is constant can optimize the case
8975 	  where some terms of the dividend have coeffs divisible by it.  */
8976        expand_operands (treeop0, treeop1,
8977 			subtarget, &op0, &op1, EXPAND_NORMAL);
8978        bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
8979 		    || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
8980        if (SCALAR_INT_MODE_P (mode)
8981 	   && optimize >= 2
8982 	   && get_range_pos_neg (treeop0) == 1
8983 	   && get_range_pos_neg (treeop1) == 1)
8984 	 {
8985 	   /* If both arguments are known to be positive when interpreted
8986 	      as signed, we can expand it as both signed and unsigned
8987 	      division or modulo.  Choose the cheaper sequence in that case.  */
8988 	   bool speed_p = optimize_insn_for_speed_p ();
8989 	   do_pending_stack_adjust ();
8990 	   start_sequence ();
8991 	   rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
8992 	   rtx_insn *uns_insns = get_insns ();
8993 	   end_sequence ();
8994 	   start_sequence ();
8995 	   rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
8996 	   rtx_insn *sgn_insns = get_insns ();
8997 	   end_sequence ();
8998 	   unsigned uns_cost = seq_cost (uns_insns, speed_p);
8999 	   unsigned sgn_cost = seq_cost (sgn_insns, speed_p);
9000 
9001 	   /* If costs are the same then use as tie breaker the other
9002 	      other factor.  */
9003 	   if (uns_cost == sgn_cost)
9004 	     {
9005 		uns_cost = seq_cost (uns_insns, !speed_p);
9006 		sgn_cost = seq_cost (sgn_insns, !speed_p);
9007 	     }
9008 
9009 	   if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
9010 	     {
9011 	       emit_insn (uns_insns);
9012 	       return uns_ret;
9013 	     }
9014 	   emit_insn (sgn_insns);
9015 	   return sgn_ret;
9016 	 }
9017        return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
9018      }
9019     case RDIV_EXPR:
9020       goto binop;
9021 
9022     case MULT_HIGHPART_EXPR:
9023       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
9024       temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
9025       gcc_assert (temp);
9026       return temp;
9027 
9028     case FIXED_CONVERT_EXPR:
9029       op0 = expand_normal (treeop0);
9030       if (target == 0 || modifier == EXPAND_STACK_PARM)
9031 	target = gen_reg_rtx (mode);
9032 
9033       if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
9034 	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
9035           || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
9036 	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
9037       else
9038 	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
9039       return target;
9040 
9041     case FIX_TRUNC_EXPR:
9042       op0 = expand_normal (treeop0);
9043       if (target == 0 || modifier == EXPAND_STACK_PARM)
9044 	target = gen_reg_rtx (mode);
9045       expand_fix (target, op0, unsignedp);
9046       return target;
9047 
9048     case FLOAT_EXPR:
9049       op0 = expand_normal (treeop0);
9050       if (target == 0 || modifier == EXPAND_STACK_PARM)
9051 	target = gen_reg_rtx (mode);
9052       /* expand_float can't figure out what to do if FROM has VOIDmode.
9053 	 So give it the correct mode.  With -O, cse will optimize this.  */
9054       if (GET_MODE (op0) == VOIDmode)
9055 	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
9056 				op0);
9057       expand_float (target, op0,
9058 		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9059       return target;
9060 
9061     case NEGATE_EXPR:
9062       op0 = expand_expr (treeop0, subtarget,
9063 			 VOIDmode, EXPAND_NORMAL);
9064       if (modifier == EXPAND_STACK_PARM)
9065 	target = 0;
9066       temp = expand_unop (mode,
9067       			  optab_for_tree_code (NEGATE_EXPR, type,
9068 					       optab_default),
9069 			  op0, target, 0);
9070       gcc_assert (temp);
9071       return REDUCE_BIT_FIELD (temp);
9072 
9073     case ABS_EXPR:
9074       op0 = expand_expr (treeop0, subtarget,
9075 			 VOIDmode, EXPAND_NORMAL);
9076       if (modifier == EXPAND_STACK_PARM)
9077 	target = 0;
9078 
9079       /* ABS_EXPR is not valid for complex arguments.  */
9080       gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9081 		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
9082 
9083       /* Unsigned abs is simply the operand.  Testing here means we don't
9084 	 risk generating incorrect code below.  */
9085       if (TYPE_UNSIGNED (type))
9086 	return op0;
9087 
9088       return expand_abs (mode, op0, target, unsignedp,
9089 			 safe_from_p (target, treeop0, 1));
9090 
9091     case MAX_EXPR:
9092     case MIN_EXPR:
9093       target = original_target;
9094       if (target == 0
9095 	  || modifier == EXPAND_STACK_PARM
9096 	  || (MEM_P (target) && MEM_VOLATILE_P (target))
9097 	  || GET_MODE (target) != mode
9098 	  || (REG_P (target)
9099 	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
9100 	target = gen_reg_rtx (mode);
9101       expand_operands (treeop0, treeop1,
9102 		       target, &op0, &op1, EXPAND_NORMAL);
9103 
9104       /* First try to do it with a special MIN or MAX instruction.
9105 	 If that does not win, use a conditional jump to select the proper
9106 	 value.  */
9107       this_optab = optab_for_tree_code (code, type, optab_default);
9108       temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9109 			   OPTAB_WIDEN);
9110       if (temp != 0)
9111 	return temp;
9112 
9113       /* For vector MIN <x, y>, expand it a VEC_COND_EXPR <x <= y, x, y>
9114 	 and similarly for MAX <x, y>.  */
9115       if (VECTOR_TYPE_P (type))
9116 	{
9117 	  tree t0 = make_tree (type, op0);
9118 	  tree t1 = make_tree (type, op1);
9119 	  tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
9120 				    type, t0, t1);
9121 	  return expand_vec_cond_expr (type, comparison, t0, t1,
9122 				       original_target);
9123 	}
9124 
9125       /* At this point, a MEM target is no longer useful; we will get better
9126 	 code without it.  */
9127 
9128       if (! REG_P (target))
9129 	target = gen_reg_rtx (mode);
9130 
9131       /* If op1 was placed in target, swap op0 and op1.  */
9132       if (target != op0 && target == op1)
9133 	std::swap (op0, op1);
9134 
9135       /* We generate better code and avoid problems with op1 mentioning
9136 	 target by forcing op1 into a pseudo if it isn't a constant.  */
9137       if (! CONSTANT_P (op1))
9138 	op1 = force_reg (mode, op1);
9139 
9140       {
9141 	enum rtx_code comparison_code;
9142 	rtx cmpop1 = op1;
9143 
9144 	if (code == MAX_EXPR)
9145 	  comparison_code = unsignedp ? GEU : GE;
9146 	else
9147 	  comparison_code = unsignedp ? LEU : LE;
9148 
9149 	/* Canonicalize to comparisons against 0.  */
9150 	if (op1 == const1_rtx)
9151 	  {
9152 	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
9153 	       or (a != 0 ? a : 1) for unsigned.
9154 	       For MIN we are safe converting (a <= 1 ? a : 1)
9155 	       into (a <= 0 ? a : 1)  */
9156 	    cmpop1 = const0_rtx;
9157 	    if (code == MAX_EXPR)
9158 	      comparison_code = unsignedp ? NE : GT;
9159 	  }
9160 	if (op1 == constm1_rtx && !unsignedp)
9161 	  {
9162 	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
9163 	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
9164 	    cmpop1 = const0_rtx;
9165 	    if (code == MIN_EXPR)
9166 	      comparison_code = LT;
9167 	  }
9168 
9169 	/* Use a conditional move if possible.  */
9170 	if (can_conditionally_move_p (mode))
9171 	  {
9172 	    rtx insn;
9173 
9174 	    start_sequence ();
9175 
9176 	    /* Try to emit the conditional move.  */
9177 	    insn = emit_conditional_move (target, comparison_code,
9178 					  op0, cmpop1, mode,
9179 					  op0, op1, mode,
9180 					  unsignedp);
9181 
9182 	    /* If we could do the conditional move, emit the sequence,
9183 	       and return.  */
9184 	    if (insn)
9185 	      {
9186 		rtx_insn *seq = get_insns ();
9187 		end_sequence ();
9188 		emit_insn (seq);
9189 		return target;
9190 	      }
9191 
9192 	    /* Otherwise discard the sequence and fall back to code with
9193 	       branches.  */
9194 	    end_sequence ();
9195 	  }
9196 
9197 	if (target != op0)
9198 	  emit_move_insn (target, op0);
9199 
9200 	lab = gen_label_rtx ();
9201 	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
9202 				 unsignedp, mode, NULL_RTX, NULL, lab,
9203 				 profile_probability::uninitialized ());
9204       }
9205       emit_move_insn (target, op1);
9206       emit_label (lab);
9207       return target;
9208 
9209     case BIT_NOT_EXPR:
9210       op0 = expand_expr (treeop0, subtarget,
9211 			 VOIDmode, EXPAND_NORMAL);
9212       if (modifier == EXPAND_STACK_PARM)
9213 	target = 0;
9214       /* In case we have to reduce the result to bitfield precision
9215 	 for unsigned bitfield expand this as XOR with a proper constant
9216 	 instead.  */
9217       if (reduce_bit_field && TYPE_UNSIGNED (type))
9218 	{
9219 	  int_mode = SCALAR_INT_TYPE_MODE (type);
9220 	  wide_int mask = wi::mask (TYPE_PRECISION (type),
9221 				    false, GET_MODE_PRECISION (int_mode));
9222 
9223 	  temp = expand_binop (int_mode, xor_optab, op0,
9224 			       immed_wide_int_const (mask, int_mode),
9225 			       target, 1, OPTAB_LIB_WIDEN);
9226 	}
9227       else
9228 	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
9229       gcc_assert (temp);
9230       return temp;
9231 
9232       /* ??? Can optimize bitwise operations with one arg constant.
9233 	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9234 	 and (a bitwise1 b) bitwise2 b (etc)
9235 	 but that is probably not worth while.  */
9236 
9237     case BIT_AND_EXPR:
9238     case BIT_IOR_EXPR:
9239     case BIT_XOR_EXPR:
9240       goto binop;
9241 
9242     case LROTATE_EXPR:
9243     case RROTATE_EXPR:
9244       gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
9245 		  || type_has_mode_precision_p (type));
9246       /* fall through */
9247 
9248     case LSHIFT_EXPR:
9249     case RSHIFT_EXPR:
9250       {
9251 	/* If this is a fixed-point operation, then we cannot use the code
9252 	   below because "expand_shift" doesn't support sat/no-sat fixed-point
9253 	   shifts.  */
9254 	if (ALL_FIXED_POINT_MODE_P (mode))
9255 	  goto binop;
9256 
9257 	if (! safe_from_p (subtarget, treeop1, 1))
9258 	  subtarget = 0;
9259 	if (modifier == EXPAND_STACK_PARM)
9260 	  target = 0;
9261 	op0 = expand_expr (treeop0, subtarget,
9262 			   VOIDmode, EXPAND_NORMAL);
9263 
9264 	/* Left shift optimization when shifting across word_size boundary.
9265 
9266 	   If mode == GET_MODE_WIDER_MODE (word_mode), then normally
9267 	   there isn't native instruction to support this wide mode
9268 	   left shift.  Given below scenario:
9269 
9270 	    Type A = (Type) B  << C
9271 
9272 	    |<		 T	    >|
9273 	    | dest_high  |  dest_low |
9274 
9275 			 | word_size |
9276 
9277 	   If the shift amount C caused we shift B to across the word
9278 	   size boundary, i.e part of B shifted into high half of
9279 	   destination register, and part of B remains in the low
9280 	   half, then GCC will use the following left shift expand
9281 	   logic:
9282 
9283 	   1. Initialize dest_low to B.
9284 	   2. Initialize every bit of dest_high to the sign bit of B.
9285 	   3. Logic left shift dest_low by C bit to finalize dest_low.
9286 	      The value of dest_low before this shift is kept in a temp D.
9287 	   4. Logic left shift dest_high by C.
9288 	   5. Logic right shift D by (word_size - C).
9289 	   6. Or the result of 4 and 5 to finalize dest_high.
9290 
9291 	   While, by checking gimple statements, if operand B is
9292 	   coming from signed extension, then we can simplify above
9293 	   expand logic into:
9294 
9295 	      1. dest_high = src_low >> (word_size - C).
9296 	      2. dest_low = src_low << C.
9297 
9298 	   We can use one arithmetic right shift to finish all the
9299 	   purpose of steps 2, 4, 5, 6, thus we reduce the steps
9300 	   needed from 6 into 2.
9301 
9302 	   The case is similar for zero extension, except that we
9303 	   initialize dest_high to zero rather than copies of the sign
9304 	   bit from B.  Furthermore, we need to use a logical right shift
9305 	   in this case.
9306 
9307 	   The choice of sign-extension versus zero-extension is
9308 	   determined entirely by whether or not B is signed and is
9309 	   independent of the current setting of unsignedp.  */
9310 
9311 	temp = NULL_RTX;
9312 	if (code == LSHIFT_EXPR
9313 	    && target
9314 	    && REG_P (target)
9315 	    && GET_MODE_2XWIDER_MODE (word_mode).exists (&int_mode)
9316 	    && mode == int_mode
9317 	    && TREE_CONSTANT (treeop1)
9318 	    && TREE_CODE (treeop0) == SSA_NAME)
9319 	  {
9320 	    gimple *def = SSA_NAME_DEF_STMT (treeop0);
9321 	    if (is_gimple_assign (def)
9322 		&& gimple_assign_rhs_code (def) == NOP_EXPR)
9323 	      {
9324 		scalar_int_mode rmode = SCALAR_INT_TYPE_MODE
9325 		  (TREE_TYPE (gimple_assign_rhs1 (def)));
9326 
9327 		if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (int_mode)
9328 		    && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
9329 		    && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
9330 			>= GET_MODE_BITSIZE (word_mode)))
9331 		  {
9332 		    rtx_insn *seq, *seq_old;
9333 		    poly_uint64 high_off = subreg_highpart_offset (word_mode,
9334 								   int_mode);
9335 		    bool extend_unsigned
9336 		      = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
9337 		    rtx low = lowpart_subreg (word_mode, op0, int_mode);
9338 		    rtx dest_low = lowpart_subreg (word_mode, target, int_mode);
9339 		    rtx dest_high = simplify_gen_subreg (word_mode, target,
9340 							 int_mode, high_off);
9341 		    HOST_WIDE_INT ramount = (BITS_PER_WORD
9342 					     - TREE_INT_CST_LOW (treeop1));
9343 		    tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
9344 
9345 		    start_sequence ();
9346 		    /* dest_high = src_low >> (word_size - C).  */
9347 		    temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
9348 						  rshift, dest_high,
9349 						  extend_unsigned);
9350 		    if (temp != dest_high)
9351 		      emit_move_insn (dest_high, temp);
9352 
9353 		    /* dest_low = src_low << C.  */
9354 		    temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
9355 						  treeop1, dest_low, unsignedp);
9356 		    if (temp != dest_low)
9357 		      emit_move_insn (dest_low, temp);
9358 
9359 		    seq = get_insns ();
9360 		    end_sequence ();
9361 		    temp = target ;
9362 
9363 		    if (have_insn_for (ASHIFT, int_mode))
9364 		      {
9365 			bool speed_p = optimize_insn_for_speed_p ();
9366 			start_sequence ();
9367 			rtx ret_old = expand_variable_shift (code, int_mode,
9368 							     op0, treeop1,
9369 							     target,
9370 							     unsignedp);
9371 
9372 			seq_old = get_insns ();
9373 			end_sequence ();
9374 			if (seq_cost (seq, speed_p)
9375 			    >= seq_cost (seq_old, speed_p))
9376 			  {
9377 			    seq = seq_old;
9378 			    temp = ret_old;
9379 			  }
9380 		      }
9381 		      emit_insn (seq);
9382 		  }
9383 	      }
9384 	  }
9385 
9386 	if (temp == NULL_RTX)
9387 	  temp = expand_variable_shift (code, mode, op0, treeop1, target,
9388 					unsignedp);
9389 	if (code == LSHIFT_EXPR)
9390 	  temp = REDUCE_BIT_FIELD (temp);
9391 	return temp;
9392       }
9393 
9394       /* Could determine the answer when only additive constants differ.  Also,
9395 	 the addition of one can be handled by changing the condition.  */
9396     case LT_EXPR:
9397     case LE_EXPR:
9398     case GT_EXPR:
9399     case GE_EXPR:
9400     case EQ_EXPR:
9401     case NE_EXPR:
9402     case UNORDERED_EXPR:
9403     case ORDERED_EXPR:
9404     case UNLT_EXPR:
9405     case UNLE_EXPR:
9406     case UNGT_EXPR:
9407     case UNGE_EXPR:
9408     case UNEQ_EXPR:
9409     case LTGT_EXPR:
9410       {
9411 	temp = do_store_flag (ops,
9412 			      modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9413 			      tmode != VOIDmode ? tmode : mode);
9414 	if (temp)
9415 	  return temp;
9416 
9417 	/* Use a compare and a jump for BLKmode comparisons, or for function
9418 	   type comparisons is have_canonicalize_funcptr_for_compare.  */
9419 
9420 	if ((target == 0
9421 	     || modifier == EXPAND_STACK_PARM
9422 	     || ! safe_from_p (target, treeop0, 1)
9423 	     || ! safe_from_p (target, treeop1, 1)
9424 	     /* Make sure we don't have a hard reg (such as function's return
9425 		value) live across basic blocks, if not optimizing.  */
9426 	     || (!optimize && REG_P (target)
9427 		 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9428 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9429 
9430 	emit_move_insn (target, const0_rtx);
9431 
9432 	rtx_code_label *lab1 = gen_label_rtx ();
9433 	jumpifnot_1 (code, treeop0, treeop1, lab1,
9434 		     profile_probability::uninitialized ());
9435 
9436 	if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9437 	  emit_move_insn (target, constm1_rtx);
9438 	else
9439 	  emit_move_insn (target, const1_rtx);
9440 
9441 	emit_label (lab1);
9442 	return target;
9443       }
9444     case COMPLEX_EXPR:
9445       /* Get the rtx code of the operands.  */
9446       op0 = expand_normal (treeop0);
9447       op1 = expand_normal (treeop1);
9448 
9449       if (!target)
9450 	target = gen_reg_rtx (TYPE_MODE (type));
9451       else
9452 	/* If target overlaps with op1, then either we need to force
9453 	   op1 into a pseudo (if target also overlaps with op0),
9454 	   or write the complex parts in reverse order.  */
9455 	switch (GET_CODE (target))
9456 	  {
9457 	  case CONCAT:
9458 	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9459 	      {
9460 		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9461 		  {
9462 		  complex_expr_force_op1:
9463 		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9464 		    emit_move_insn (temp, op1);
9465 		    op1 = temp;
9466 		    break;
9467 		  }
9468 	      complex_expr_swap_order:
9469 		/* Move the imaginary (op1) and real (op0) parts to their
9470 		   location.  */
9471 		write_complex_part (target, op1, true);
9472 		write_complex_part (target, op0, false);
9473 
9474 		return target;
9475 	      }
9476 	    break;
9477 	  case MEM:
9478 	    temp = adjust_address_nv (target,
9479 				      GET_MODE_INNER (GET_MODE (target)), 0);
9480 	    if (reg_overlap_mentioned_p (temp, op1))
9481 	      {
9482 		scalar_mode imode = GET_MODE_INNER (GET_MODE (target));
9483 		temp = adjust_address_nv (target, imode,
9484 					  GET_MODE_SIZE (imode));
9485 		if (reg_overlap_mentioned_p (temp, op0))
9486 		  goto complex_expr_force_op1;
9487 		goto complex_expr_swap_order;
9488 	      }
9489 	    break;
9490 	  default:
9491 	    if (reg_overlap_mentioned_p (target, op1))
9492 	      {
9493 		if (reg_overlap_mentioned_p (target, op0))
9494 		  goto complex_expr_force_op1;
9495 		goto complex_expr_swap_order;
9496 	      }
9497 	    break;
9498 	  }
9499 
9500       /* Move the real (op0) and imaginary (op1) parts to their location.  */
9501       write_complex_part (target, op0, false);
9502       write_complex_part (target, op1, true);
9503 
9504       return target;
9505 
9506     case WIDEN_SUM_EXPR:
9507       {
9508         tree oprnd0 = treeop0;
9509         tree oprnd1 = treeop1;
9510 
9511         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9512         target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9513                                             target, unsignedp);
9514         return target;
9515       }
9516 
9517     case VEC_UNPACK_HI_EXPR:
9518     case VEC_UNPACK_LO_EXPR:
9519       {
9520 	op0 = expand_normal (treeop0);
9521 	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9522 					  target, unsignedp);
9523 	gcc_assert (temp);
9524 	return temp;
9525       }
9526 
9527     case VEC_UNPACK_FLOAT_HI_EXPR:
9528     case VEC_UNPACK_FLOAT_LO_EXPR:
9529       {
9530 	op0 = expand_normal (treeop0);
9531 	/* The signedness is determined from input operand.  */
9532 	temp = expand_widen_pattern_expr
9533 	  (ops, op0, NULL_RTX, NULL_RTX,
9534 	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9535 
9536 	gcc_assert (temp);
9537 	return temp;
9538       }
9539 
9540     case VEC_WIDEN_MULT_HI_EXPR:
9541     case VEC_WIDEN_MULT_LO_EXPR:
9542     case VEC_WIDEN_MULT_EVEN_EXPR:
9543     case VEC_WIDEN_MULT_ODD_EXPR:
9544     case VEC_WIDEN_LSHIFT_HI_EXPR:
9545     case VEC_WIDEN_LSHIFT_LO_EXPR:
9546       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9547       target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9548 					  target, unsignedp);
9549       gcc_assert (target);
9550       return target;
9551 
9552     case VEC_PACK_TRUNC_EXPR:
9553     case VEC_PACK_SAT_EXPR:
9554     case VEC_PACK_FIX_TRUNC_EXPR:
9555       mode = TYPE_MODE (TREE_TYPE (treeop0));
9556       goto binop;
9557 
9558     case VEC_PERM_EXPR:
9559       {
9560 	expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9561 	vec_perm_builder sel;
9562 	if (TREE_CODE (treeop2) == VECTOR_CST
9563 	    && tree_to_vec_perm_builder (&sel, treeop2))
9564 	  {
9565 	    machine_mode sel_mode = TYPE_MODE (TREE_TYPE (treeop2));
9566 	    temp = expand_vec_perm_const (mode, op0, op1, sel,
9567 					  sel_mode, target);
9568 	  }
9569 	else
9570 	  {
9571 	    op2 = expand_normal (treeop2);
9572 	    temp = expand_vec_perm_var (mode, op0, op1, op2, target);
9573 	  }
9574 	gcc_assert (temp);
9575 	return temp;
9576       }
9577 
9578     case DOT_PROD_EXPR:
9579       {
9580 	tree oprnd0 = treeop0;
9581 	tree oprnd1 = treeop1;
9582 	tree oprnd2 = treeop2;
9583 	rtx op2;
9584 
9585 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9586 	op2 = expand_normal (oprnd2);
9587 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9588 					    target, unsignedp);
9589 	return target;
9590       }
9591 
9592       case SAD_EXPR:
9593       {
9594 	tree oprnd0 = treeop0;
9595 	tree oprnd1 = treeop1;
9596 	tree oprnd2 = treeop2;
9597 	rtx op2;
9598 
9599 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9600 	op2 = expand_normal (oprnd2);
9601 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9602 					    target, unsignedp);
9603 	return target;
9604       }
9605 
9606     case REALIGN_LOAD_EXPR:
9607       {
9608         tree oprnd0 = treeop0;
9609         tree oprnd1 = treeop1;
9610         tree oprnd2 = treeop2;
9611         rtx op2;
9612 
9613         this_optab = optab_for_tree_code (code, type, optab_default);
9614         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9615         op2 = expand_normal (oprnd2);
9616         temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9617 				  target, unsignedp);
9618         gcc_assert (temp);
9619         return temp;
9620       }
9621 
9622     case COND_EXPR:
9623       {
9624 	/* A COND_EXPR with its type being VOID_TYPE represents a
9625 	   conditional jump and is handled in
9626 	   expand_gimple_cond_expr.  */
9627 	gcc_assert (!VOID_TYPE_P (type));
9628 
9629 	/* Note that COND_EXPRs whose type is a structure or union
9630 	   are required to be constructed to contain assignments of
9631 	   a temporary variable, so that we can evaluate them here
9632 	   for side effect only.  If type is void, we must do likewise.  */
9633 
9634 	gcc_assert (!TREE_ADDRESSABLE (type)
9635 		    && !ignore
9636 		    && TREE_TYPE (treeop1) != void_type_node
9637 		    && TREE_TYPE (treeop2) != void_type_node);
9638 
9639 	temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9640 	if (temp)
9641 	  return temp;
9642 
9643 	/* If we are not to produce a result, we have no target.  Otherwise,
9644 	   if a target was specified use it; it will not be used as an
9645 	   intermediate target unless it is safe.  If no target, use a
9646 	   temporary.  */
9647 
9648 	if (modifier != EXPAND_STACK_PARM
9649 	    && original_target
9650 	    && safe_from_p (original_target, treeop0, 1)
9651 	    && GET_MODE (original_target) == mode
9652 	    && !MEM_P (original_target))
9653 	  temp = original_target;
9654 	else
9655 	  temp = assign_temp (type, 0, 1);
9656 
9657 	do_pending_stack_adjust ();
9658 	NO_DEFER_POP;
9659 	rtx_code_label *lab0 = gen_label_rtx ();
9660 	rtx_code_label *lab1 = gen_label_rtx ();
9661 	jumpifnot (treeop0, lab0,
9662 		   profile_probability::uninitialized ());
9663 	store_expr (treeop1, temp,
9664 		    modifier == EXPAND_STACK_PARM,
9665 		    false, false);
9666 
9667 	emit_jump_insn (targetm.gen_jump (lab1));
9668 	emit_barrier ();
9669 	emit_label (lab0);
9670 	store_expr (treeop2, temp,
9671 		    modifier == EXPAND_STACK_PARM,
9672 		    false, false);
9673 
9674 	emit_label (lab1);
9675 	OK_DEFER_POP;
9676 	return temp;
9677       }
9678 
9679     case VEC_COND_EXPR:
9680       target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9681       return target;
9682 
9683     case VEC_DUPLICATE_EXPR:
9684       op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
9685       target = expand_vector_broadcast (mode, op0);
9686       gcc_assert (target);
9687       return target;
9688 
9689     case VEC_SERIES_EXPR:
9690       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, modifier);
9691       return expand_vec_series_expr (mode, op0, op1, target);
9692 
9693     case BIT_INSERT_EXPR:
9694       {
9695 	unsigned bitpos = tree_to_uhwi (treeop2);
9696 	unsigned bitsize;
9697 	if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
9698 	  bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
9699 	else
9700 	  bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
9701 	rtx op0 = expand_normal (treeop0);
9702 	rtx op1 = expand_normal (treeop1);
9703 	rtx dst = gen_reg_rtx (mode);
9704 	emit_move_insn (dst, op0);
9705 	store_bit_field (dst, bitsize, bitpos, 0, 0,
9706 			 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
9707 	return dst;
9708       }
9709 
9710     default:
9711       gcc_unreachable ();
9712     }
9713 
9714   /* Here to do an ordinary binary operator.  */
9715  binop:
9716   expand_operands (treeop0, treeop1,
9717 		   subtarget, &op0, &op1, EXPAND_NORMAL);
9718  binop2:
9719   this_optab = optab_for_tree_code (code, type, optab_default);
9720  binop3:
9721   if (modifier == EXPAND_STACK_PARM)
9722     target = 0;
9723   temp = expand_binop (mode, this_optab, op0, op1, target,
9724 		       unsignedp, OPTAB_LIB_WIDEN);
9725   gcc_assert (temp);
9726   /* Bitwise operations do not need bitfield reduction as we expect their
9727      operands being properly truncated.  */
9728   if (code == BIT_XOR_EXPR
9729       || code == BIT_AND_EXPR
9730       || code == BIT_IOR_EXPR)
9731     return temp;
9732   return REDUCE_BIT_FIELD (temp);
9733 }
9734 #undef REDUCE_BIT_FIELD
9735 
9736 
9737 /* Return TRUE if expression STMT is suitable for replacement.
9738    Never consider memory loads as replaceable, because those don't ever lead
9739    into constant expressions.  */
9740 
9741 static bool
9742 stmt_is_replaceable_p (gimple *stmt)
9743 {
9744   if (ssa_is_replaceable_p (stmt))
9745     {
9746       /* Don't move around loads.  */
9747       if (!gimple_assign_single_p (stmt)
9748 	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
9749 	return true;
9750     }
9751   return false;
9752 }
9753 
9754 rtx
9755 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9756 		    enum expand_modifier modifier, rtx *alt_rtl,
9757 		    bool inner_reference_p)
9758 {
9759   rtx op0, op1, temp, decl_rtl;
9760   tree type;
9761   int unsignedp;
9762   machine_mode mode, dmode;
9763   enum tree_code code = TREE_CODE (exp);
9764   rtx subtarget, original_target;
9765   int ignore;
9766   tree context;
9767   bool reduce_bit_field;
9768   location_t loc = EXPR_LOCATION (exp);
9769   struct separate_ops ops;
9770   tree treeop0, treeop1, treeop2;
9771   tree ssa_name = NULL_TREE;
9772   gimple *g;
9773 
9774   type = TREE_TYPE (exp);
9775   mode = TYPE_MODE (type);
9776   unsignedp = TYPE_UNSIGNED (type);
9777 
9778   treeop0 = treeop1 = treeop2 = NULL_TREE;
9779   if (!VL_EXP_CLASS_P (exp))
9780     switch (TREE_CODE_LENGTH (code))
9781       {
9782 	default:
9783 	case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
9784 	case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
9785 	case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
9786 	case 0: break;
9787       }
9788   ops.code = code;
9789   ops.type = type;
9790   ops.op0 = treeop0;
9791   ops.op1 = treeop1;
9792   ops.op2 = treeop2;
9793   ops.location = loc;
9794 
9795   ignore = (target == const0_rtx
9796 	    || ((CONVERT_EXPR_CODE_P (code)
9797 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9798 		&& TREE_CODE (type) == VOID_TYPE));
9799 
9800   /* An operation in what may be a bit-field type needs the
9801      result to be reduced to the precision of the bit-field type,
9802      which is narrower than that of the type's mode.  */
9803   reduce_bit_field = (!ignore
9804 		      && INTEGRAL_TYPE_P (type)
9805 		      && !type_has_mode_precision_p (type));
9806 
9807   /* If we are going to ignore this result, we need only do something
9808      if there is a side-effect somewhere in the expression.  If there
9809      is, short-circuit the most common cases here.  Note that we must
9810      not call expand_expr with anything but const0_rtx in case this
9811      is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
9812 
9813   if (ignore)
9814     {
9815       if (! TREE_SIDE_EFFECTS (exp))
9816 	return const0_rtx;
9817 
9818       /* Ensure we reference a volatile object even if value is ignored, but
9819 	 don't do this if all we are doing is taking its address.  */
9820       if (TREE_THIS_VOLATILE (exp)
9821 	  && TREE_CODE (exp) != FUNCTION_DECL
9822 	  && mode != VOIDmode && mode != BLKmode
9823 	  && modifier != EXPAND_CONST_ADDRESS)
9824 	{
9825 	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9826 	  if (MEM_P (temp))
9827 	    copy_to_reg (temp);
9828 	  return const0_rtx;
9829 	}
9830 
9831       if (TREE_CODE_CLASS (code) == tcc_unary
9832 	  || code == BIT_FIELD_REF
9833 	  || code == COMPONENT_REF
9834 	  || code == INDIRECT_REF)
9835 	return expand_expr (treeop0, const0_rtx, VOIDmode,
9836 			    modifier);
9837 
9838       else if (TREE_CODE_CLASS (code) == tcc_binary
9839 	       || TREE_CODE_CLASS (code) == tcc_comparison
9840 	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9841 	{
9842 	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9843 	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9844 	  return const0_rtx;
9845 	}
9846 
9847       target = 0;
9848     }
9849 
9850   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9851     target = 0;
9852 
9853   /* Use subtarget as the target for operand 0 of a binary operation.  */
9854   subtarget = get_subtarget (target);
9855   original_target = target;
9856 
9857   switch (code)
9858     {
9859     case LABEL_DECL:
9860       {
9861 	tree function = decl_function_context (exp);
9862 
9863 	temp = label_rtx (exp);
9864 	temp = gen_rtx_LABEL_REF (Pmode, temp);
9865 
9866 	if (function != current_function_decl
9867 	    && function != 0)
9868 	  LABEL_REF_NONLOCAL_P (temp) = 1;
9869 
9870 	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9871 	return temp;
9872       }
9873 
9874     case SSA_NAME:
9875       /* ??? ivopts calls expander, without any preparation from
9876          out-of-ssa.  So fake instructions as if this was an access to the
9877 	 base variable.  This unnecessarily allocates a pseudo, see how we can
9878 	 reuse it, if partition base vars have it set already.  */
9879       if (!currently_expanding_to_rtl)
9880 	{
9881 	  tree var = SSA_NAME_VAR (exp);
9882 	  if (var && DECL_RTL_SET_P (var))
9883 	    return DECL_RTL (var);
9884 	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9885 			      LAST_VIRTUAL_REGISTER + 1);
9886 	}
9887 
9888       g = get_gimple_for_ssa_name (exp);
9889       /* For EXPAND_INITIALIZER try harder to get something simpler.  */
9890       if (g == NULL
9891 	  && modifier == EXPAND_INITIALIZER
9892 	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
9893 	  && (optimize || !SSA_NAME_VAR (exp)
9894 	      || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9895 	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9896 	g = SSA_NAME_DEF_STMT (exp);
9897       if (g)
9898 	{
9899 	  rtx r;
9900 	  location_t saved_loc = curr_insn_location ();
9901 	  location_t loc = gimple_location (g);
9902 	  if (loc != UNKNOWN_LOCATION)
9903 	    set_curr_insn_location (loc);
9904 	  ops.code = gimple_assign_rhs_code (g);
9905           switch (get_gimple_rhs_class (ops.code))
9906 	    {
9907 	    case GIMPLE_TERNARY_RHS:
9908 	      ops.op2 = gimple_assign_rhs3 (g);
9909 	      /* Fallthru */
9910 	    case GIMPLE_BINARY_RHS:
9911 	      ops.op1 = gimple_assign_rhs2 (g);
9912 
9913 	      /* Try to expand conditonal compare.  */
9914 	      if (targetm.gen_ccmp_first)
9915 		{
9916 		  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9917 		  r = expand_ccmp_expr (g, mode);
9918 		  if (r)
9919 		    break;
9920 		}
9921 	      /* Fallthru */
9922 	    case GIMPLE_UNARY_RHS:
9923 	      ops.op0 = gimple_assign_rhs1 (g);
9924 	      ops.type = TREE_TYPE (gimple_assign_lhs (g));
9925 	      ops.location = loc;
9926 	      r = expand_expr_real_2 (&ops, target, tmode, modifier);
9927 	      break;
9928 	    case GIMPLE_SINGLE_RHS:
9929 	      {
9930 		r = expand_expr_real (gimple_assign_rhs1 (g), target,
9931 				      tmode, modifier, alt_rtl,
9932 				      inner_reference_p);
9933 		break;
9934 	      }
9935 	    default:
9936 	      gcc_unreachable ();
9937 	    }
9938 	  set_curr_insn_location (saved_loc);
9939 	  if (REG_P (r) && !REG_EXPR (r))
9940 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9941 	  return r;
9942 	}
9943 
9944       ssa_name = exp;
9945       decl_rtl = get_rtx_for_ssa_name (ssa_name);
9946       exp = SSA_NAME_VAR (ssa_name);
9947       goto expand_decl_rtl;
9948 
9949     case PARM_DECL:
9950     case VAR_DECL:
9951       /* If a static var's type was incomplete when the decl was written,
9952 	 but the type is complete now, lay out the decl now.  */
9953       if (DECL_SIZE (exp) == 0
9954 	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9955 	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9956 	layout_decl (exp, 0);
9957 
9958       /* fall through */
9959 
9960     case FUNCTION_DECL:
9961     case RESULT_DECL:
9962       decl_rtl = DECL_RTL (exp);
9963     expand_decl_rtl:
9964       gcc_assert (decl_rtl);
9965 
9966       /* DECL_MODE might change when TYPE_MODE depends on attribute target
9967 	 settings for VECTOR_TYPE_P that might switch for the function.  */
9968       if (currently_expanding_to_rtl
9969 	  && code == VAR_DECL && MEM_P (decl_rtl)
9970 	  && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
9971 	decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
9972       else
9973 	decl_rtl = copy_rtx (decl_rtl);
9974 
9975       /* Record writes to register variables.  */
9976       if (modifier == EXPAND_WRITE
9977 	  && REG_P (decl_rtl)
9978 	  && HARD_REGISTER_P (decl_rtl))
9979         add_to_hard_reg_set (&crtl->asm_clobbers,
9980 			     GET_MODE (decl_rtl), REGNO (decl_rtl));
9981 
9982       /* Ensure variable marked as used even if it doesn't go through
9983 	 a parser.  If it hasn't be used yet, write out an external
9984 	 definition.  */
9985       if (exp)
9986 	TREE_USED (exp) = 1;
9987 
9988       /* Show we haven't gotten RTL for this yet.  */
9989       temp = 0;
9990 
9991       /* Variables inherited from containing functions should have
9992 	 been lowered by this point.  */
9993       if (exp)
9994 	context = decl_function_context (exp);
9995       gcc_assert (!exp
9996 		  || SCOPE_FILE_SCOPE_P (context)
9997 		  || context == current_function_decl
9998 		  || TREE_STATIC (exp)
9999 		  || DECL_EXTERNAL (exp)
10000 		  /* ??? C++ creates functions that are not TREE_STATIC.  */
10001 		  || TREE_CODE (exp) == FUNCTION_DECL);
10002 
10003       /* This is the case of an array whose size is to be determined
10004 	 from its initializer, while the initializer is still being parsed.
10005 	 ??? We aren't parsing while expanding anymore.  */
10006 
10007       if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
10008 	temp = validize_mem (decl_rtl);
10009 
10010       /* If DECL_RTL is memory, we are in the normal case and the
10011 	 address is not valid, get the address into a register.  */
10012 
10013       else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
10014 	{
10015 	  if (alt_rtl)
10016 	    *alt_rtl = decl_rtl;
10017 	  decl_rtl = use_anchored_address (decl_rtl);
10018 	  if (modifier != EXPAND_CONST_ADDRESS
10019 	      && modifier != EXPAND_SUM
10020 	      && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
10021 					       : GET_MODE (decl_rtl),
10022 					       XEXP (decl_rtl, 0),
10023 					       MEM_ADDR_SPACE (decl_rtl)))
10024 	    temp = replace_equiv_address (decl_rtl,
10025 					  copy_rtx (XEXP (decl_rtl, 0)));
10026 	}
10027 
10028       /* If we got something, return it.  But first, set the alignment
10029 	 if the address is a register.  */
10030       if (temp != 0)
10031 	{
10032 	  if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
10033 	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
10034 
10035 	  return temp;
10036 	}
10037 
10038       if (exp)
10039 	dmode = DECL_MODE (exp);
10040       else
10041 	dmode = TYPE_MODE (TREE_TYPE (ssa_name));
10042 
10043       /* If the mode of DECL_RTL does not match that of the decl,
10044 	 there are two cases: we are dealing with a BLKmode value
10045 	 that is returned in a register, or we are dealing with
10046 	 a promoted value.  In the latter case, return a SUBREG
10047 	 of the wanted mode, but mark it so that we know that it
10048 	 was already extended.  */
10049       if (REG_P (decl_rtl)
10050 	  && dmode != BLKmode
10051 	  && GET_MODE (decl_rtl) != dmode)
10052 	{
10053 	  machine_mode pmode;
10054 
10055 	  /* Get the signedness to be used for this variable.  Ensure we get
10056 	     the same mode we got when the variable was declared.  */
10057 	  if (code != SSA_NAME)
10058 	    pmode = promote_decl_mode (exp, &unsignedp);
10059 	  else if ((g = SSA_NAME_DEF_STMT (ssa_name))
10060 		   && gimple_code (g) == GIMPLE_CALL
10061 		   && !gimple_call_internal_p (g))
10062 	    pmode = promote_function_mode (type, mode, &unsignedp,
10063 					   gimple_call_fntype (g),
10064 					   2);
10065 	  else
10066 	    pmode = promote_ssa_mode (ssa_name, &unsignedp);
10067 	  gcc_assert (GET_MODE (decl_rtl) == pmode);
10068 
10069 	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
10070 	  SUBREG_PROMOTED_VAR_P (temp) = 1;
10071 	  SUBREG_PROMOTED_SET (temp, unsignedp);
10072 	  return temp;
10073 	}
10074 
10075       return decl_rtl;
10076 
10077     case INTEGER_CST:
10078       {
10079 	/* Given that TYPE_PRECISION (type) is not always equal to
10080 	   GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
10081 	   the former to the latter according to the signedness of the
10082 	   type.  */
10083 	scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
10084 	temp = immed_wide_int_const
10085 	  (wi::to_wide (exp, GET_MODE_PRECISION (mode)), mode);
10086 	return temp;
10087       }
10088 
10089     case VECTOR_CST:
10090       {
10091 	tree tmp = NULL_TREE;
10092 	if (VECTOR_MODE_P (mode))
10093 	  return const_vector_from_tree (exp);
10094 	scalar_int_mode int_mode;
10095 	if (is_int_mode (mode, &int_mode))
10096 	  {
10097 	    if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
10098 	      return const_scalar_mask_from_tree (int_mode, exp);
10099 	    else
10100 	      {
10101 		tree type_for_mode
10102 		  = lang_hooks.types.type_for_mode (int_mode, 1);
10103 		if (type_for_mode)
10104 		  tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
10105 					type_for_mode, exp);
10106 	      }
10107 	  }
10108 	if (!tmp)
10109 	  {
10110 	    vec<constructor_elt, va_gc> *v;
10111 	    /* Constructors need to be fixed-length.  FIXME.  */
10112 	    unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
10113 	    vec_alloc (v, nunits);
10114 	    for (unsigned int i = 0; i < nunits; ++i)
10115 	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
10116 	    tmp = build_constructor (type, v);
10117 	  }
10118 	return expand_expr (tmp, ignore ? const0_rtx : target,
10119 			    tmode, modifier);
10120       }
10121 
10122     case CONST_DECL:
10123       if (modifier == EXPAND_WRITE)
10124 	{
10125 	  /* Writing into CONST_DECL is always invalid, but handle it
10126 	     gracefully.  */
10127 	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
10128 	  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
10129 	  op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
10130 					 EXPAND_NORMAL, as);
10131 	  op0 = memory_address_addr_space (mode, op0, as);
10132 	  temp = gen_rtx_MEM (mode, op0);
10133 	  set_mem_addr_space (temp, as);
10134 	  return temp;
10135 	}
10136       return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
10137 
10138     case REAL_CST:
10139       /* If optimized, generate immediate CONST_DOUBLE
10140 	 which will be turned into memory by reload if necessary.
10141 
10142 	 We used to force a register so that loop.c could see it.  But
10143 	 this does not allow gen_* patterns to perform optimizations with
10144 	 the constants.  It also produces two insns in cases like "x = 1.0;".
10145 	 On most machines, floating-point constants are not permitted in
10146 	 many insns, so we'd end up copying it to a register in any case.
10147 
10148 	 Now, we do the copying in expand_binop, if appropriate.  */
10149       return const_double_from_real_value (TREE_REAL_CST (exp),
10150 					   TYPE_MODE (TREE_TYPE (exp)));
10151 
10152     case FIXED_CST:
10153       return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
10154 					   TYPE_MODE (TREE_TYPE (exp)));
10155 
10156     case COMPLEX_CST:
10157       /* Handle evaluating a complex constant in a CONCAT target.  */
10158       if (original_target && GET_CODE (original_target) == CONCAT)
10159 	{
10160 	  machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
10161 	  rtx rtarg, itarg;
10162 
10163 	  rtarg = XEXP (original_target, 0);
10164 	  itarg = XEXP (original_target, 1);
10165 
10166 	  /* Move the real and imaginary parts separately.  */
10167 	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
10168 	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
10169 
10170 	  if (op0 != rtarg)
10171 	    emit_move_insn (rtarg, op0);
10172 	  if (op1 != itarg)
10173 	    emit_move_insn (itarg, op1);
10174 
10175 	  return original_target;
10176 	}
10177 
10178       /* fall through */
10179 
10180     case STRING_CST:
10181       temp = expand_expr_constant (exp, 1, modifier);
10182 
10183       /* temp contains a constant address.
10184 	 On RISC machines where a constant address isn't valid,
10185 	 make some insns to get that address into a register.  */
10186       if (modifier != EXPAND_CONST_ADDRESS
10187 	  && modifier != EXPAND_INITIALIZER
10188 	  && modifier != EXPAND_SUM
10189 	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
10190 					    MEM_ADDR_SPACE (temp)))
10191 	return replace_equiv_address (temp,
10192 				      copy_rtx (XEXP (temp, 0)));
10193       return temp;
10194 
10195     case POLY_INT_CST:
10196       return immed_wide_int_const (poly_int_cst_value (exp), mode);
10197 
10198     case SAVE_EXPR:
10199       {
10200 	tree val = treeop0;
10201 	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
10202 				      inner_reference_p);
10203 
10204 	if (!SAVE_EXPR_RESOLVED_P (exp))
10205 	  {
10206 	    /* We can indeed still hit this case, typically via builtin
10207 	       expanders calling save_expr immediately before expanding
10208 	       something.  Assume this means that we only have to deal
10209 	       with non-BLKmode values.  */
10210 	    gcc_assert (GET_MODE (ret) != BLKmode);
10211 
10212 	    val = build_decl (curr_insn_location (),
10213 			      VAR_DECL, NULL, TREE_TYPE (exp));
10214 	    DECL_ARTIFICIAL (val) = 1;
10215 	    DECL_IGNORED_P (val) = 1;
10216 	    treeop0 = val;
10217 	    TREE_OPERAND (exp, 0) = treeop0;
10218 	    SAVE_EXPR_RESOLVED_P (exp) = 1;
10219 
10220 	    if (!CONSTANT_P (ret))
10221 	      ret = copy_to_reg (ret);
10222 	    SET_DECL_RTL (val, ret);
10223 	  }
10224 
10225         return ret;
10226       }
10227 
10228 
10229     case CONSTRUCTOR:
10230       /* If we don't need the result, just ensure we evaluate any
10231 	 subexpressions.  */
10232       if (ignore)
10233 	{
10234 	  unsigned HOST_WIDE_INT idx;
10235 	  tree value;
10236 
10237 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
10238 	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
10239 
10240 	  return const0_rtx;
10241 	}
10242 
10243       return expand_constructor (exp, target, modifier, false);
10244 
10245     case TARGET_MEM_REF:
10246       {
10247 	addr_space_t as
10248 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10249 	enum insn_code icode;
10250 	unsigned int align;
10251 
10252 	op0 = addr_for_mem_ref (exp, as, true);
10253 	op0 = memory_address_addr_space (mode, op0, as);
10254 	temp = gen_rtx_MEM (mode, op0);
10255 	set_mem_attributes (temp, exp, 0);
10256 	set_mem_addr_space (temp, as);
10257 	align = get_object_alignment (exp);
10258 	if (modifier != EXPAND_WRITE
10259 	    && modifier != EXPAND_MEMORY
10260 	    && mode != BLKmode
10261 	    && align < GET_MODE_ALIGNMENT (mode)
10262 	    /* If the target does not have special handling for unaligned
10263 	       loads of mode then it can use regular moves for them.  */
10264 	    && ((icode = optab_handler (movmisalign_optab, mode))
10265 		!= CODE_FOR_nothing))
10266 	  {
10267 	    struct expand_operand ops[2];
10268 
10269 	    /* We've already validated the memory, and we're creating a
10270 	       new pseudo destination.  The predicates really can't fail,
10271 	       nor can the generator.  */
10272 	    create_output_operand (&ops[0], NULL_RTX, mode);
10273 	    create_fixed_operand (&ops[1], temp);
10274 	    expand_insn (icode, 2, ops);
10275 	    temp = ops[0].value;
10276 	  }
10277 	return temp;
10278       }
10279 
10280     case MEM_REF:
10281       {
10282 	const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
10283 	addr_space_t as
10284 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10285 	machine_mode address_mode;
10286 	tree base = TREE_OPERAND (exp, 0);
10287 	gimple *def_stmt;
10288 	enum insn_code icode;
10289 	unsigned align;
10290 	/* Handle expansion of non-aliased memory with non-BLKmode.  That
10291 	   might end up in a register.  */
10292 	if (mem_ref_refers_to_non_mem_p (exp))
10293 	  {
10294 	    poly_int64 offset = mem_ref_offset (exp).force_shwi ();
10295 	    base = TREE_OPERAND (base, 0);
10296 	    if (known_eq (offset, 0)
10297 	        && !reverse
10298 		&& tree_fits_uhwi_p (TYPE_SIZE (type))
10299 		&& known_eq (GET_MODE_BITSIZE (DECL_MODE (base)),
10300 			     tree_to_uhwi (TYPE_SIZE (type))))
10301 	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
10302 				  target, tmode, modifier);
10303 	    if (TYPE_MODE (type) == BLKmode)
10304 	      {
10305 		temp = assign_stack_temp (DECL_MODE (base),
10306 					  GET_MODE_SIZE (DECL_MODE (base)));
10307 		store_expr (base, temp, 0, false, false);
10308 		temp = adjust_address (temp, BLKmode, offset);
10309 		set_mem_size (temp, int_size_in_bytes (type));
10310 		return temp;
10311 	      }
10312 	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
10313 			  bitsize_int (offset * BITS_PER_UNIT));
10314 	    REF_REVERSE_STORAGE_ORDER (exp) = reverse;
10315 	    return expand_expr (exp, target, tmode, modifier);
10316 	  }
10317 	address_mode = targetm.addr_space.address_mode (as);
10318 	base = TREE_OPERAND (exp, 0);
10319 	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
10320 	  {
10321 	    tree mask = gimple_assign_rhs2 (def_stmt);
10322 	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
10323 			   gimple_assign_rhs1 (def_stmt), mask);
10324 	    TREE_OPERAND (exp, 0) = base;
10325 	  }
10326 	align = get_object_alignment (exp);
10327 	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
10328 	op0 = memory_address_addr_space (mode, op0, as);
10329 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
10330 	  {
10331 	    rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
10332 	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
10333 	    op0 = memory_address_addr_space (mode, op0, as);
10334 	  }
10335 	temp = gen_rtx_MEM (mode, op0);
10336 	set_mem_attributes (temp, exp, 0);
10337 	set_mem_addr_space (temp, as);
10338 	if (TREE_THIS_VOLATILE (exp))
10339 	  MEM_VOLATILE_P (temp) = 1;
10340 	if (modifier != EXPAND_WRITE
10341 	    && modifier != EXPAND_MEMORY
10342 	    && !inner_reference_p
10343 	    && mode != BLKmode
10344 	    && align < GET_MODE_ALIGNMENT (mode))
10345 	  {
10346 	    if ((icode = optab_handler (movmisalign_optab, mode))
10347 		!= CODE_FOR_nothing)
10348 	      {
10349 		struct expand_operand ops[2];
10350 
10351 		/* We've already validated the memory, and we're creating a
10352 		   new pseudo destination.  The predicates really can't fail,
10353 		   nor can the generator.  */
10354 		create_output_operand (&ops[0], NULL_RTX, mode);
10355 		create_fixed_operand (&ops[1], temp);
10356 		expand_insn (icode, 2, ops);
10357 		temp = ops[0].value;
10358 	      }
10359 	    else if (targetm.slow_unaligned_access (mode, align))
10360 	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
10361 					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
10362 					(modifier == EXPAND_STACK_PARM
10363 					 ? NULL_RTX : target),
10364 					mode, mode, false, alt_rtl);
10365 	  }
10366 	if (reverse
10367 	    && modifier != EXPAND_MEMORY
10368 	    && modifier != EXPAND_WRITE)
10369 	  temp = flip_storage_order (mode, temp);
10370 	return temp;
10371       }
10372 
10373     case ARRAY_REF:
10374 
10375       {
10376 	tree array = treeop0;
10377 	tree index = treeop1;
10378 	tree init;
10379 
10380 	/* Fold an expression like: "foo"[2].
10381 	   This is not done in fold so it won't happen inside &.
10382 	   Don't fold if this is for wide characters since it's too
10383 	   difficult to do correctly and this is a very rare case.  */
10384 
10385 	if (modifier != EXPAND_CONST_ADDRESS
10386 	    && modifier != EXPAND_INITIALIZER
10387 	    && modifier != EXPAND_MEMORY)
10388 	  {
10389 	    tree t = fold_read_from_constant_string (exp);
10390 
10391 	    if (t)
10392 	      return expand_expr (t, target, tmode, modifier);
10393 	  }
10394 
10395 	/* If this is a constant index into a constant array,
10396 	   just get the value from the array.  Handle both the cases when
10397 	   we have an explicit constructor and when our operand is a variable
10398 	   that was declared const.  */
10399 
10400 	if (modifier != EXPAND_CONST_ADDRESS
10401 	    && modifier != EXPAND_INITIALIZER
10402 	    && modifier != EXPAND_MEMORY
10403 	    && TREE_CODE (array) == CONSTRUCTOR
10404 	    && ! TREE_SIDE_EFFECTS (array)
10405 	    && TREE_CODE (index) == INTEGER_CST)
10406 	  {
10407 	    unsigned HOST_WIDE_INT ix;
10408 	    tree field, value;
10409 
10410 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10411 				      field, value)
10412 	      if (tree_int_cst_equal (field, index))
10413 		{
10414 		  if (!TREE_SIDE_EFFECTS (value))
10415 		    return expand_expr (fold (value), target, tmode, modifier);
10416 		  break;
10417 		}
10418 	  }
10419 
10420 	else if (optimize >= 1
10421 		 && modifier != EXPAND_CONST_ADDRESS
10422 		 && modifier != EXPAND_INITIALIZER
10423 		 && modifier != EXPAND_MEMORY
10424 		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10425 		 && TREE_CODE (index) == INTEGER_CST
10426 		 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
10427 		 && (init = ctor_for_folding (array)) != error_mark_node)
10428 	  {
10429 	    if (init == NULL_TREE)
10430 	      {
10431 		tree value = build_zero_cst (type);
10432 		if (TREE_CODE (value) == CONSTRUCTOR)
10433 		  {
10434 		    /* If VALUE is a CONSTRUCTOR, this optimization is only
10435 		       useful if this doesn't store the CONSTRUCTOR into
10436 		       memory.  If it does, it is more efficient to just
10437 		       load the data from the array directly.  */
10438 		    rtx ret = expand_constructor (value, target,
10439 						  modifier, true);
10440 		    if (ret == NULL_RTX)
10441 		      value = NULL_TREE;
10442 		  }
10443 
10444 		if (value)
10445 		  return expand_expr (value, target, tmode, modifier);
10446 	      }
10447 	    else if (TREE_CODE (init) == CONSTRUCTOR)
10448 	      {
10449 		unsigned HOST_WIDE_INT ix;
10450 		tree field, value;
10451 
10452 		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10453 					  field, value)
10454 		  if (tree_int_cst_equal (field, index))
10455 		    {
10456 		      if (TREE_SIDE_EFFECTS (value))
10457 			break;
10458 
10459 		      if (TREE_CODE (value) == CONSTRUCTOR)
10460 			{
10461 			  /* If VALUE is a CONSTRUCTOR, this
10462 			     optimization is only useful if
10463 			     this doesn't store the CONSTRUCTOR
10464 			     into memory.  If it does, it is more
10465 			     efficient to just load the data from
10466 			     the array directly.  */
10467 			  rtx ret = expand_constructor (value, target,
10468 							modifier, true);
10469 			  if (ret == NULL_RTX)
10470 			    break;
10471 			}
10472 
10473 		      return
10474 		        expand_expr (fold (value), target, tmode, modifier);
10475 		    }
10476 	      }
10477 	    else if (TREE_CODE (init) == STRING_CST)
10478 	      {
10479 		tree low_bound = array_ref_low_bound (exp);
10480 		tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10481 
10482 		/* Optimize the special case of a zero lower bound.
10483 
10484 		   We convert the lower bound to sizetype to avoid problems
10485 		   with constant folding.  E.g. suppose the lower bound is
10486 		   1 and its mode is QI.  Without the conversion
10487 		      (ARRAY + (INDEX - (unsigned char)1))
10488 		   becomes
10489 		      (ARRAY + (-(unsigned char)1) + INDEX)
10490 		   which becomes
10491 		      (ARRAY + 255 + INDEX).  Oops!  */
10492 		if (!integer_zerop (low_bound))
10493 		  index1 = size_diffop_loc (loc, index1,
10494 					    fold_convert_loc (loc, sizetype,
10495 							      low_bound));
10496 
10497 		if (tree_fits_uhwi_p (index1)
10498 		    && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10499 		  {
10500 		    tree type = TREE_TYPE (TREE_TYPE (init));
10501 		    scalar_int_mode mode;
10502 
10503 		    if (is_int_mode (TYPE_MODE (type), &mode)
10504 			&& GET_MODE_SIZE (mode) == 1)
10505 		      return gen_int_mode (TREE_STRING_POINTER (init)
10506 					   [TREE_INT_CST_LOW (index1)],
10507 					   mode);
10508 		  }
10509 	      }
10510 	  }
10511       }
10512       goto normal_inner_ref;
10513 
10514     case COMPONENT_REF:
10515       /* If the operand is a CONSTRUCTOR, we can just extract the
10516 	 appropriate field if it is present.  */
10517       if (TREE_CODE (treeop0) == CONSTRUCTOR)
10518 	{
10519 	  unsigned HOST_WIDE_INT idx;
10520 	  tree field, value;
10521 	  scalar_int_mode field_mode;
10522 
10523 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10524 				    idx, field, value)
10525 	    if (field == treeop1
10526 		/* We can normally use the value of the field in the
10527 		   CONSTRUCTOR.  However, if this is a bitfield in
10528 		   an integral mode that we can fit in a HOST_WIDE_INT,
10529 		   we must mask only the number of bits in the bitfield,
10530 		   since this is done implicitly by the constructor.  If
10531 		   the bitfield does not meet either of those conditions,
10532 		   we can't do this optimization.  */
10533 		&& (! DECL_BIT_FIELD (field)
10534 		    || (is_int_mode (DECL_MODE (field), &field_mode)
10535 			&& (GET_MODE_PRECISION (field_mode)
10536 			    <= HOST_BITS_PER_WIDE_INT))))
10537 	      {
10538 		if (DECL_BIT_FIELD (field)
10539 		    && modifier == EXPAND_STACK_PARM)
10540 		  target = 0;
10541 		op0 = expand_expr (value, target, tmode, modifier);
10542 		if (DECL_BIT_FIELD (field))
10543 		  {
10544 		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10545 		    scalar_int_mode imode
10546 		      = SCALAR_INT_TYPE_MODE (TREE_TYPE (field));
10547 
10548 		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
10549 		      {
10550 			op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
10551 					    imode);
10552 			op0 = expand_and (imode, op0, op1, target);
10553 		      }
10554 		    else
10555 		      {
10556 			int count = GET_MODE_PRECISION (imode) - bitsize;
10557 
10558 			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10559 					    target, 0);
10560 			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10561 					    target, 0);
10562 		      }
10563 		  }
10564 
10565 		return op0;
10566 	      }
10567 	}
10568       goto normal_inner_ref;
10569 
10570     case BIT_FIELD_REF:
10571     case ARRAY_RANGE_REF:
10572     normal_inner_ref:
10573       {
10574 	machine_mode mode1, mode2;
10575 	poly_int64 bitsize, bitpos, bytepos;
10576 	tree offset;
10577 	int reversep, volatilep = 0, must_force_mem;
10578 	tree tem
10579 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10580 				 &unsignedp, &reversep, &volatilep);
10581 	rtx orig_op0, memloc;
10582 	bool clear_mem_expr = false;
10583 
10584 	/* If we got back the original object, something is wrong.  Perhaps
10585 	   we are evaluating an expression too early.  In any event, don't
10586 	   infinitely recurse.  */
10587 	gcc_assert (tem != exp);
10588 
10589 	/* If TEM's type is a union of variable size, pass TARGET to the inner
10590 	   computation, since it will need a temporary and TARGET is known
10591 	   to have to do.  This occurs in unchecked conversion in Ada.  */
10592 	orig_op0 = op0
10593 	  = expand_expr_real (tem,
10594 			      (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10595 			       && COMPLETE_TYPE_P (TREE_TYPE (tem))
10596 			       && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10597 				   != INTEGER_CST)
10598 			       && modifier != EXPAND_STACK_PARM
10599 			       ? target : NULL_RTX),
10600 			      VOIDmode,
10601 			      modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10602 			      NULL, true);
10603 
10604 	/* If the field has a mode, we want to access it in the
10605 	   field's mode, not the computed mode.
10606 	   If a MEM has VOIDmode (external with incomplete type),
10607 	   use BLKmode for it instead.  */
10608 	if (MEM_P (op0))
10609 	  {
10610 	    if (mode1 != VOIDmode)
10611 	      op0 = adjust_address (op0, mode1, 0);
10612 	    else if (GET_MODE (op0) == VOIDmode)
10613 	      op0 = adjust_address (op0, BLKmode, 0);
10614 	  }
10615 
10616 	mode2
10617 	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10618 
10619 	/* If we have either an offset, a BLKmode result, or a reference
10620 	   outside the underlying object, we must force it to memory.
10621 	   Such a case can occur in Ada if we have unchecked conversion
10622 	   of an expression from a scalar type to an aggregate type or
10623 	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10624 	   passed a partially uninitialized object or a view-conversion
10625 	   to a larger size.  */
10626 	must_force_mem = (offset
10627 			  || mode1 == BLKmode
10628 			  || maybe_gt (bitpos + bitsize,
10629 				       GET_MODE_BITSIZE (mode2)));
10630 
10631 	/* Handle CONCAT first.  */
10632 	if (GET_CODE (op0) == CONCAT && !must_force_mem)
10633 	  {
10634 	    if (known_eq (bitpos, 0)
10635 		&& known_eq (bitsize, GET_MODE_BITSIZE (GET_MODE (op0)))
10636 		&& COMPLEX_MODE_P (mode1)
10637 		&& COMPLEX_MODE_P (GET_MODE (op0))
10638 		&& (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
10639 		    == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
10640 	      {
10641 		if (reversep)
10642 		  op0 = flip_storage_order (GET_MODE (op0), op0);
10643 		if (mode1 != GET_MODE (op0))
10644 		  {
10645 		    rtx parts[2];
10646 		    for (int i = 0; i < 2; i++)
10647 		      {
10648 			rtx op = read_complex_part (op0, i != 0);
10649 			if (GET_CODE (op) == SUBREG)
10650 			  op = force_reg (GET_MODE (op), op);
10651 			rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
10652 						       op);
10653 			if (temp)
10654 			  op = temp;
10655 			else
10656 			  {
10657 			    if (!REG_P (op) && !MEM_P (op))
10658 			      op = force_reg (GET_MODE (op), op);
10659 			    op = gen_lowpart (GET_MODE_INNER (mode1), op);
10660 			  }
10661 			parts[i] = op;
10662 		      }
10663 		    op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
10664 		  }
10665 		return op0;
10666 	      }
10667 	    if (known_eq (bitpos, 0)
10668 		&& known_eq (bitsize,
10669 			     GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
10670 		&& maybe_ne (bitsize, 0))
10671 	      {
10672 		op0 = XEXP (op0, 0);
10673 		mode2 = GET_MODE (op0);
10674 	      }
10675 	    else if (known_eq (bitpos,
10676 			       GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
10677 		     && known_eq (bitsize,
10678 				  GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1))))
10679 		     && maybe_ne (bitpos, 0)
10680 		     && maybe_ne (bitsize, 0))
10681 	      {
10682 		op0 = XEXP (op0, 1);
10683 		bitpos = 0;
10684 		mode2 = GET_MODE (op0);
10685 	      }
10686 	    else
10687 	      /* Otherwise force into memory.  */
10688 	      must_force_mem = 1;
10689 	  }
10690 
10691 	/* If this is a constant, put it in a register if it is a legitimate
10692 	   constant and we don't need a memory reference.  */
10693 	if (CONSTANT_P (op0)
10694 	    && mode2 != BLKmode
10695 	    && targetm.legitimate_constant_p (mode2, op0)
10696 	    && !must_force_mem)
10697 	  op0 = force_reg (mode2, op0);
10698 
10699 	/* Otherwise, if this is a constant, try to force it to the constant
10700 	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
10701 	   is a legitimate constant.  */
10702 	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10703 	  op0 = validize_mem (memloc);
10704 
10705 	/* Otherwise, if this is a constant or the object is not in memory
10706 	   and need be, put it there.  */
10707 	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10708 	  {
10709 	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10710 	    emit_move_insn (memloc, op0);
10711 	    op0 = memloc;
10712 	    clear_mem_expr = true;
10713 	  }
10714 
10715 	if (offset)
10716 	  {
10717 	    machine_mode address_mode;
10718 	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10719 					  EXPAND_SUM);
10720 
10721 	    gcc_assert (MEM_P (op0));
10722 
10723 	    address_mode = get_address_mode (op0);
10724 	    if (GET_MODE (offset_rtx) != address_mode)
10725 	      {
10726 		/* We cannot be sure that the RTL in offset_rtx is valid outside
10727 		   of a memory address context, so force it into a register
10728 		   before attempting to convert it to the desired mode.  */
10729 		offset_rtx = force_operand (offset_rtx, NULL_RTX);
10730 		offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10731 	      }
10732 
10733 	    /* See the comment in expand_assignment for the rationale.  */
10734 	    if (mode1 != VOIDmode
10735 		&& maybe_ne (bitpos, 0)
10736 		&& maybe_gt (bitsize, 0)
10737 		&& multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
10738 		&& multiple_p (bitpos, bitsize)
10739 		&& multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
10740 		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10741 	      {
10742 		op0 = adjust_address (op0, mode1, bytepos);
10743 		bitpos = 0;
10744 	      }
10745 
10746 	    op0 = offset_address (op0, offset_rtx,
10747 				  highest_pow2_factor (offset));
10748 	  }
10749 
10750 	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10751 	   record its alignment as BIGGEST_ALIGNMENT.  */
10752 	if (MEM_P (op0)
10753 	    && known_eq (bitpos, 0)
10754 	    && offset != 0
10755 	    && is_aligning_offset (offset, tem))
10756 	  set_mem_align (op0, BIGGEST_ALIGNMENT);
10757 
10758 	/* Don't forget about volatility even if this is a bitfield.  */
10759 	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10760 	  {
10761 	    if (op0 == orig_op0)
10762 	      op0 = copy_rtx (op0);
10763 
10764 	    MEM_VOLATILE_P (op0) = 1;
10765 	  }
10766 
10767 	/* In cases where an aligned union has an unaligned object
10768 	   as a field, we might be extracting a BLKmode value from
10769 	   an integer-mode (e.g., SImode) object.  Handle this case
10770 	   by doing the extract into an object as wide as the field
10771 	   (which we know to be the width of a basic mode), then
10772 	   storing into memory, and changing the mode to BLKmode.  */
10773 	if (mode1 == VOIDmode
10774 	    || REG_P (op0) || GET_CODE (op0) == SUBREG
10775 	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
10776 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10777 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10778 		&& modifier != EXPAND_CONST_ADDRESS
10779 		&& modifier != EXPAND_INITIALIZER
10780 		&& modifier != EXPAND_MEMORY)
10781 	    /* If the bitfield is volatile and the bitsize
10782 	       is narrower than the access size of the bitfield,
10783 	       we need to extract bitfields from the access.  */
10784 	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10785 		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10786 		&& mode1 != BLKmode
10787 		&& maybe_lt (bitsize, GET_MODE_SIZE (mode1) * BITS_PER_UNIT))
10788 	    /* If the field isn't aligned enough to fetch as a memref,
10789 	       fetch it as a bit field.  */
10790 	    || (mode1 != BLKmode
10791 		&& (((MEM_P (op0)
10792 		      ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10793 			|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode1))
10794 		      : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10795 			|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
10796 		     && modifier != EXPAND_MEMORY
10797 		     && ((modifier == EXPAND_CONST_ADDRESS
10798 			  || modifier == EXPAND_INITIALIZER)
10799 			 ? STRICT_ALIGNMENT
10800 			 : targetm.slow_unaligned_access (mode1,
10801 							  MEM_ALIGN (op0))))
10802 		    || !multiple_p (bitpos, BITS_PER_UNIT)))
10803 	    /* If the type and the field are a constant size and the
10804 	       size of the type isn't the same size as the bitfield,
10805 	       we must use bitfield operations.  */
10806 	    || (known_size_p (bitsize)
10807 		&& TYPE_SIZE (TREE_TYPE (exp))
10808 		&& poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
10809 		&& maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
10810 			     bitsize)))
10811 	  {
10812 	    machine_mode ext_mode = mode;
10813 
10814 	    if (ext_mode == BLKmode
10815 		&& ! (target != 0 && MEM_P (op0)
10816 		      && MEM_P (target)
10817 		      && multiple_p (bitpos, BITS_PER_UNIT)))
10818 	      ext_mode = int_mode_for_size (bitsize, 1).else_blk ();
10819 
10820 	    if (ext_mode == BLKmode)
10821 	      {
10822 		if (target == 0)
10823 		  target = assign_temp (type, 1, 1);
10824 
10825 		/* ??? Unlike the similar test a few lines below, this one is
10826 		   very likely obsolete.  */
10827 		if (known_eq (bitsize, 0))
10828 		  return target;
10829 
10830 		/* In this case, BITPOS must start at a byte boundary and
10831 		   TARGET, if specified, must be a MEM.  */
10832 		gcc_assert (MEM_P (op0)
10833 			    && (!target || MEM_P (target)));
10834 
10835 		bytepos = exact_div (bitpos, BITS_PER_UNIT);
10836 		poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
10837 		emit_block_move (target,
10838 				 adjust_address (op0, VOIDmode, bytepos),
10839 				 gen_int_mode (bytesize, Pmode),
10840 				 (modifier == EXPAND_STACK_PARM
10841 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10842 
10843 		return target;
10844 	      }
10845 
10846 	    /* If we have nothing to extract, the result will be 0 for targets
10847 	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
10848 	       return 0 for the sake of consistency, as reading a zero-sized
10849 	       bitfield is valid in Ada and the value is fully specified.  */
10850 	    if (known_eq (bitsize, 0))
10851 	      return const0_rtx;
10852 
10853 	    op0 = validize_mem (op0);
10854 
10855 	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10856 	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10857 
10858 	    /* If the result has a record type and the extraction is done in
10859 	       an integral mode, then the field may be not aligned on a byte
10860 	       boundary; in this case, if it has reverse storage order, it
10861 	       needs to be extracted as a scalar field with reverse storage
10862 	       order and put back into memory order afterwards.  */
10863 	    if (TREE_CODE (type) == RECORD_TYPE
10864 		&& GET_MODE_CLASS (ext_mode) == MODE_INT)
10865 	      reversep = TYPE_REVERSE_STORAGE_ORDER (type);
10866 
10867 	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10868 				     (modifier == EXPAND_STACK_PARM
10869 				      ? NULL_RTX : target),
10870 				     ext_mode, ext_mode, reversep, alt_rtl);
10871 
10872 	    /* If the result has a record type and the mode of OP0 is an
10873 	       integral mode then, if BITSIZE is narrower than this mode
10874 	       and this is for big-endian data, we must put the field
10875 	       into the high-order bits.  And we must also put it back
10876 	       into memory order if it has been previously reversed.  */
10877 	    scalar_int_mode op0_mode;
10878 	    if (TREE_CODE (type) == RECORD_TYPE
10879 		&& is_int_mode (GET_MODE (op0), &op0_mode))
10880 	      {
10881 		HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode);
10882 
10883 		gcc_checking_assert (known_le (bitsize, size));
10884 		if (maybe_lt (bitsize, size)
10885 		    && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
10886 		  op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0,
10887 				      size - bitsize, op0, 1);
10888 
10889 		if (reversep)
10890 		  op0 = flip_storage_order (op0_mode, op0);
10891 	      }
10892 
10893 	    /* If the result type is BLKmode, store the data into a temporary
10894 	       of the appropriate type, but with the mode corresponding to the
10895 	       mode for the data we have (op0's mode).  */
10896 	    if (mode == BLKmode)
10897 	      {
10898 		rtx new_rtx
10899 		  = assign_stack_temp_for_type (ext_mode,
10900 						GET_MODE_BITSIZE (ext_mode),
10901 						type);
10902 		emit_move_insn (new_rtx, op0);
10903 		op0 = copy_rtx (new_rtx);
10904 		PUT_MODE (op0, BLKmode);
10905 	      }
10906 
10907 	    return op0;
10908 	  }
10909 
10910 	/* If the result is BLKmode, use that to access the object
10911 	   now as well.  */
10912 	if (mode == BLKmode)
10913 	  mode1 = BLKmode;
10914 
10915 	/* Get a reference to just this component.  */
10916 	bytepos = bits_to_bytes_round_down (bitpos);
10917 	if (modifier == EXPAND_CONST_ADDRESS
10918 	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10919 	  op0 = adjust_address_nv (op0, mode1, bytepos);
10920 	else
10921 	  op0 = adjust_address (op0, mode1, bytepos);
10922 
10923 	if (op0 == orig_op0)
10924 	  op0 = copy_rtx (op0);
10925 
10926 	/* Don't set memory attributes if the base expression is
10927 	   SSA_NAME that got expanded as a MEM.  In that case, we should
10928 	   just honor its original memory attributes.  */
10929 	if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
10930 	  set_mem_attributes (op0, exp, 0);
10931 
10932 	if (REG_P (XEXP (op0, 0)))
10933 	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10934 
10935 	/* If op0 is a temporary because the original expressions was forced
10936 	   to memory, clear MEM_EXPR so that the original expression cannot
10937 	   be marked as addressable through MEM_EXPR of the temporary.  */
10938 	if (clear_mem_expr)
10939 	  set_mem_expr (op0, NULL_TREE);
10940 
10941 	MEM_VOLATILE_P (op0) |= volatilep;
10942 
10943         if (reversep
10944 	    && modifier != EXPAND_MEMORY
10945 	    && modifier != EXPAND_WRITE)
10946 	  op0 = flip_storage_order (mode1, op0);
10947 
10948 	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10949 	    || modifier == EXPAND_CONST_ADDRESS
10950 	    || modifier == EXPAND_INITIALIZER)
10951 	  return op0;
10952 
10953 	if (target == 0)
10954 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10955 
10956 	convert_move (target, op0, unsignedp);
10957 	return target;
10958       }
10959 
10960     case OBJ_TYPE_REF:
10961       return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10962 
10963     case CALL_EXPR:
10964       /* All valid uses of __builtin_va_arg_pack () are removed during
10965 	 inlining.  */
10966       if (CALL_EXPR_VA_ARG_PACK (exp))
10967 	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10968       {
10969 	tree fndecl = get_callee_fndecl (exp), attr;
10970 
10971 	if (fndecl
10972 	    /* Don't diagnose the error attribute in thunks, those are
10973 	       artificially created.  */
10974 	    && !CALL_FROM_THUNK_P (exp)
10975 	    && (attr = lookup_attribute ("error",
10976 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10977 	  {
10978 	    const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
10979 	    error ("%Kcall to %qs declared with attribute error: %s", exp,
10980 		   identifier_to_locale (ident),
10981 		   TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10982 	  }
10983 	if (fndecl
10984 	    /* Don't diagnose the warning attribute in thunks, those are
10985 	       artificially created.  */
10986 	    && !CALL_FROM_THUNK_P (exp)
10987 	    && (attr = lookup_attribute ("warning",
10988 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10989 	  {
10990 	    const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
10991 	    warning_at (tree_nonartificial_location (exp), 0,
10992 			"%Kcall to %qs declared with attribute warning: %s",
10993 			exp, identifier_to_locale (ident),
10994 			TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10995 	  }
10996 
10997 	/* Check for a built-in function.  */
10998 	if (fndecl && DECL_BUILT_IN (fndecl))
10999 	  {
11000 	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
11001 	    if (CALL_WITH_BOUNDS_P (exp))
11002 	      return expand_builtin_with_bounds (exp, target, subtarget,
11003 						 tmode, ignore);
11004 	    else
11005 	      return expand_builtin (exp, target, subtarget, tmode, ignore);
11006 	  }
11007       }
11008       return expand_call (exp, target, ignore);
11009 
11010     case VIEW_CONVERT_EXPR:
11011       op0 = NULL_RTX;
11012 
11013       /* If we are converting to BLKmode, try to avoid an intermediate
11014 	 temporary by fetching an inner memory reference.  */
11015       if (mode == BLKmode
11016 	  && poly_int_tree_p (TYPE_SIZE (type))
11017 	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
11018 	  && handled_component_p (treeop0))
11019       {
11020 	machine_mode mode1;
11021 	poly_int64 bitsize, bitpos, bytepos;
11022 	tree offset;
11023 	int unsignedp, reversep, volatilep = 0;
11024 	tree tem
11025 	  = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
11026 				 &unsignedp, &reversep, &volatilep);
11027 	rtx orig_op0;
11028 
11029 	/* ??? We should work harder and deal with non-zero offsets.  */
11030 	if (!offset
11031 	    && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
11032 	    && !reversep
11033 	    && known_size_p (bitsize)
11034 	    && known_eq (wi::to_poly_offset (TYPE_SIZE (type)), bitsize))
11035 	  {
11036 	    /* See the normal_inner_ref case for the rationale.  */
11037 	    orig_op0
11038 	      = expand_expr_real (tem,
11039 				  (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
11040 				   && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
11041 				       != INTEGER_CST)
11042 				   && modifier != EXPAND_STACK_PARM
11043 				   ? target : NULL_RTX),
11044 				  VOIDmode,
11045 				  modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
11046 				  NULL, true);
11047 
11048 	    if (MEM_P (orig_op0))
11049 	      {
11050 		op0 = orig_op0;
11051 
11052 		/* Get a reference to just this component.  */
11053 		if (modifier == EXPAND_CONST_ADDRESS
11054 		    || modifier == EXPAND_SUM
11055 		    || modifier == EXPAND_INITIALIZER)
11056 		  op0 = adjust_address_nv (op0, mode, bytepos);
11057 		else
11058 		  op0 = adjust_address (op0, mode, bytepos);
11059 
11060 		if (op0 == orig_op0)
11061 		  op0 = copy_rtx (op0);
11062 
11063 		set_mem_attributes (op0, treeop0, 0);
11064 		if (REG_P (XEXP (op0, 0)))
11065 		  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
11066 
11067 		MEM_VOLATILE_P (op0) |= volatilep;
11068 	      }
11069 	  }
11070       }
11071 
11072       if (!op0)
11073 	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
11074 				NULL, inner_reference_p);
11075 
11076       /* If the input and output modes are both the same, we are done.  */
11077       if (mode == GET_MODE (op0))
11078 	;
11079       /* If neither mode is BLKmode, and both modes are the same size
11080 	 then we can use gen_lowpart.  */
11081       else if (mode != BLKmode
11082 	       && GET_MODE (op0) != BLKmode
11083 	       && known_eq (GET_MODE_PRECISION (mode),
11084 			    GET_MODE_PRECISION (GET_MODE (op0)))
11085 	       && !COMPLEX_MODE_P (GET_MODE (op0)))
11086 	{
11087 	  if (GET_CODE (op0) == SUBREG)
11088 	    op0 = force_reg (GET_MODE (op0), op0);
11089 	  temp = gen_lowpart_common (mode, op0);
11090 	  if (temp)
11091 	    op0 = temp;
11092 	  else
11093 	    {
11094 	      if (!REG_P (op0) && !MEM_P (op0))
11095 		op0 = force_reg (GET_MODE (op0), op0);
11096 	      op0 = gen_lowpart (mode, op0);
11097 	    }
11098 	}
11099       /* If both types are integral, convert from one mode to the other.  */
11100       else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
11101 	op0 = convert_modes (mode, GET_MODE (op0), op0,
11102 			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
11103       /* If the output type is a bit-field type, do an extraction.  */
11104       else if (reduce_bit_field)
11105 	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
11106 				  TYPE_UNSIGNED (type), NULL_RTX,
11107 				  mode, mode, false, NULL);
11108       /* As a last resort, spill op0 to memory, and reload it in a
11109 	 different mode.  */
11110       else if (!MEM_P (op0))
11111 	{
11112 	  /* If the operand is not a MEM, force it into memory.  Since we
11113 	     are going to be changing the mode of the MEM, don't call
11114 	     force_const_mem for constants because we don't allow pool
11115 	     constants to change mode.  */
11116 	  tree inner_type = TREE_TYPE (treeop0);
11117 
11118 	  gcc_assert (!TREE_ADDRESSABLE (exp));
11119 
11120 	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
11121 	    target
11122 	      = assign_stack_temp_for_type
11123 		(TYPE_MODE (inner_type),
11124 		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
11125 
11126 	  emit_move_insn (target, op0);
11127 	  op0 = target;
11128 	}
11129 
11130       /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
11131 	 output type is such that the operand is known to be aligned, indicate
11132 	 that it is.  Otherwise, we need only be concerned about alignment for
11133 	 non-BLKmode results.  */
11134       if (MEM_P (op0))
11135 	{
11136 	  enum insn_code icode;
11137 
11138 	  if (modifier != EXPAND_WRITE
11139 	      && modifier != EXPAND_MEMORY
11140 	      && !inner_reference_p
11141 	      && mode != BLKmode
11142 	      && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
11143 	    {
11144 	      /* If the target does have special handling for unaligned
11145 		 loads of mode then use them.  */
11146 	      if ((icode = optab_handler (movmisalign_optab, mode))
11147 		  != CODE_FOR_nothing)
11148 		{
11149 		  rtx reg;
11150 
11151 		  op0 = adjust_address (op0, mode, 0);
11152 		  /* We've already validated the memory, and we're creating a
11153 		     new pseudo destination.  The predicates really can't
11154 		     fail.  */
11155 		  reg = gen_reg_rtx (mode);
11156 
11157 		  /* Nor can the insn generator.  */
11158 		  rtx_insn *insn = GEN_FCN (icode) (reg, op0);
11159 		  emit_insn (insn);
11160 		  return reg;
11161 		}
11162 	      else if (STRICT_ALIGNMENT)
11163 		{
11164 		  poly_uint64 mode_size = GET_MODE_SIZE (mode);
11165 		  poly_uint64 temp_size = mode_size;
11166 		  if (GET_MODE (op0) != BLKmode)
11167 		    temp_size = upper_bound (temp_size,
11168 					     GET_MODE_SIZE (GET_MODE (op0)));
11169 		  rtx new_rtx
11170 		    = assign_stack_temp_for_type (mode, temp_size, type);
11171 		  rtx new_with_op0_mode
11172 		    = adjust_address (new_rtx, GET_MODE (op0), 0);
11173 
11174 		  gcc_assert (!TREE_ADDRESSABLE (exp));
11175 
11176 		  if (GET_MODE (op0) == BLKmode)
11177 		    {
11178 		      rtx size_rtx = gen_int_mode (mode_size, Pmode);
11179 		      emit_block_move (new_with_op0_mode, op0, size_rtx,
11180 				       (modifier == EXPAND_STACK_PARM
11181 					? BLOCK_OP_CALL_PARM
11182 					: BLOCK_OP_NORMAL));
11183 		    }
11184 		  else
11185 		    emit_move_insn (new_with_op0_mode, op0);
11186 
11187 		  op0 = new_rtx;
11188 		}
11189 	    }
11190 
11191 	  op0 = adjust_address (op0, mode, 0);
11192 	}
11193 
11194       return op0;
11195 
11196     case MODIFY_EXPR:
11197       {
11198 	tree lhs = treeop0;
11199 	tree rhs = treeop1;
11200 	gcc_assert (ignore);
11201 
11202 	/* Check for |= or &= of a bitfield of size one into another bitfield
11203 	   of size 1.  In this case, (unless we need the result of the
11204 	   assignment) we can do this more efficiently with a
11205 	   test followed by an assignment, if necessary.
11206 
11207 	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
11208 	   things change so we do, this code should be enhanced to
11209 	   support it.  */
11210 	if (TREE_CODE (lhs) == COMPONENT_REF
11211 	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
11212 		|| TREE_CODE (rhs) == BIT_AND_EXPR)
11213 	    && TREE_OPERAND (rhs, 0) == lhs
11214 	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
11215 	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
11216 	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
11217 	  {
11218 	    rtx_code_label *label = gen_label_rtx ();
11219 	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
11220 	    do_jump (TREE_OPERAND (rhs, 1),
11221 		     value ? label : 0,
11222 		     value ? 0 : label,
11223 		     profile_probability::uninitialized ());
11224 	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
11225 			       false);
11226 	    do_pending_stack_adjust ();
11227 	    emit_label (label);
11228 	    return const0_rtx;
11229 	  }
11230 
11231 	expand_assignment (lhs, rhs, false);
11232 	return const0_rtx;
11233       }
11234 
11235     case ADDR_EXPR:
11236       return expand_expr_addr_expr (exp, target, tmode, modifier);
11237 
11238     case REALPART_EXPR:
11239       op0 = expand_normal (treeop0);
11240       return read_complex_part (op0, false);
11241 
11242     case IMAGPART_EXPR:
11243       op0 = expand_normal (treeop0);
11244       return read_complex_part (op0, true);
11245 
11246     case RETURN_EXPR:
11247     case LABEL_EXPR:
11248     case GOTO_EXPR:
11249     case SWITCH_EXPR:
11250     case ASM_EXPR:
11251       /* Expanded in cfgexpand.c.  */
11252       gcc_unreachable ();
11253 
11254     case TRY_CATCH_EXPR:
11255     case CATCH_EXPR:
11256     case EH_FILTER_EXPR:
11257     case TRY_FINALLY_EXPR:
11258       /* Lowered by tree-eh.c.  */
11259       gcc_unreachable ();
11260 
11261     case WITH_CLEANUP_EXPR:
11262     case CLEANUP_POINT_EXPR:
11263     case TARGET_EXPR:
11264     case CASE_LABEL_EXPR:
11265     case VA_ARG_EXPR:
11266     case BIND_EXPR:
11267     case INIT_EXPR:
11268     case CONJ_EXPR:
11269     case COMPOUND_EXPR:
11270     case PREINCREMENT_EXPR:
11271     case PREDECREMENT_EXPR:
11272     case POSTINCREMENT_EXPR:
11273     case POSTDECREMENT_EXPR:
11274     case LOOP_EXPR:
11275     case EXIT_EXPR:
11276     case COMPOUND_LITERAL_EXPR:
11277       /* Lowered by gimplify.c.  */
11278       gcc_unreachable ();
11279 
11280     case FDESC_EXPR:
11281       /* Function descriptors are not valid except for as
11282 	 initialization constants, and should not be expanded.  */
11283       gcc_unreachable ();
11284 
11285     case WITH_SIZE_EXPR:
11286       /* WITH_SIZE_EXPR expands to its first argument.  The caller should
11287 	 have pulled out the size to use in whatever context it needed.  */
11288       return expand_expr_real (treeop0, original_target, tmode,
11289 			       modifier, alt_rtl, inner_reference_p);
11290 
11291     default:
11292       return expand_expr_real_2 (&ops, target, tmode, modifier);
11293     }
11294 }
11295 
11296 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
11297    signedness of TYPE), possibly returning the result in TARGET.
11298    TYPE is known to be a partial integer type.  */
11299 static rtx
11300 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
11301 {
11302   HOST_WIDE_INT prec = TYPE_PRECISION (type);
11303   if (target && GET_MODE (target) != GET_MODE (exp))
11304     target = 0;
11305   /* For constant values, reduce using build_int_cst_type. */
11306   if (CONST_INT_P (exp))
11307     {
11308       HOST_WIDE_INT value = INTVAL (exp);
11309       tree t = build_int_cst_type (type, value);
11310       return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
11311     }
11312   else if (TYPE_UNSIGNED (type))
11313     {
11314       scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
11315       rtx mask = immed_wide_int_const
11316 	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
11317       return expand_and (mode, exp, mask, target);
11318     }
11319   else
11320     {
11321       scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
11322       int count = GET_MODE_PRECISION (mode) - prec;
11323       exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
11324       return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
11325     }
11326 }
11327 
11328 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
11329    when applied to the address of EXP produces an address known to be
11330    aligned more than BIGGEST_ALIGNMENT.  */
11331 
11332 static int
11333 is_aligning_offset (const_tree offset, const_tree exp)
11334 {
11335   /* Strip off any conversions.  */
11336   while (CONVERT_EXPR_P (offset))
11337     offset = TREE_OPERAND (offset, 0);
11338 
11339   /* We must now have a BIT_AND_EXPR with a constant that is one less than
11340      power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
11341   if (TREE_CODE (offset) != BIT_AND_EXPR
11342       || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
11343       || compare_tree_int (TREE_OPERAND (offset, 1),
11344 			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
11345       || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
11346     return 0;
11347 
11348   /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
11349      It must be NEGATE_EXPR.  Then strip any more conversions.  */
11350   offset = TREE_OPERAND (offset, 0);
11351   while (CONVERT_EXPR_P (offset))
11352     offset = TREE_OPERAND (offset, 0);
11353 
11354   if (TREE_CODE (offset) != NEGATE_EXPR)
11355     return 0;
11356 
11357   offset = TREE_OPERAND (offset, 0);
11358   while (CONVERT_EXPR_P (offset))
11359     offset = TREE_OPERAND (offset, 0);
11360 
11361   /* This must now be the address of EXP.  */
11362   return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
11363 }
11364 
11365 /* Return the tree node if an ARG corresponds to a string constant or zero
11366    if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
11367    in bytes within the string that ARG is accessing.  The type of the
11368    offset will be `sizetype'.  */
11369 
11370 tree
11371 string_constant (tree arg, tree *ptr_offset)
11372 {
11373   tree array, offset, lower_bound;
11374   STRIP_NOPS (arg);
11375 
11376   if (TREE_CODE (arg) == ADDR_EXPR)
11377     {
11378       if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
11379 	{
11380 	  *ptr_offset = size_zero_node;
11381 	  return TREE_OPERAND (arg, 0);
11382 	}
11383       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
11384 	{
11385 	  array = TREE_OPERAND (arg, 0);
11386 	  offset = size_zero_node;
11387 	}
11388       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
11389 	{
11390 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11391 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11392 	  if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
11393 	    return 0;
11394 
11395 	  /* Check if the array has a nonzero lower bound.  */
11396 	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
11397 	  if (!integer_zerop (lower_bound))
11398 	    {
11399 	      /* If the offset and base aren't both constants, return 0.  */
11400 	      if (TREE_CODE (lower_bound) != INTEGER_CST)
11401 	        return 0;
11402 	      if (TREE_CODE (offset) != INTEGER_CST)
11403 		return 0;
11404 	      /* Adjust offset by the lower bound.  */
11405 	      offset = size_diffop (fold_convert (sizetype, offset),
11406 				    fold_convert (sizetype, lower_bound));
11407 	    }
11408 	}
11409       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
11410 	{
11411 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11412 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11413 	  if (TREE_CODE (array) != ADDR_EXPR)
11414 	    return 0;
11415 	  array = TREE_OPERAND (array, 0);
11416 	  if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
11417 	    return 0;
11418 	}
11419       else
11420 	return 0;
11421     }
11422   else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
11423     {
11424       tree arg0 = TREE_OPERAND (arg, 0);
11425       tree arg1 = TREE_OPERAND (arg, 1);
11426 
11427       STRIP_NOPS (arg0);
11428       STRIP_NOPS (arg1);
11429 
11430       if (TREE_CODE (arg0) == ADDR_EXPR
11431 	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
11432 	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
11433 	{
11434 	  array = TREE_OPERAND (arg0, 0);
11435 	  offset = arg1;
11436 	}
11437       else if (TREE_CODE (arg1) == ADDR_EXPR
11438 	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
11439 		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
11440 	{
11441 	  array = TREE_OPERAND (arg1, 0);
11442 	  offset = arg0;
11443 	}
11444       else
11445 	return 0;
11446     }
11447   else
11448     return 0;
11449 
11450   if (TREE_CODE (array) == STRING_CST)
11451     {
11452       *ptr_offset = fold_convert (sizetype, offset);
11453       return array;
11454     }
11455   else if (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
11456     {
11457       int length;
11458       tree init = ctor_for_folding (array);
11459 
11460       /* Variables initialized to string literals can be handled too.  */
11461       if (init == error_mark_node
11462 	  || !init
11463 	  || TREE_CODE (init) != STRING_CST)
11464 	return 0;
11465 
11466       /* Avoid const char foo[4] = "abcde";  */
11467       if (DECL_SIZE_UNIT (array) == NULL_TREE
11468 	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
11469 	  || (length = TREE_STRING_LENGTH (init)) <= 0
11470 	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
11471 	return 0;
11472 
11473       /* If variable is bigger than the string literal, OFFSET must be constant
11474 	 and inside of the bounds of the string literal.  */
11475       offset = fold_convert (sizetype, offset);
11476       if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
11477 	  && (! tree_fits_uhwi_p (offset)
11478 	      || compare_tree_int (offset, length) >= 0))
11479 	return 0;
11480 
11481       *ptr_offset = offset;
11482       return init;
11483     }
11484 
11485   return 0;
11486 }
11487 
11488 /* Generate code to calculate OPS, and exploded expression
11489    using a store-flag instruction and return an rtx for the result.
11490    OPS reflects a comparison.
11491 
11492    If TARGET is nonzero, store the result there if convenient.
11493 
11494    Return zero if there is no suitable set-flag instruction
11495    available on this machine.
11496 
11497    Once expand_expr has been called on the arguments of the comparison,
11498    we are committed to doing the store flag, since it is not safe to
11499    re-evaluate the expression.  We emit the store-flag insn by calling
11500    emit_store_flag, but only expand the arguments if we have a reason
11501    to believe that emit_store_flag will be successful.  If we think that
11502    it will, but it isn't, we have to simulate the store-flag with a
11503    set/jump/set sequence.  */
11504 
11505 static rtx
11506 do_store_flag (sepops ops, rtx target, machine_mode mode)
11507 {
11508   enum rtx_code code;
11509   tree arg0, arg1, type;
11510   machine_mode operand_mode;
11511   int unsignedp;
11512   rtx op0, op1;
11513   rtx subtarget = target;
11514   location_t loc = ops->location;
11515 
11516   arg0 = ops->op0;
11517   arg1 = ops->op1;
11518 
11519   /* Don't crash if the comparison was erroneous.  */
11520   if (arg0 == error_mark_node || arg1 == error_mark_node)
11521     return const0_rtx;
11522 
11523   type = TREE_TYPE (arg0);
11524   operand_mode = TYPE_MODE (type);
11525   unsignedp = TYPE_UNSIGNED (type);
11526 
11527   /* We won't bother with BLKmode store-flag operations because it would mean
11528      passing a lot of information to emit_store_flag.  */
11529   if (operand_mode == BLKmode)
11530     return 0;
11531 
11532   /* We won't bother with store-flag operations involving function pointers
11533      when function pointers must be canonicalized before comparisons.  */
11534   if (targetm.have_canonicalize_funcptr_for_compare ()
11535       && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11536 	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11537 	       == FUNCTION_TYPE))
11538 	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11539 	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11540 		  == FUNCTION_TYPE))))
11541     return 0;
11542 
11543   STRIP_NOPS (arg0);
11544   STRIP_NOPS (arg1);
11545 
11546   /* For vector typed comparisons emit code to generate the desired
11547      all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
11548      expander for this.  */
11549   if (TREE_CODE (ops->type) == VECTOR_TYPE)
11550     {
11551       tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11552       if (VECTOR_BOOLEAN_TYPE_P (ops->type)
11553 	  && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
11554 	return expand_vec_cmp_expr (ops->type, ifexp, target);
11555       else
11556 	{
11557 	  tree if_true = constant_boolean_node (true, ops->type);
11558 	  tree if_false = constant_boolean_node (false, ops->type);
11559 	  return expand_vec_cond_expr (ops->type, ifexp, if_true,
11560 				       if_false, target);
11561 	}
11562     }
11563 
11564   /* Get the rtx comparison code to use.  We know that EXP is a comparison
11565      operation of some type.  Some comparisons against 1 and -1 can be
11566      converted to comparisons with zero.  Do so here so that the tests
11567      below will be aware that we have a comparison with zero.   These
11568      tests will not catch constants in the first operand, but constants
11569      are rarely passed as the first operand.  */
11570 
11571   switch (ops->code)
11572     {
11573     case EQ_EXPR:
11574       code = EQ;
11575       break;
11576     case NE_EXPR:
11577       code = NE;
11578       break;
11579     case LT_EXPR:
11580       if (integer_onep (arg1))
11581 	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11582       else
11583 	code = unsignedp ? LTU : LT;
11584       break;
11585     case LE_EXPR:
11586       if (! unsignedp && integer_all_onesp (arg1))
11587 	arg1 = integer_zero_node, code = LT;
11588       else
11589 	code = unsignedp ? LEU : LE;
11590       break;
11591     case GT_EXPR:
11592       if (! unsignedp && integer_all_onesp (arg1))
11593 	arg1 = integer_zero_node, code = GE;
11594       else
11595 	code = unsignedp ? GTU : GT;
11596       break;
11597     case GE_EXPR:
11598       if (integer_onep (arg1))
11599 	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11600       else
11601 	code = unsignedp ? GEU : GE;
11602       break;
11603 
11604     case UNORDERED_EXPR:
11605       code = UNORDERED;
11606       break;
11607     case ORDERED_EXPR:
11608       code = ORDERED;
11609       break;
11610     case UNLT_EXPR:
11611       code = UNLT;
11612       break;
11613     case UNLE_EXPR:
11614       code = UNLE;
11615       break;
11616     case UNGT_EXPR:
11617       code = UNGT;
11618       break;
11619     case UNGE_EXPR:
11620       code = UNGE;
11621       break;
11622     case UNEQ_EXPR:
11623       code = UNEQ;
11624       break;
11625     case LTGT_EXPR:
11626       code = LTGT;
11627       break;
11628 
11629     default:
11630       gcc_unreachable ();
11631     }
11632 
11633   /* Put a constant second.  */
11634   if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11635       || TREE_CODE (arg0) == FIXED_CST)
11636     {
11637       std::swap (arg0, arg1);
11638       code = swap_condition (code);
11639     }
11640 
11641   /* If this is an equality or inequality test of a single bit, we can
11642      do this by shifting the bit being tested to the low-order bit and
11643      masking the result with the constant 1.  If the condition was EQ,
11644      we xor it with 1.  This does not require an scc insn and is faster
11645      than an scc insn even if we have it.
11646 
11647      The code to make this transformation was moved into fold_single_bit_test,
11648      so we just call into the folder and expand its result.  */
11649 
11650   if ((code == NE || code == EQ)
11651       && integer_zerop (arg1)
11652       && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11653     {
11654       gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11655       if (srcstmt
11656 	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11657 	{
11658 	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11659 	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11660 	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11661 				       gimple_assign_rhs1 (srcstmt),
11662 				       gimple_assign_rhs2 (srcstmt));
11663 	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11664 	  if (temp)
11665 	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11666 	}
11667     }
11668 
11669   if (! get_subtarget (target)
11670       || GET_MODE (subtarget) != operand_mode)
11671     subtarget = 0;
11672 
11673   expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11674 
11675   if (target == 0)
11676     target = gen_reg_rtx (mode);
11677 
11678   /* Try a cstore if possible.  */
11679   return emit_store_flag_force (target, code, op0, op1,
11680 				operand_mode, unsignedp,
11681 				(TYPE_PRECISION (ops->type) == 1
11682 				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11683 }
11684 
11685 /* Attempt to generate a casesi instruction.  Returns 1 if successful,
11686    0 otherwise (i.e. if there is no casesi instruction).
11687 
11688    DEFAULT_PROBABILITY is the probability of jumping to the default
11689    label.  */
11690 int
11691 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11692 	    rtx table_label, rtx default_label, rtx fallback_label,
11693             profile_probability default_probability)
11694 {
11695   struct expand_operand ops[5];
11696   scalar_int_mode index_mode = SImode;
11697   rtx op1, op2, index;
11698 
11699   if (! targetm.have_casesi ())
11700     return 0;
11701 
11702   /* The index must be some form of integer.  Convert it to SImode.  */
11703   scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
11704   if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
11705     {
11706       rtx rangertx = expand_normal (range);
11707 
11708       /* We must handle the endpoints in the original mode.  */
11709       index_expr = build2 (MINUS_EXPR, index_type,
11710 			   index_expr, minval);
11711       minval = integer_zero_node;
11712       index = expand_normal (index_expr);
11713       if (default_label)
11714         emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11715 				 omode, 1, default_label,
11716                                  default_probability);
11717       /* Now we can safely truncate.  */
11718       index = convert_to_mode (index_mode, index, 0);
11719     }
11720   else
11721     {
11722       if (omode != index_mode)
11723 	{
11724 	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11725 	  index_expr = fold_convert (index_type, index_expr);
11726 	}
11727 
11728       index = expand_normal (index_expr);
11729     }
11730 
11731   do_pending_stack_adjust ();
11732 
11733   op1 = expand_normal (minval);
11734   op2 = expand_normal (range);
11735 
11736   create_input_operand (&ops[0], index, index_mode);
11737   create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11738   create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11739   create_fixed_operand (&ops[3], table_label);
11740   create_fixed_operand (&ops[4], (default_label
11741 				  ? default_label
11742 				  : fallback_label));
11743   expand_jump_insn (targetm.code_for_casesi, 5, ops);
11744   return 1;
11745 }
11746 
11747 /* Attempt to generate a tablejump instruction; same concept.  */
11748 /* Subroutine of the next function.
11749 
11750    INDEX is the value being switched on, with the lowest value
11751    in the table already subtracted.
11752    MODE is its expected mode (needed if INDEX is constant).
11753    RANGE is the length of the jump table.
11754    TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11755 
11756    DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11757    index value is out of range.
11758    DEFAULT_PROBABILITY is the probability of jumping to
11759    the default label.  */
11760 
11761 static void
11762 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11763 	      rtx default_label, profile_probability default_probability)
11764 {
11765   rtx temp, vector;
11766 
11767   if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11768     cfun->cfg->max_jumptable_ents = INTVAL (range);
11769 
11770   /* Do an unsigned comparison (in the proper mode) between the index
11771      expression and the value which represents the length of the range.
11772      Since we just finished subtracting the lower bound of the range
11773      from the index expression, this comparison allows us to simultaneously
11774      check that the original index expression value is both greater than
11775      or equal to the minimum value of the range and less than or equal to
11776      the maximum value of the range.  */
11777 
11778   if (default_label)
11779     emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11780 			     default_label, default_probability);
11781 
11782 
11783   /* If index is in range, it must fit in Pmode.
11784      Convert to Pmode so we can index with it.  */
11785   if (mode != Pmode)
11786     index = convert_to_mode (Pmode, index, 1);
11787 
11788   /* Don't let a MEM slip through, because then INDEX that comes
11789      out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11790      and break_out_memory_refs will go to work on it and mess it up.  */
11791 #ifdef PIC_CASE_VECTOR_ADDRESS
11792   if (flag_pic && !REG_P (index))
11793     index = copy_to_mode_reg (Pmode, index);
11794 #endif
11795 
11796   /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11797      GET_MODE_SIZE, because this indicates how large insns are.  The other
11798      uses should all be Pmode, because they are addresses.  This code
11799      could fail if addresses and insns are not the same size.  */
11800   index = simplify_gen_binary (MULT, Pmode, index,
11801 			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11802 					     Pmode));
11803   index = simplify_gen_binary (PLUS, Pmode, index,
11804 			       gen_rtx_LABEL_REF (Pmode, table_label));
11805 
11806 #ifdef PIC_CASE_VECTOR_ADDRESS
11807   if (flag_pic)
11808     index = PIC_CASE_VECTOR_ADDRESS (index);
11809   else
11810 #endif
11811     index = memory_address (CASE_VECTOR_MODE, index);
11812   temp = gen_reg_rtx (CASE_VECTOR_MODE);
11813   vector = gen_const_mem (CASE_VECTOR_MODE, index);
11814   convert_move (temp, vector, 0);
11815 
11816   emit_jump_insn (targetm.gen_tablejump (temp, table_label));
11817 
11818   /* If we are generating PIC code or if the table is PC-relative, the
11819      table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
11820   if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11821     emit_barrier ();
11822 }
11823 
11824 int
11825 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11826 	       rtx table_label, rtx default_label,
11827 	       profile_probability default_probability)
11828 {
11829   rtx index;
11830 
11831   if (! targetm.have_tablejump ())
11832     return 0;
11833 
11834   index_expr = fold_build2 (MINUS_EXPR, index_type,
11835 			    fold_convert (index_type, index_expr),
11836 			    fold_convert (index_type, minval));
11837   index = expand_normal (index_expr);
11838   do_pending_stack_adjust ();
11839 
11840   do_tablejump (index, TYPE_MODE (index_type),
11841 		convert_modes (TYPE_MODE (index_type),
11842 			       TYPE_MODE (TREE_TYPE (range)),
11843 			       expand_normal (range),
11844 			       TYPE_UNSIGNED (TREE_TYPE (range))),
11845 		table_label, default_label, default_probability);
11846   return 1;
11847 }
11848 
11849 /* Return a CONST_VECTOR rtx representing vector mask for
11850    a VECTOR_CST of booleans.  */
11851 static rtx
11852 const_vector_mask_from_tree (tree exp)
11853 {
11854   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
11855   machine_mode inner = GET_MODE_INNER (mode);
11856 
11857   rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
11858 			      VECTOR_CST_NELTS_PER_PATTERN (exp));
11859   unsigned int count = builder.encoded_nelts ();
11860   for (unsigned int i = 0; i < count; ++i)
11861     {
11862       tree elt = VECTOR_CST_ELT (exp, i);
11863       gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11864       if (integer_zerop (elt))
11865 	builder.quick_push (CONST0_RTX (inner));
11866       else if (integer_onep (elt)
11867 	       || integer_minus_onep (elt))
11868 	builder.quick_push (CONSTM1_RTX (inner));
11869       else
11870 	gcc_unreachable ();
11871     }
11872   return builder.build ();
11873 }
11874 
11875 /* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones.
11876    Return a constant scalar rtx of mode MODE in which bit X is set if element
11877    X of EXP is nonzero.  */
11878 static rtx
11879 const_scalar_mask_from_tree (scalar_int_mode mode, tree exp)
11880 {
11881   wide_int res = wi::zero (GET_MODE_PRECISION (mode));
11882   tree elt;
11883 
11884   /* The result has a fixed number of bits so the input must too.  */
11885   unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
11886   for (unsigned int i = 0; i < nunits; ++i)
11887     {
11888       elt = VECTOR_CST_ELT (exp, i);
11889       gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11890       if (integer_all_onesp (elt))
11891 	res = wi::set_bit (res, i);
11892       else
11893 	gcc_assert (integer_zerop (elt));
11894     }
11895 
11896   return immed_wide_int_const (res, mode);
11897 }
11898 
11899 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
11900 static rtx
11901 const_vector_from_tree (tree exp)
11902 {
11903   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
11904 
11905   if (initializer_zerop (exp))
11906     return CONST0_RTX (mode);
11907 
11908   if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
11909     return const_vector_mask_from_tree (exp);
11910 
11911   machine_mode inner = GET_MODE_INNER (mode);
11912 
11913   rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
11914 			      VECTOR_CST_NELTS_PER_PATTERN (exp));
11915   unsigned int count = builder.encoded_nelts ();
11916   for (unsigned int i = 0; i < count; ++i)
11917     {
11918       tree elt = VECTOR_CST_ELT (exp, i);
11919       if (TREE_CODE (elt) == REAL_CST)
11920 	builder.quick_push (const_double_from_real_value (TREE_REAL_CST (elt),
11921 							  inner));
11922       else if (TREE_CODE (elt) == FIXED_CST)
11923 	builder.quick_push (CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11924 							  inner));
11925       else
11926 	builder.quick_push (immed_wide_int_const (wi::to_poly_wide (elt),
11927 						  inner));
11928     }
11929   return builder.build ();
11930 }
11931 
11932 /* Build a decl for a personality function given a language prefix.  */
11933 
11934 tree
11935 build_personality_function (const char *lang)
11936 {
11937   const char *unwind_and_version;
11938   tree decl, type;
11939   char *name;
11940 
11941   switch (targetm_common.except_unwind_info (&global_options))
11942     {
11943     case UI_NONE:
11944       return NULL;
11945     case UI_SJLJ:
11946       unwind_and_version = "_sj0";
11947       break;
11948     case UI_DWARF2:
11949     case UI_TARGET:
11950       unwind_and_version = "_v0";
11951       break;
11952     case UI_SEH:
11953       unwind_and_version = "_seh0";
11954       break;
11955     default:
11956       gcc_unreachable ();
11957     }
11958 
11959   name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11960 
11961   type = build_function_type_list (integer_type_node, integer_type_node,
11962 				   long_long_unsigned_type_node,
11963 				   ptr_type_node, ptr_type_node, NULL_TREE);
11964   decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11965 		     get_identifier (name), type);
11966   DECL_ARTIFICIAL (decl) = 1;
11967   DECL_EXTERNAL (decl) = 1;
11968   TREE_PUBLIC (decl) = 1;
11969 
11970   /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
11971      are the flags assigned by targetm.encode_section_info.  */
11972   SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11973 
11974   return decl;
11975 }
11976 
11977 /* Extracts the personality function of DECL and returns the corresponding
11978    libfunc.  */
11979 
11980 rtx
11981 get_personality_function (tree decl)
11982 {
11983   tree personality = DECL_FUNCTION_PERSONALITY (decl);
11984   enum eh_personality_kind pk;
11985 
11986   pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11987   if (pk == eh_personality_none)
11988     return NULL;
11989 
11990   if (!personality
11991       && pk == eh_personality_any)
11992     personality = lang_hooks.eh_personality ();
11993 
11994   if (pk == eh_personality_lang)
11995     gcc_assert (personality != NULL_TREE);
11996 
11997   return XEXP (DECL_RTL (personality), 0);
11998 }
11999 
12000 /* Returns a tree for the size of EXP in bytes.  */
12001 
12002 static tree
12003 tree_expr_size (const_tree exp)
12004 {
12005   if (DECL_P (exp)
12006       && DECL_SIZE_UNIT (exp) != 0)
12007     return DECL_SIZE_UNIT (exp);
12008   else
12009     return size_in_bytes (TREE_TYPE (exp));
12010 }
12011 
12012 /* Return an rtx for the size in bytes of the value of EXP.  */
12013 
12014 rtx
12015 expr_size (tree exp)
12016 {
12017   tree size;
12018 
12019   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
12020     size = TREE_OPERAND (exp, 1);
12021   else
12022     {
12023       size = tree_expr_size (exp);
12024       gcc_assert (size);
12025       gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
12026     }
12027 
12028   return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
12029 }
12030 
12031 /* Return a wide integer for the size in bytes of the value of EXP, or -1
12032    if the size can vary or is larger than an integer.  */
12033 
12034 static HOST_WIDE_INT
12035 int_expr_size (tree exp)
12036 {
12037   tree size;
12038 
12039   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
12040     size = TREE_OPERAND (exp, 1);
12041   else
12042     {
12043       size = tree_expr_size (exp);
12044       gcc_assert (size);
12045     }
12046 
12047   if (size == 0 || !tree_fits_shwi_p (size))
12048     return -1;
12049 
12050   return tree_to_shwi (size);
12051 }
12052