1 /* Convert tree expression to rtl instructions, for GNU compiler.
2    Copyright (C) 1988-2014 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "langhooks.h"
47 #include "intl.h"
48 #include "tm_p.h"
49 #include "tree-iterator.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-expr.h"
54 #include "is-a.h"
55 #include "gimple.h"
56 #include "gimple-ssa.h"
57 #include "cgraph.h"
58 #include "tree-ssanames.h"
59 #include "target.h"
60 #include "common/common-target.h"
61 #include "timevar.h"
62 #include "df.h"
63 #include "diagnostic.h"
64 #include "tree-ssa-live.h"
65 #include "tree-outof-ssa.h"
66 #include "target-globals.h"
67 #include "params.h"
68 #include "tree-ssa-address.h"
69 #include "cfgexpand.h"
70 
71 /* Decide whether a function's arguments should be processed
72    from first to last or from last to first.
73 
74    They should if the stack and args grow in opposite directions, but
75    only if we have push insns.  */
76 
77 #ifdef PUSH_ROUNDING
78 
79 #ifndef PUSH_ARGS_REVERSED
80 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
81 #define PUSH_ARGS_REVERSED	/* If it's last to first.  */
82 #endif
83 #endif
84 
85 #endif
86 
87 #ifndef STACK_PUSH_CODE
88 #ifdef STACK_GROWS_DOWNWARD
89 #define STACK_PUSH_CODE PRE_DEC
90 #else
91 #define STACK_PUSH_CODE PRE_INC
92 #endif
93 #endif
94 
95 
96 /* If this is nonzero, we do not bother generating VOLATILE
97    around volatile memory references, and we are willing to
98    output indirect addresses.  If cse is to follow, we reject
99    indirect addresses so a useful potential cse is generated;
100    if it is used only once, instruction combination will produce
101    the same indirect address eventually.  */
102 int cse_not_expected;
103 
104 /* This structure is used by move_by_pieces to describe the move to
105    be performed.  */
106 struct move_by_pieces_d
107 {
108   rtx to;
109   rtx to_addr;
110   int autinc_to;
111   int explicit_inc_to;
112   rtx from;
113   rtx from_addr;
114   int autinc_from;
115   int explicit_inc_from;
116   unsigned HOST_WIDE_INT len;
117   HOST_WIDE_INT offset;
118   int reverse;
119 };
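/* Roughly, the fields above mean: TO and FROM are the BLKmode MEMs being
   copied (TO may be null when pushing onto the stack); TO_ADDR and
   FROM_ADDR hold the running addresses; AUTINC_TO and AUTINC_FROM are
   nonzero when the corresponding address advances as the copy proceeds,
   with EXPLICIT_INC_TO/EXPLICIT_INC_FROM set to -1 or +1 when explicit
   add insns must be emitted around each piece; LEN is the number of
   bytes still to copy, OFFSET the current byte offset, and REVERSE is
   nonzero to copy from the highest address downward.  */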
120 
121 /* This structure is used by store_by_pieces to describe the clear to
122    be performed.  */
123 
124 struct store_by_pieces_d
125 {
126   rtx to;
127   rtx to_addr;
128   int autinc_to;
129   int explicit_inc_to;
130   unsigned HOST_WIDE_INT len;
131   HOST_WIDE_INT offset;
132   rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
133   void *constfundata;
134   int reverse;
135 };
136 
137 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
138 			      struct move_by_pieces_d *);
139 static bool block_move_libcall_safe_for_call_parm (void);
140 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
141 					unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
142 					unsigned HOST_WIDE_INT);
143 static tree emit_block_move_libcall_fn (int);
144 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
145 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
146 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
147 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
148 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
149 			       struct store_by_pieces_d *);
150 static tree clear_storage_libcall_fn (int);
151 static rtx compress_float_constant (rtx, rtx);
152 static rtx get_subtarget (rtx);
153 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
154 				     HOST_WIDE_INT, enum machine_mode,
155 				     tree, int, alias_set_type);
156 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
157 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
158 			unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
159 			enum machine_mode, tree, alias_set_type, bool);
160 
161 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
162 
163 static int is_aligning_offset (const_tree, const_tree);
164 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
165 			     enum expand_modifier);
166 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
167 static rtx do_store_flag (sepops, rtx, enum machine_mode);
168 #ifdef PUSH_ROUNDING
169 static void emit_single_push_insn (enum machine_mode, rtx, tree);
170 #endif
171 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
172 static rtx const_vector_from_tree (tree);
173 static void write_complex_part (rtx, rtx, bool);
174 
175 /* This macro is used to determine whether move_by_pieces should be called
176    to perform a structure copy.  */
177 #ifndef MOVE_BY_PIECES_P
178 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
179   (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
180    < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
181 #endif
182 
183 /* This macro is used to determine whether clear_by_pieces should be
184    called to clear storage.  */
185 #ifndef CLEAR_BY_PIECES_P
186 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
187   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
188    < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
189 #endif
190 
191 /* This macro is used to determine whether store_by_pieces should be
192    called to "memset" storage with byte values other than zero.  */
193 #ifndef SET_BY_PIECES_P
194 #define SET_BY_PIECES_P(SIZE, ALIGN) \
195   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196    < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
197 #endif
198 
199 /* This macro is used to determine whether store_by_pieces should be
200    called to "memcpy" storage when the source is a constant string.  */
201 #ifndef STORE_BY_PIECES_P
202 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
203   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
204    < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
205 #endif
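/* As an illustrative sketch of how these predicates are consulted (see
   emit_block_move_hints further down), a constant-size copy is expanded
   inline only when the estimated insn count is below the target ratio:

     if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   otherwise the expansion falls back to a movmem pattern or a library
   call.  */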
206 
207 /* This is run to set up which modes can be used
208    directly in memory and to initialize the block move optab.  It is run
209    at the beginning of compilation and when the target is reinitialized.  */
210 
211 void
212 init_expr_target (void)
213 {
214   rtx insn, pat;
215   enum machine_mode mode;
216   int num_clobbers;
217   rtx mem, mem1;
218   rtx reg;
219 
220   /* Try indexing by frame ptr and try by stack ptr.
221      It is known that on the Convex the stack ptr isn't a valid index.
222      With luck, one or the other is valid on any machine.  */
223   mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
224   mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
225 
226   /* A scratch register we can modify in-place below to avoid
227      useless RTL allocations.  */
228   reg = gen_rtx_REG (VOIDmode, -1);
229 
230   insn = rtx_alloc (INSN);
231   pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
232   PATTERN (insn) = pat;
233 
234   for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
235        mode = (enum machine_mode) ((int) mode + 1))
236     {
237       int regno;
238 
239       direct_load[(int) mode] = direct_store[(int) mode] = 0;
240       PUT_MODE (mem, mode);
241       PUT_MODE (mem1, mode);
242       PUT_MODE (reg, mode);
243 
244       /* See if there is some register that can be used in this mode and
245 	 directly loaded or stored from memory.  */
246 
247       if (mode != VOIDmode && mode != BLKmode)
248 	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
249 	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
250 	     regno++)
251 	  {
252 	    if (! HARD_REGNO_MODE_OK (regno, mode))
253 	      continue;
254 
255 	    SET_REGNO (reg, regno);
256 
257 	    SET_SRC (pat) = mem;
258 	    SET_DEST (pat) = reg;
259 	    if (recog (pat, insn, &num_clobbers) >= 0)
260 	      direct_load[(int) mode] = 1;
261 
262 	    SET_SRC (pat) = mem1;
263 	    SET_DEST (pat) = reg;
264 	    if (recog (pat, insn, &num_clobbers) >= 0)
265 	      direct_load[(int) mode] = 1;
266 
267 	    SET_SRC (pat) = reg;
268 	    SET_DEST (pat) = mem;
269 	    if (recog (pat, insn, &num_clobbers) >= 0)
270 	      direct_store[(int) mode] = 1;
271 
272 	    SET_SRC (pat) = reg;
273 	    SET_DEST (pat) = mem1;
274 	    if (recog (pat, insn, &num_clobbers) >= 0)
275 	      direct_store[(int) mode] = 1;
276 	  }
277     }
278 
279   mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
280 
281   for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
282        mode = GET_MODE_WIDER_MODE (mode))
283     {
284       enum machine_mode srcmode;
285       for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
286 	   srcmode = GET_MODE_WIDER_MODE (srcmode))
287 	{
288 	  enum insn_code ic;
289 
290 	  ic = can_extend_p (mode, srcmode, 0);
291 	  if (ic == CODE_FOR_nothing)
292 	    continue;
293 
294 	  PUT_MODE (mem, srcmode);
295 
296 	  if (insn_operand_matches (ic, 1, mem))
297 	    float_extend_from_mem[mode][srcmode] = true;
298 	}
299     }
300 }
301 
302 /* This is run at the start of compiling a function.  */
303 
304 void
305 init_expr (void)
306 {
307   memset (&crtl->expr, 0, sizeof (crtl->expr));
308 }
309 
310 /* Copy data from FROM to TO, where the machine modes are not the same.
311    Both modes may be integer, or both may be floating, or both may be
312    fixed-point.
313    UNSIGNEDP should be nonzero if FROM is an unsigned type.
314    This causes zero-extension instead of sign-extension.  */
315 
316 void
317 convert_move (rtx to, rtx from, int unsignedp)
318 {
319   enum machine_mode to_mode = GET_MODE (to);
320   enum machine_mode from_mode = GET_MODE (from);
321   int to_real = SCALAR_FLOAT_MODE_P (to_mode);
322   int from_real = SCALAR_FLOAT_MODE_P (from_mode);
323   enum insn_code code;
324   rtx libcall;
325 
326   /* rtx code for making an equivalent value.  */
327   enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
328 			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
329 
330 
331   gcc_assert (to_real == from_real);
332   gcc_assert (to_mode != BLKmode);
333   gcc_assert (from_mode != BLKmode);
334 
335   /* If the source and destination are already the same, then there's
336      nothing to do.  */
337   if (to == from)
338     return;
339 
340   /* If FROM is a SUBREG that indicates that we have already done at least
341      the required extension, strip it.  We don't handle such SUBREGs as
342      TO here.  */
343 
344   if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
345       && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
346 	  >= GET_MODE_PRECISION (to_mode))
347       && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
348     from = gen_lowpart (to_mode, from), from_mode = to_mode;
349 
350   gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
351 
352   if (to_mode == from_mode
353       || (from_mode == VOIDmode && CONSTANT_P (from)))
354     {
355       emit_move_insn (to, from);
356       return;
357     }
358 
359   if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
360     {
361       gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
362 
363       if (VECTOR_MODE_P (to_mode))
364 	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
365       else
366 	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
367 
368       emit_move_insn (to, from);
369       return;
370     }
371 
372   if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
373     {
374       convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
375       convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
376       return;
377     }
378 
379   if (to_real)
380     {
381       rtx value, insns;
382       convert_optab tab;
383 
384       gcc_assert ((GET_MODE_PRECISION (from_mode)
385 		   != GET_MODE_PRECISION (to_mode))
386 		  || (DECIMAL_FLOAT_MODE_P (from_mode)
387 		      != DECIMAL_FLOAT_MODE_P (to_mode)));
388 
389       if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
390 	/* Conversion between decimal float and binary float, same size.  */
391 	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
392       else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
393 	tab = sext_optab;
394       else
395 	tab = trunc_optab;
396 
397       /* Try converting directly if the insn is supported.  */
398 
399       code = convert_optab_handler (tab, to_mode, from_mode);
400       if (code != CODE_FOR_nothing)
401 	{
402 	  emit_unop_insn (code, to, from,
403 			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
404 	  return;
405 	}
406 
407       /* Otherwise use a libcall.  */
408       libcall = convert_optab_libfunc (tab, to_mode, from_mode);
409 
410       /* Is this conversion implemented yet?  */
411       gcc_assert (libcall);
412 
413       start_sequence ();
414       value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
415 				       1, from, from_mode);
416       insns = get_insns ();
417       end_sequence ();
418       emit_libcall_block (insns, to, value,
419 			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
420 								       from)
421 			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
422       return;
423     }
424 
425   /* Handle pointer conversion.  */			/* SPEE 900220.  */
426   /* Targets are expected to provide conversion insns between PxImode and
427      xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
428   if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
429     {
430       enum machine_mode full_mode
431 	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
432 
433       gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
434 		  != CODE_FOR_nothing);
435 
436       if (full_mode != from_mode)
437 	from = convert_to_mode (full_mode, from, unsignedp);
438       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
439 		      to, from, UNKNOWN);
440       return;
441     }
442   if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
443     {
444       rtx new_from;
445       enum machine_mode full_mode
446 	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
447       convert_optab ctab = unsignedp ? zext_optab : sext_optab;
448       enum insn_code icode;
449 
450       icode = convert_optab_handler (ctab, full_mode, from_mode);
451       gcc_assert (icode != CODE_FOR_nothing);
452 
453       if (to_mode == full_mode)
454 	{
455 	  emit_unop_insn (icode, to, from, UNKNOWN);
456 	  return;
457 	}
458 
459       new_from = gen_reg_rtx (full_mode);
460       emit_unop_insn (icode, new_from, from, UNKNOWN);
461 
462       /* else proceed to integer conversions below.  */
463       from_mode = full_mode;
464       from = new_from;
465     }
466 
467    /* Make sure both are fixed-point modes or both are not.  */
468    gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
469 	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
470    if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
471     {
472       /* If we widen from_mode to to_mode and they are in the same class,
473 	 we won't saturate the result.
474 	 Otherwise, always saturate the result to play safe.  */
475       if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
476 	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
477 	expand_fixed_convert (to, from, 0, 0);
478       else
479 	expand_fixed_convert (to, from, 0, 1);
480       return;
481     }
482 
483   /* Now both modes are integers.  */
484 
485   /* Handle expanding beyond a word.  */
486   if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
487       && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
488     {
489       rtx insns;
490       rtx lowpart;
491       rtx fill_value;
492       rtx lowfrom;
493       int i;
494       enum machine_mode lowpart_mode;
495       int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
496 
497       /* Try converting directly if the insn is supported.  */
498       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
499 	  != CODE_FOR_nothing)
500 	{
501 	  /* If FROM is a SUBREG, put it into a register.  Do this
502 	     so that we always generate the same set of insns for
503 	     better cse'ing; if an intermediate assignment occurred,
504 	     we won't be doing the operation directly on the SUBREG.  */
505 	  if (optimize > 0 && GET_CODE (from) == SUBREG)
506 	    from = force_reg (from_mode, from);
507 	  emit_unop_insn (code, to, from, equiv_code);
508 	  return;
509 	}
510       /* Next, try converting via full word.  */
511       else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
512 	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
513 		   != CODE_FOR_nothing))
514 	{
515 	  rtx word_to = gen_reg_rtx (word_mode);
516 	  if (REG_P (to))
517 	    {
518 	      if (reg_overlap_mentioned_p (to, from))
519 		from = force_reg (from_mode, from);
520 	      emit_clobber (to);
521 	    }
522 	  convert_move (word_to, from, unsignedp);
523 	  emit_unop_insn (code, to, word_to, equiv_code);
524 	  return;
525 	}
526 
527       /* No special multiword conversion insn; do it by hand.  */
528       start_sequence ();
529 
530       /* Since we will turn this into a no conflict block, we must ensure
531          that the source does not overlap the target, so force it into an
532          isolated register when it might.  Likewise for any MEM input, since
533          the conversion sequence might require several references to it and
534          we must ensure we're getting the same value every time.  */
535 
536       if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
537 	from = force_reg (from_mode, from);
538 
539       /* Get a copy of FROM widened to a word, if necessary.  */
540       if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
541 	lowpart_mode = word_mode;
542       else
543 	lowpart_mode = from_mode;
544 
545       lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
546 
547       lowpart = gen_lowpart (lowpart_mode, to);
548       emit_move_insn (lowpart, lowfrom);
549 
550       /* Compute the value to put in each remaining word.  */
551       if (unsignedp)
552 	fill_value = const0_rtx;
553       else
554 	fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
555 					    LT, lowfrom, const0_rtx,
556 					    lowpart_mode, 0, -1);
557 
558       /* Fill the remaining words.  */
559       for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
560 	{
561 	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
562 	  rtx subword = operand_subword (to, index, 1, to_mode);
563 
564 	  gcc_assert (subword);
565 
566 	  if (fill_value != subword)
567 	    emit_move_insn (subword, fill_value);
568 	}
569 
570       insns = get_insns ();
571       end_sequence ();
572 
573       emit_insn (insns);
574       return;
575     }
576 
577   /* Truncating multi-word to a word or less.  */
578   if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
579       && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
580     {
581       if (!((MEM_P (from)
582 	     && ! MEM_VOLATILE_P (from)
583 	     && direct_load[(int) to_mode]
584 	     && ! mode_dependent_address_p (XEXP (from, 0),
585 					    MEM_ADDR_SPACE (from)))
586 	    || REG_P (from)
587 	    || GET_CODE (from) == SUBREG))
588 	from = force_reg (from_mode, from);
589       convert_move (to, gen_lowpart (word_mode, from), 0);
590       return;
591     }
592 
593   /* Now follow all the conversions between integers
594      no more than a word long.  */
595 
596   /* For truncation, usually we can just refer to FROM in a narrower mode.  */
597   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
598       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
599     {
600       if (!((MEM_P (from)
601 	     && ! MEM_VOLATILE_P (from)
602 	     && direct_load[(int) to_mode]
603 	     && ! mode_dependent_address_p (XEXP (from, 0),
604 					    MEM_ADDR_SPACE (from)))
605 	    || REG_P (from)
606 	    || GET_CODE (from) == SUBREG))
607 	from = force_reg (from_mode, from);
608       if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
609 	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
610 	from = copy_to_reg (from);
611       emit_move_insn (to, gen_lowpart (to_mode, from));
612       return;
613     }
614 
615   /* Handle extension.  */
616   if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
617     {
618       /* Convert directly if that works.  */
619       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
620 	  != CODE_FOR_nothing)
621 	{
622 	  emit_unop_insn (code, to, from, equiv_code);
623 	  return;
624 	}
625       else
626 	{
627 	  enum machine_mode intermediate;
628 	  rtx tmp;
629 	  int shift_amount;
630 
631 	  /* Search for a mode to convert via.  */
632 	  for (intermediate = from_mode; intermediate != VOIDmode;
633 	       intermediate = GET_MODE_WIDER_MODE (intermediate))
634 	    if (((can_extend_p (to_mode, intermediate, unsignedp)
635 		  != CODE_FOR_nothing)
636 		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
637 		     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
638 		&& (can_extend_p (intermediate, from_mode, unsignedp)
639 		    != CODE_FOR_nothing))
640 	      {
641 		convert_move (to, convert_to_mode (intermediate, from,
642 						   unsignedp), unsignedp);
643 		return;
644 	      }
645 
646 	  /* No suitable intermediate mode.
647 	     Generate what we need with	shifts.  */
648 	  shift_amount = (GET_MODE_PRECISION (to_mode)
649 			  - GET_MODE_PRECISION (from_mode));
650 	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
651 	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
652 			      to, unsignedp);
653 	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
654 			      to, unsignedp);
655 	  if (tmp != to)
656 	    emit_move_insn (to, tmp);
657 	  return;
658 	}
659     }
660 
661   /* Support special truncate insns for certain modes.  */
662   if (convert_optab_handler (trunc_optab, to_mode,
663 			     from_mode) != CODE_FOR_nothing)
664     {
665       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
666 		      to, from, UNKNOWN);
667       return;
668     }
669 
670   /* Handle truncation of volatile memrefs, and so on;
671      the things that couldn't be truncated directly,
672      and for which there was no special instruction.
673 
674      ??? Code above formerly short-circuited this, for most integer
675      mode pairs, with a force_reg in from_mode followed by a recursive
676      call to this routine.  Appears always to have been wrong.  */
677   if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
678     {
679       rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
680       emit_move_insn (to, temp);
681       return;
682     }
683 
684   /* Mode combination is not recognized.  */
685   gcc_unreachable ();
686 }
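/* A minimal usage sketch (assuming SRC is an existing SImode rtx): a
   typical caller widens a value into a fresh pseudo of the desired mode,

     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 1);

   which zero-extends SRC into DST because UNSIGNEDP is nonzero; passing
   0 would emit a sign extension instead.  convert_modes below wraps this
   pattern for callers that cannot simply reuse the value in place.  */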
687 
688 /* Return an rtx for a value that would result
689    from converting X to mode MODE.
690    Both X and MODE may be floating, or both integer.
691    UNSIGNEDP is nonzero if X is an unsigned value.
692    This can be done by referring to a part of X in place
693    or by copying to a new temporary with conversion.  */
694 
695 rtx
696 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
697 {
698   return convert_modes (mode, VOIDmode, x, unsignedp);
699 }
700 
701 /* Return an rtx for a value that would result
702    from converting X from mode OLDMODE to mode MODE.
703    Both modes may be floating, or both integer.
704    UNSIGNEDP is nonzero if X is an unsigned value.
705 
706    This can be done by referring to a part of X in place
707    or by copying to a new temporary with conversion.
708 
709    You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
710 
711 rtx
712 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
713 {
714   rtx temp;
715 
716   /* If FROM is a SUBREG that indicates that we have already done at least
717      the required extension, strip it.  */
718 
719   if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
720       && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
721       && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
722     x = gen_lowpart (mode, SUBREG_REG (x));
723 
724   if (GET_MODE (x) != VOIDmode)
725     oldmode = GET_MODE (x);
726 
727   if (mode == oldmode)
728     return x;
729 
730   /* There is one case that we must handle specially: If we are converting
731      a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
732      we are to interpret the constant as unsigned, gen_lowpart will do the
733      wrong thing if the constant appears negative.  What we want to do is
734      make the high-order word of the constant zero, not all ones.  */
735 
736   if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
737       && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
738       && CONST_INT_P (x) && INTVAL (x) < 0)
739     {
740       double_int val = double_int::from_uhwi (INTVAL (x));
741 
742       /* We need to zero extend VAL.  */
743       if (oldmode != VOIDmode)
744 	val = val.zext (GET_MODE_BITSIZE (oldmode));
745 
746       return immed_double_int_const (val, mode);
747     }
748 
749   /* We can do this with a gen_lowpart if both desired and current modes
750      are integer, and this is either a constant integer, a register, or a
751      non-volatile MEM.  Except for the constant case where MODE is no
752      wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
753 
754   if ((CONST_INT_P (x)
755        && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
756       || (GET_MODE_CLASS (mode) == MODE_INT
757 	  && GET_MODE_CLASS (oldmode) == MODE_INT
758 	  && (CONST_DOUBLE_AS_INT_P (x)
759 	      || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
760 		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
761 		       && direct_load[(int) mode])
762 		      || (REG_P (x)
763 			  && (! HARD_REGISTER_P (x)
764 			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
765 			  && TRULY_NOOP_TRUNCATION_MODES_P (mode,
766 							    GET_MODE (x))))))))
767     {
768       /* ?? If we don't know OLDMODE, we have to assume here that
769 	 X does not need sign- or zero-extension.   This may not be
770 	 the case, but it's the best we can do.  */
771       if (CONST_INT_P (x) && oldmode != VOIDmode
772 	  && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
773 	{
774 	  HOST_WIDE_INT val = INTVAL (x);
775 
776 	  /* We must sign or zero-extend in this case.  Start by
777 	     zero-extending, then sign extend if we need to.  */
778 	  val &= GET_MODE_MASK (oldmode);
779 	  if (! unsignedp
780 	      && val_signbit_known_set_p (oldmode, val))
781 	    val |= ~GET_MODE_MASK (oldmode);
782 
783 	  return gen_int_mode (val, mode);
784 	}
785 
786       return gen_lowpart (mode, x);
787     }
788 
789   /* Converting from an integer constant into MODE is always equivalent to
790      a subreg operation.  */
791   if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
792     {
793       gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
794       return simplify_gen_subreg (mode, x, oldmode, 0);
795     }
796 
797   temp = gen_reg_rtx (mode);
798   convert_move (temp, x, unsignedp);
799   return temp;
800 }
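/* Illustrative use: emit_block_move_via_loop below relies on this routine
   to bring a loop counter into an address mode,

     tmp = convert_modes (x_addr_mode, iter_mode, iter, true);

   returning ITER unchanged when the modes already agree, referring to a
   low part of it when a truncation or constant re-interpretation
   suffices, and otherwise emitting a convert_move into a new pseudo.  */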
801 
802 /* Return the largest alignment we can use for doing a move (or store)
803    of MAX_PIECES.  ALIGN is the largest alignment we could use.  */
804 
805 static unsigned int
806 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
807 {
808   enum machine_mode tmode;
809 
810   tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
811   if (align >= GET_MODE_ALIGNMENT (tmode))
812     align = GET_MODE_ALIGNMENT (tmode);
813   else
814     {
815       enum machine_mode tmode, xmode;
816 
817       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
818 	   tmode != VOIDmode;
819 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
820 	if (GET_MODE_SIZE (tmode) > max_pieces
821 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
822 	  break;
823 
824       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
825     }
826 
827   return align;
828 }
829 
830 /* Return the widest integer mode whose size is strictly less than SIZE
831    bytes.  If no such mode can be found, return VOIDmode.  */
832 
833 static enum machine_mode
834 widest_int_mode_for_size (unsigned int size)
835 {
836   enum machine_mode tmode, mode = VOIDmode;
837 
838   for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
839        tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
840     if (GET_MODE_SIZE (tmode) < size)
841       mode = tmode;
842 
843   return mode;
844 }
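/* Note the strict "<" above: callers that want the widest mode of at most
   N bytes pass N + 1, e.g. move_by_pieces starts from

     unsigned int max_size = MOVE_MAX_PIECES + 1;
     enum machine_mode mode = widest_int_mode_for_size (max_size);

   and then shrinks MAX_SIZE to GET_MODE_SIZE (mode) on each iteration so
   that successive calls step down through strictly narrower modes.  */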
845 
846 /* STORE_MAX_PIECES is the number of bytes at a time that we can
847    store efficiently.  Due to internal GCC limitations, this is
848    MOVE_MAX_PIECES limited by the number of bytes GCC can represent
849    for an immediate constant.  */
850 
851 #define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
852 
853 /* Determine whether the LEN bytes can be moved by using several move
854    instructions.  Return nonzero if a call to move_by_pieces should
855    succeed.  */
856 
857 int
858 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
859 		    unsigned int align ATTRIBUTE_UNUSED)
860 {
861   return MOVE_BY_PIECES_P (len, align);
862 }
863 
864 /* Generate several move instructions to copy LEN bytes from block FROM to
865    block TO.  (These are MEM rtx's with BLKmode).
866 
867    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
868    used to push FROM to the stack.
869 
870    ALIGN is maximum stack alignment we can assume.
871 
872    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
873    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
874    stpcpy.  */
875 
876 rtx
877 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
878 		unsigned int align, int endp)
879 {
880   struct move_by_pieces_d data;
881   enum machine_mode to_addr_mode;
882   enum machine_mode from_addr_mode = get_address_mode (from);
883   rtx to_addr, from_addr = XEXP (from, 0);
884   unsigned int max_size = MOVE_MAX_PIECES + 1;
885   enum insn_code icode;
886 
887   align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
888 
889   data.offset = 0;
890   data.from_addr = from_addr;
891   if (to)
892     {
893       to_addr_mode = get_address_mode (to);
894       to_addr = XEXP (to, 0);
895       data.to = to;
896       data.autinc_to
897 	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
898 	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
899       data.reverse
900 	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
901     }
902   else
903     {
904       to_addr_mode = VOIDmode;
905       to_addr = NULL_RTX;
906       data.to = NULL_RTX;
907       data.autinc_to = 1;
908 #ifdef STACK_GROWS_DOWNWARD
909       data.reverse = 1;
910 #else
911       data.reverse = 0;
912 #endif
913     }
914   data.to_addr = to_addr;
915   data.from = from;
916   data.autinc_from
917     = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
918        || GET_CODE (from_addr) == POST_INC
919        || GET_CODE (from_addr) == POST_DEC);
920 
921   data.explicit_inc_from = 0;
922   data.explicit_inc_to = 0;
923   if (data.reverse) data.offset = len;
924   data.len = len;
925 
926   /* If copying requires more than two move insns,
927      copy addresses to registers (to make displacements shorter)
928      and use post-increment if available.  */
929   if (!(data.autinc_from && data.autinc_to)
930       && move_by_pieces_ninsns (len, align, max_size) > 2)
931     {
932       /* Find the mode of the largest move...
933 	 MODE might not be used depending on the definitions of the
934 	 USE_* macros below.  */
935       enum machine_mode mode ATTRIBUTE_UNUSED
936 	= widest_int_mode_for_size (max_size);
937 
938       if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
939 	{
940 	  data.from_addr = copy_to_mode_reg (from_addr_mode,
941 					     plus_constant (from_addr_mode,
942 							    from_addr, len));
943 	  data.autinc_from = 1;
944 	  data.explicit_inc_from = -1;
945 	}
946       if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
947 	{
948 	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
949 	  data.autinc_from = 1;
950 	  data.explicit_inc_from = 1;
951 	}
952       if (!data.autinc_from && CONSTANT_P (from_addr))
953 	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
954       if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
955 	{
956 	  data.to_addr = copy_to_mode_reg (to_addr_mode,
957 					   plus_constant (to_addr_mode,
958 							  to_addr, len));
959 	  data.autinc_to = 1;
960 	  data.explicit_inc_to = -1;
961 	}
962       if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
963 	{
964 	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
965 	  data.autinc_to = 1;
966 	  data.explicit_inc_to = 1;
967 	}
968       if (!data.autinc_to && CONSTANT_P (to_addr))
969 	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
970     }
971 
972   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
973 
974   /* First move what we can in the largest integer mode, then go to
975      successively smaller modes.  */
976 
977   while (max_size > 1 && data.len > 0)
978     {
979       enum machine_mode mode = widest_int_mode_for_size (max_size);
980 
981       if (mode == VOIDmode)
982 	break;
983 
984       icode = optab_handler (mov_optab, mode);
985       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
986 	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
987 
988       max_size = GET_MODE_SIZE (mode);
989     }
990 
991   /* The code above should have handled everything.  */
992   gcc_assert (!data.len);
993 
994   if (endp)
995     {
996       rtx to1;
997 
998       gcc_assert (!data.reverse);
999       if (data.autinc_to)
1000 	{
1001 	  if (endp == 2)
1002 	    {
1003 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1004 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1005 	      else
1006 		data.to_addr = copy_to_mode_reg (to_addr_mode,
1007 						 plus_constant (to_addr_mode,
1008 								data.to_addr,
1009 								-1));
1010 	    }
1011 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1012 					   data.offset);
1013 	}
1014       else
1015 	{
1016 	  if (endp == 2)
1017 	    --data.offset;
1018 	  to1 = adjust_address (data.to, QImode, data.offset);
1019 	}
1020       return to1;
1021     }
1022   else
1023     return data.to;
1024 }
1025 
1026 /* Return number of insns required to move L bytes by pieces.
1027    ALIGN (in bits) is maximum alignment we can assume.  */
1028 
1029 unsigned HOST_WIDE_INT
1030 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1031 		       unsigned int max_size)
1032 {
1033   unsigned HOST_WIDE_INT n_insns = 0;
1034 
1035   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1036 
1037   while (max_size > 1 && l > 0)
1038     {
1039       enum machine_mode mode;
1040       enum insn_code icode;
1041 
1042       mode = widest_int_mode_for_size (max_size);
1043 
1044       if (mode == VOIDmode)
1045 	break;
1046 
1047       icode = optab_handler (mov_optab, mode);
1048       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1049 	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1050 
1051       max_size = GET_MODE_SIZE (mode);
1052     }
1053 
1054   gcc_assert (!l);
1055   return n_insns;
1056 }
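/* A worked example (assuming a target where MOVE_MAX_PIECES is 8, the
   QI/HI/SI/DImode move patterns all exist and ALIGN is sufficient): for
   L = 11 and MAX_SIZE = MOVE_MAX_PIECES + 1 the loop above counts one
   DImode move (8 bytes), skips SImode (only 3 bytes remain), then counts
   one HImode and one QImode move, so move_by_pieces_ninsns returns 3 and
   MOVE_BY_PIECES_P succeeds whenever MOVE_RATIO is larger than 3.  */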
1057 
1058 /* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1059    with move instructions for mode MODE.  GENFUN is the gen_... function
1060    to make a move insn for that mode.  DATA has all the other info.  */
1061 
1062 static void
1063 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1064 		  struct move_by_pieces_d *data)
1065 {
1066   unsigned int size = GET_MODE_SIZE (mode);
1067   rtx to1 = NULL_RTX, from1;
1068 
1069   while (data->len >= size)
1070     {
1071       if (data->reverse)
1072 	data->offset -= size;
1073 
1074       if (data->to)
1075 	{
1076 	  if (data->autinc_to)
1077 	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1078 					     data->offset);
1079 	  else
1080 	    to1 = adjust_address (data->to, mode, data->offset);
1081 	}
1082 
1083       if (data->autinc_from)
1084 	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1085 					   data->offset);
1086       else
1087 	from1 = adjust_address (data->from, mode, data->offset);
1088 
1089       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1090 	emit_insn (gen_add2_insn (data->to_addr,
1091 				  gen_int_mode (-(HOST_WIDE_INT) size,
1092 						GET_MODE (data->to_addr))));
1093       if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1094 	emit_insn (gen_add2_insn (data->from_addr,
1095 				  gen_int_mode (-(HOST_WIDE_INT) size,
1096 						GET_MODE (data->from_addr))));
1097 
1098       if (data->to)
1099 	emit_insn ((*genfun) (to1, from1));
1100       else
1101 	{
1102 #ifdef PUSH_ROUNDING
1103 	  emit_single_push_insn (mode, from1, NULL);
1104 #else
1105 	  gcc_unreachable ();
1106 #endif
1107 	}
1108 
1109       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1110 	emit_insn (gen_add2_insn (data->to_addr,
1111 				  gen_int_mode (size,
1112 						GET_MODE (data->to_addr))));
1113       if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1114 	emit_insn (gen_add2_insn (data->from_addr,
1115 				  gen_int_mode (size,
1116 						GET_MODE (data->from_addr))));
1117 
1118       if (! data->reverse)
1119 	data->offset += size;
1120 
1121       data->len -= size;
1122     }
1123 }
1124 
1125 /* Emit code to move a block Y to a block X.  This may be done with
1126    string-move instructions, with multiple scalar move instructions,
1127    or with a library call.
1128 
1129    Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1130    SIZE is an rtx that says how long they are.
1131    ALIGN is the maximum alignment we can assume they have.
1132    METHOD describes what kind of copy this is, and what mechanisms may be used.
1133    MIN_SIZE is the minimal size of the block to move.
1134    MAX_SIZE is the maximal size of the block to move; if it cannot be
1135    represented in unsigned HOST_WIDE_INT, it is the mask of all ones.
1136 
1137    Return the address of the new block, if memcpy is called and returns it,
1138    0 otherwise.  */
1139 
1140 rtx
1141 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1142 		       unsigned int expected_align, HOST_WIDE_INT expected_size,
1143 		       unsigned HOST_WIDE_INT min_size,
1144 		       unsigned HOST_WIDE_INT max_size,
1145 		       unsigned HOST_WIDE_INT probable_max_size)
1146 {
1147   bool may_use_call;
1148   rtx retval = 0;
1149   unsigned int align;
1150 
1151   gcc_assert (size);
1152   if (CONST_INT_P (size)
1153       && INTVAL (size) == 0)
1154     return 0;
1155 
1156   switch (method)
1157     {
1158     case BLOCK_OP_NORMAL:
1159     case BLOCK_OP_TAILCALL:
1160       may_use_call = true;
1161       break;
1162 
1163     case BLOCK_OP_CALL_PARM:
1164       may_use_call = block_move_libcall_safe_for_call_parm ();
1165 
1166       /* Make inhibit_defer_pop nonzero around the library call
1167 	 to force it to pop the arguments right away.  */
1168       NO_DEFER_POP;
1169       break;
1170 
1171     case BLOCK_OP_NO_LIBCALL:
1172       may_use_call = false;
1173       break;
1174 
1175     default:
1176       gcc_unreachable ();
1177     }
1178 
1179   gcc_assert (MEM_P (x) && MEM_P (y));
1180   align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1181   gcc_assert (align >= BITS_PER_UNIT);
1182 
1183   /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1184      block copy is more efficient for other large modes, e.g. DCmode.  */
1185   x = adjust_address (x, BLKmode, 0);
1186   y = adjust_address (y, BLKmode, 0);
1187 
1188   /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1189      can be incorrect is coming from __builtin_memcpy.  */
1190   if (CONST_INT_P (size))
1191     {
1192       x = shallow_copy_rtx (x);
1193       y = shallow_copy_rtx (y);
1194       set_mem_size (x, INTVAL (size));
1195       set_mem_size (y, INTVAL (size));
1196     }
1197 
1198   if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1199     move_by_pieces (x, y, INTVAL (size), align, 0);
1200   else if (emit_block_move_via_movmem (x, y, size, align,
1201 				       expected_align, expected_size,
1202 				       min_size, max_size, probable_max_size))
1203     ;
1204   else if (may_use_call
1205 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1206 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1207     {
1208       /* Since x and y are passed to a libcall, mark the corresponding
1209 	 tree EXPR as addressable.  */
1210       tree y_expr = MEM_EXPR (y);
1211       tree x_expr = MEM_EXPR (x);
1212       if (y_expr)
1213 	mark_addressable (y_expr);
1214       if (x_expr)
1215 	mark_addressable (x_expr);
1216       retval = emit_block_move_via_libcall (x, y, size,
1217 					    method == BLOCK_OP_TAILCALL);
1218     }
1219 
1220   else
1221     emit_block_move_via_loop (x, y, size, align);
1222 
1223   if (method == BLOCK_OP_CALL_PARM)
1224     OK_DEFER_POP;
1225 
1226   return retval;
1227 }
1228 
1229 rtx
1230 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1231 {
1232   unsigned HOST_WIDE_INT max, min = 0;
1233   if (GET_CODE (size) == CONST_INT)
1234     min = max = UINTVAL (size);
1235   else
1236     max = GET_MODE_MASK (GET_MODE (size));
1237   return emit_block_move_hints (x, y, size, method, 0, -1,
1238 				min, max, max);
1239 }
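/* Illustrative sketch (NBYTES is a hypothetical constant byte count): a
   caller holding two BLKmode MEMs X and Y expands a plain structure copy
   with

     emit_block_move (x, y, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   and the dispatch in emit_block_move_hints then chooses between
   move_by_pieces, a movmem pattern, a memcpy libcall, or the byte loop.  */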
1240 
1241 /* A subroutine of emit_block_move.  Returns true if calling the
1242    block move libcall will not clobber any parameters which may have
1243    already been placed on the stack.  */
1244 
1245 static bool
1246 block_move_libcall_safe_for_call_parm (void)
1247 {
1248 #if defined (REG_PARM_STACK_SPACE)
1249   tree fn;
1250 #endif
1251 
1252   /* If arguments are pushed on the stack, then they're safe.  */
1253   if (PUSH_ARGS)
1254     return true;
1255 
1256   /* If registers go on the stack anyway, any argument is sure to clobber
1257      an outgoing argument.  */
1258 #if defined (REG_PARM_STACK_SPACE)
1259   fn = emit_block_move_libcall_fn (false);
1260   /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1261      depend on its argument.  */
1262   (void) fn;
1263   if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1264       && REG_PARM_STACK_SPACE (fn) != 0)
1265     return false;
1266 #endif
1267 
1268   /* If any argument goes in memory, then it might clobber an outgoing
1269      argument.  */
1270   {
1271     CUMULATIVE_ARGS args_so_far_v;
1272     cumulative_args_t args_so_far;
1273     tree fn, arg;
1274 
1275     fn = emit_block_move_libcall_fn (false);
1276     INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1277     args_so_far = pack_cumulative_args (&args_so_far_v);
1278 
1279     arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1280     for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1281       {
1282 	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1283 	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1284 					      NULL_TREE, true);
1285 	if (!tmp || !REG_P (tmp))
1286 	  return false;
1287 	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1288 	  return false;
1289 	targetm.calls.function_arg_advance (args_so_far, mode,
1290 					    NULL_TREE, true);
1291       }
1292   }
1293   return true;
1294 }
1295 
1296 /* A subroutine of emit_block_move.  Expand a movmem pattern;
1297    return true if successful.  */
1298 
1299 static bool
1300 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1301 			    unsigned int expected_align, HOST_WIDE_INT expected_size,
1302 			    unsigned HOST_WIDE_INT min_size,
1303 			    unsigned HOST_WIDE_INT max_size,
1304 			    unsigned HOST_WIDE_INT probable_max_size)
1305 {
1306   int save_volatile_ok = volatile_ok;
1307   enum machine_mode mode;
1308 
1309   if (expected_align < align)
1310     expected_align = align;
1311   if (expected_size != -1)
1312     {
1313       if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1314 	expected_size = probable_max_size;
1315       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1316 	expected_size = min_size;
1317     }
1318 
1319   /* Since this is a move insn, we don't care about volatility.  */
1320   volatile_ok = 1;
1321 
1322   /* Try the most limited insn first, because there's no point
1323      including more than one in the machine description unless
1324      the more limited one has some advantage.  */
1325 
1326   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1327        mode = GET_MODE_WIDER_MODE (mode))
1328     {
1329       enum insn_code code = direct_optab_handler (movmem_optab, mode);
1330 
1331       if (code != CODE_FOR_nothing
1332 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1333 	     here because if SIZE is less than the mode mask, as it is
1334 	     returned by the macro, it will definitely be less than the
1335 	     actual mode mask.  Since SIZE is within the Pmode address
1336 	     space, we limit MODE to Pmode.  */
1337 	  && ((CONST_INT_P (size)
1338 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1339 		   <= (GET_MODE_MASK (mode) >> 1)))
1340 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
1341 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1342 	{
1343 	  struct expand_operand ops[9];
1344 	  unsigned int nops;
1345 
1346 	  /* ??? When called via emit_block_move_for_call, it'd be
1347 	     nice if there were some way to inform the backend, so
1348 	     that it doesn't fail the expansion because it thinks
1349 	     emitting the libcall would be more efficient.  */
1350 	  nops = insn_data[(int) code].n_generator_args;
1351 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1352 
1353 	  create_fixed_operand (&ops[0], x);
1354 	  create_fixed_operand (&ops[1], y);
1355 	  /* The check above guarantees that this size conversion is valid.  */
1356 	  create_convert_operand_to (&ops[2], size, mode, true);
1357 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1358 	  if (nops >= 6)
1359 	    {
1360 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1361 	      create_integer_operand (&ops[5], expected_size);
1362 	    }
1363 	  if (nops >= 8)
1364 	    {
1365 	      create_integer_operand (&ops[6], min_size);
1366 	      /* If we cannot represent the maximal size,
1367 		 pass NULL for the parameter.  */
1368 	      if ((HOST_WIDE_INT) max_size != -1)
1369 	        create_integer_operand (&ops[7], max_size);
1370 	      else
1371 		create_fixed_operand (&ops[7], NULL);
1372 	    }
1373 	  if (nops == 9)
1374 	    {
1375 	      /* If we cannot represent the maximal size,
1376 		 pass NULL for the parameter.  */
1377 	      if ((HOST_WIDE_INT) probable_max_size != -1)
1378 	        create_integer_operand (&ops[8], probable_max_size);
1379 	      else
1380 		create_fixed_operand (&ops[8], NULL);
1381 	    }
1382 	  if (maybe_expand_insn (code, nops, ops))
1383 	    {
1384 	      volatile_ok = save_volatile_ok;
1385 	      return true;
1386 	    }
1387 	}
1388     }
1389 
1390   volatile_ok = save_volatile_ok;
1391   return false;
1392 }
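/* For reference, the operands passed to a movmem pattern above are:
   0 = destination MEM, 1 = source MEM, 2 = length, 3 = alignment in
   bytes; patterns taking 6 operands also receive 4 = expected alignment
   in bytes and 5 = expected size; 8-operand patterns add 6 = minimal and
   7 = maximal size (NULL when unrepresentable); 9-operand patterns add
   8 = probable maximal size (likewise NULL when unrepresentable).  */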
1393 
1394 /* A subroutine of emit_block_move.  Expand a call to memcpy.
1395    Return the return value from memcpy, 0 otherwise.  */
1396 
1397 rtx
1398 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1399 {
1400   rtx dst_addr, src_addr;
1401   tree call_expr, fn, src_tree, dst_tree, size_tree;
1402   enum machine_mode size_mode;
1403   rtx retval;
1404 
1405   /* Emit code to copy the addresses of DST and SRC and SIZE into new
1406      pseudos.  We can then place those new pseudos into a VAR_DECL and
1407      use them later.  */
1408 
1409   dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1410   src_addr = copy_addr_to_reg (XEXP (src, 0));
1411 
1412   dst_addr = convert_memory_address (ptr_mode, dst_addr);
1413   src_addr = convert_memory_address (ptr_mode, src_addr);
1414 
1415   dst_tree = make_tree (ptr_type_node, dst_addr);
1416   src_tree = make_tree (ptr_type_node, src_addr);
1417 
1418   size_mode = TYPE_MODE (sizetype);
1419 
1420   size = convert_to_mode (size_mode, size, 1);
1421   size = copy_to_mode_reg (size_mode, size);
1422 
1423   /* It is incorrect to use the libcall calling conventions to call
1424      memcpy in this context.  This could be a user call to memcpy and
1425      the user may wish to examine the return value from memcpy.  For
1426      targets where libcalls and normal calls have different conventions
1427      for returning pointers, we could end up generating incorrect code.  */
1428 
1429   size_tree = make_tree (sizetype, size);
1430 
1431   fn = emit_block_move_libcall_fn (true);
1432   call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1433   CALL_EXPR_TAILCALL (call_expr) = tailcall;
1434 
1435   retval = expand_normal (call_expr);
1436 
1437   return retval;
1438 }
1439 
1440 /* A subroutine of emit_block_move_via_libcall.  Create the tree node
1441    for the function we use for block copies.  */
1442 
1443 static GTY(()) tree block_move_fn;
1444 
1445 void
1446 init_block_move_fn (const char *asmspec)
1447 {
1448   if (!block_move_fn)
1449     {
1450       tree args, fn, attrs, attr_args;
1451 
1452       fn = get_identifier ("memcpy");
1453       args = build_function_type_list (ptr_type_node, ptr_type_node,
1454 				       const_ptr_type_node, sizetype,
1455 				       NULL_TREE);
1456 
1457       fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1458       DECL_EXTERNAL (fn) = 1;
1459       TREE_PUBLIC (fn) = 1;
1460       DECL_ARTIFICIAL (fn) = 1;
1461       TREE_NOTHROW (fn) = 1;
1462       DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1463       DECL_VISIBILITY_SPECIFIED (fn) = 1;
1464 
1465       attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1466       attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1467 
1468       decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1469 
1470       block_move_fn = fn;
1471     }
1472 
1473   if (asmspec)
1474     set_user_assembler_name (block_move_fn, asmspec);
1475 }
1476 
1477 static tree
1478 emit_block_move_libcall_fn (int for_call)
1479 {
1480   static bool emitted_extern;
1481 
1482   if (!block_move_fn)
1483     init_block_move_fn (NULL);
1484 
1485   if (for_call && !emitted_extern)
1486     {
1487       emitted_extern = true;
1488       make_decl_rtl (block_move_fn);
1489     }
1490 
1491   return block_move_fn;
1492 }
1493 
1494 /* A subroutine of emit_block_move.  Copy the data via an explicit
1495    loop.  This is used only when libcalls are forbidden.  */
1496 /* ??? It'd be nice to copy in hunks larger than QImode.  */
1497 
1498 static void
1499 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1500 			  unsigned int align ATTRIBUTE_UNUSED)
1501 {
1502   rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1503   enum machine_mode x_addr_mode = get_address_mode (x);
1504   enum machine_mode y_addr_mode = get_address_mode (y);
1505   enum machine_mode iter_mode;
1506 
1507   iter_mode = GET_MODE (size);
1508   if (iter_mode == VOIDmode)
1509     iter_mode = word_mode;
1510 
1511   top_label = gen_label_rtx ();
1512   cmp_label = gen_label_rtx ();
1513   iter = gen_reg_rtx (iter_mode);
1514 
1515   emit_move_insn (iter, const0_rtx);
1516 
1517   x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1518   y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1519   do_pending_stack_adjust ();
1520 
1521   emit_jump (cmp_label);
1522   emit_label (top_label);
1523 
1524   tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1525   x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1526 
1527   if (x_addr_mode != y_addr_mode)
1528     tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1529   y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1530 
1531   x = change_address (x, QImode, x_addr);
1532   y = change_address (y, QImode, y_addr);
1533 
1534   emit_move_insn (x, y);
1535 
1536   tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1537 			     true, OPTAB_LIB_WIDEN);
1538   if (tmp != iter)
1539     emit_move_insn (iter, tmp);
1540 
1541   emit_label (cmp_label);
1542 
1543   emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1544 			   true, top_label, REG_BR_PROB_BASE * 90 / 100);
1545 }
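/* The control flow generated above corresponds roughly to this C sketch,
   treating X and Y as byte pointers and copying one byte per iteration:

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter++;
   cmp:
     if (iter < size)
       goto top;
 */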
1546 
1547 /* Copy all or part of a value X into registers starting at REGNO.
1548    The number of registers to be filled is NREGS.  */
1549 
1550 void
1551 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1552 {
1553   int i;
1554 #ifdef HAVE_load_multiple
1555   rtx pat;
1556   rtx last;
1557 #endif
1558 
1559   if (nregs == 0)
1560     return;
1561 
1562   if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1563     x = validize_mem (force_const_mem (mode, x));
1564 
1565   /* See if the machine can do this with a load multiple insn.  */
1566 #ifdef HAVE_load_multiple
1567   if (HAVE_load_multiple)
1568     {
1569       last = get_last_insn ();
1570       pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1571 			       GEN_INT (nregs));
1572       if (pat)
1573 	{
1574 	  emit_insn (pat);
1575 	  return;
1576 	}
1577       else
1578 	delete_insns_since (last);
1579     }
1580 #endif
1581 
1582   for (i = 0; i < nregs; i++)
1583     emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1584 		    operand_subword_force (x, i, mode));
1585 }
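
/* Illustrative sketch, not part of the original sources: a caller that
   wants the words of a multi-word pseudo X placed in consecutive hard
   registers can use move_block_to_reg as below.  The hard register
   number 3 and the choice of DImode as a two-word mode are hypothetical
   and target-dependent.  */
#if 0
static void
example_move_block_to_reg (rtx x)
{
  /* Fill hard regs 3 and 4 with the two words of the DImode value X.  */
  move_block_to_reg (3, x, 2, DImode);
}
#endif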
1586 
1587 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1588    The number of registers to be filled is NREGS.  */
1589 
1590 void
1591 move_block_from_reg (int regno, rtx x, int nregs)
1592 {
1593   int i;
1594 
1595   if (nregs == 0)
1596     return;
1597 
1598   /* See if the machine can do this with a store multiple insn.  */
1599 #ifdef HAVE_store_multiple
1600   if (HAVE_store_multiple)
1601     {
1602       rtx last = get_last_insn ();
1603       rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1604 				    GEN_INT (nregs));
1605       if (pat)
1606 	{
1607 	  emit_insn (pat);
1608 	  return;
1609 	}
1610       else
1611 	delete_insns_since (last);
1612     }
1613 #endif
1614 
1615   for (i = 0; i < nregs; i++)
1616     {
1617       rtx tem = operand_subword (x, i, 1, BLKmode);
1618 
1619       gcc_assert (tem);
1620 
1621       emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1622     }
1623 }
1624 
1625 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1626    ORIG, where ORIG is a non-consecutive group of registers represented by
1627    a PARALLEL.  The clone is identical to the original except in that the
1628    original set of registers is replaced by a new set of pseudo registers.
1629    The new set has the same modes as the original set.  */
1630 
1631 rtx
1632 gen_group_rtx (rtx orig)
1633 {
1634   int i, length;
1635   rtx *tmps;
1636 
1637   gcc_assert (GET_CODE (orig) == PARALLEL);
1638 
1639   length = XVECLEN (orig, 0);
1640   tmps = XALLOCAVEC (rtx, length);
1641 
1642   /* Skip a NULL entry in first slot.  */
1643   i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1644 
1645   if (i)
1646     tmps[0] = 0;
1647 
1648   for (; i < length; i++)
1649     {
1650       enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1651       rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1652 
1653       tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1654     }
1655 
1656   return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1657 }
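
/* Illustrative sketch, not part of the original sources: ORIG for
   gen_group_rtx is a PARALLEL whose elements are (expr_list reg offset)
   pairs.  The sketch below builds such a group by hand for a value split
   across two hypothetical word-sized hard registers 8 and 9 at byte
   offsets 0 and UNITS_PER_WORD, then clones it into pseudos.  Real
   callers normally get the group from the target's calling-convention
   hooks rather than building it themselves.  */
#if 0
static rtx
example_gen_group_rtx (void)
{
  rtx elts[2];

  elts[0] = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (word_mode, 8),
			       GEN_INT (0));
  elts[1] = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (word_mode, 9),
			       GEN_INT (UNITS_PER_WORD));
  return gen_group_rtx (gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (2, elts)));
}
#endif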
1658 
1659 /* A subroutine of emit_group_load.  Arguments as for emit_group_load,
1660    except that values are placed in TMPS[i], and must later be moved
1661    into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1662 
1663 static void
1664 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1665 {
1666   rtx src;
1667   int start, i;
1668   enum machine_mode m = GET_MODE (orig_src);
1669 
1670   gcc_assert (GET_CODE (dst) == PARALLEL);
1671 
1672   if (m != VOIDmode
1673       && !SCALAR_INT_MODE_P (m)
1674       && !MEM_P (orig_src)
1675       && GET_CODE (orig_src) != CONCAT)
1676     {
1677       enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1678       if (imode == BLKmode)
1679 	src = assign_stack_temp (GET_MODE (orig_src), ssize);
1680       else
1681 	src = gen_reg_rtx (imode);
1682       if (imode != BLKmode)
1683 	src = gen_lowpart (GET_MODE (orig_src), src);
1684       emit_move_insn (src, orig_src);
1685       /* ...and back again.  */
1686       if (imode != BLKmode)
1687 	src = gen_lowpart (imode, src);
1688       emit_group_load_1 (tmps, dst, src, type, ssize);
1689       return;
1690     }
1691 
1692   /* Check for a NULL entry, used to indicate that the parameter goes
1693      both on the stack and in registers.  */
1694   if (XEXP (XVECEXP (dst, 0, 0), 0))
1695     start = 0;
1696   else
1697     start = 1;
1698 
1699   /* Process the pieces.  */
1700   for (i = start; i < XVECLEN (dst, 0); i++)
1701     {
1702       enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1703       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1704       unsigned int bytelen = GET_MODE_SIZE (mode);
1705       int shift = 0;
1706 
1707       /* Handle trailing fragments that run over the size of the struct.  */
1708       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1709 	{
1710 	  /* Arrange to shift the fragment to where it belongs.
1711 	     extract_bit_field loads to the lsb of the reg.  */
1712 	  if (
1713 #ifdef BLOCK_REG_PADDING
1714 	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1715 	      == (BYTES_BIG_ENDIAN ? upward : downward)
1716 #else
1717 	      BYTES_BIG_ENDIAN
1718 #endif
1719 	      )
1720 	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1721 	  bytelen = ssize - bytepos;
1722 	  gcc_assert (bytelen > 0);
1723 	}
1724 
1725       /* If we won't be loading directly from memory, protect the real source
1726 	 from strange tricks we might play; but make sure that the source can
1727 	 be loaded directly into the destination.  */
1728       src = orig_src;
1729       if (!MEM_P (orig_src)
1730 	  && (!CONSTANT_P (orig_src)
1731 	      || (GET_MODE (orig_src) != mode
1732 		  && GET_MODE (orig_src) != VOIDmode)))
1733 	{
1734 	  if (GET_MODE (orig_src) == VOIDmode)
1735 	    src = gen_reg_rtx (mode);
1736 	  else
1737 	    src = gen_reg_rtx (GET_MODE (orig_src));
1738 
1739 	  emit_move_insn (src, orig_src);
1740 	}
1741 
1742       /* Optimize the access just a bit.  */
1743       if (MEM_P (src)
1744 	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1745 	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1746 	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1747 	  && bytelen == GET_MODE_SIZE (mode))
1748 	{
1749 	  tmps[i] = gen_reg_rtx (mode);
1750 	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1751 	}
1752       else if (COMPLEX_MODE_P (mode)
1753 	       && GET_MODE (src) == mode
1754 	       && bytelen == GET_MODE_SIZE (mode))
1755 	/* Let emit_move_complex do the bulk of the work.  */
1756 	tmps[i] = src;
1757       else if (GET_CODE (src) == CONCAT)
1758 	{
1759 	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1760 	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1761 
1762 	  if ((bytepos == 0 && bytelen == slen0)
1763 	      || (bytepos != 0 && bytepos + bytelen <= slen))
1764 	    {
1765 	      /* The following assumes that the concatenated objects all
1766 		 have the same size.  In this case, a simple calculation
1767 		 can be used to determine the object and the bit field
1768 		 to be extracted.  */
1769 	      tmps[i] = XEXP (src, bytepos / slen0);
1770 	      if (! CONSTANT_P (tmps[i])
1771 		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1772 		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1773 					     (bytepos % slen0) * BITS_PER_UNIT,
1774 					     1, NULL_RTX, mode, mode);
1775 	    }
1776 	  else
1777 	    {
1778 	      rtx mem;
1779 
1780 	      gcc_assert (!bytepos);
1781 	      mem = assign_stack_temp (GET_MODE (src), slen);
1782 	      emit_move_insn (mem, src);
1783 	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1784 					   0, 1, NULL_RTX, mode, mode);
1785 	    }
1786 	}
1787       /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1788 	 SIMD register, which is currently broken.  While we get GCC
1789 	 to emit proper RTL for these cases, let's dump to memory.  */
1790       else if (VECTOR_MODE_P (GET_MODE (dst))
1791 	       && REG_P (src))
1792 	{
1793 	  int slen = GET_MODE_SIZE (GET_MODE (src));
1794 	  rtx mem;
1795 
1796 	  mem = assign_stack_temp (GET_MODE (src), slen);
1797 	  emit_move_insn (mem, src);
1798 	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
1799 	}
1800       else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1801                && XVECLEN (dst, 0) > 1)
1802         tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1803       else if (CONSTANT_P (src))
1804 	{
1805 	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1806 
1807 	  if (len == ssize)
1808 	    tmps[i] = src;
1809 	  else
1810 	    {
1811 	      rtx first, second;
1812 
1813 	      gcc_assert (2 * len == ssize);
1814 	      split_double (src, &first, &second);
1815 	      if (i)
1816 		tmps[i] = second;
1817 	      else
1818 		tmps[i] = first;
1819 	    }
1820 	}
1821       else if (REG_P (src) && GET_MODE (src) == mode)
1822 	tmps[i] = src;
1823       else
1824 	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1825 				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1826 				     mode, mode);
1827 
1828       if (shift)
1829 	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1830 				shift, tmps[i], 0);
1831     }
1832 }
1833 
1834 /* Emit code to move a block SRC of type TYPE to a block DST,
1835    where DST is non-consecutive registers represented by a PARALLEL.
1836    SSIZE represents the total size of block SRC in bytes, or -1
1837    if not known.  */
1838 
1839 void
1840 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1841 {
1842   rtx *tmps;
1843   int i;
1844 
1845   tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1846   emit_group_load_1 (tmps, dst, src, type, ssize);
1847 
1848   /* Copy the extracted pieces into the proper (probable) hard regs.  */
1849   for (i = 0; i < XVECLEN (dst, 0); i++)
1850     {
1851       rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1852       if (d == NULL)
1853 	continue;
1854       emit_move_insn (d, tmps[i]);
1855     }
1856 }
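
/* Illustrative sketch, not part of the original sources: loading a small
   BLKmode aggregate EXP, whose value lives in memory, into a register
   group DST obtained from the target's calling-convention code (or from
   gen_group_rtx above).  The names are hypothetical.  */
#if 0
static void
example_emit_group_load (rtx dst, tree exp)
{
  rtx mem = expand_normal (exp);

  emit_group_load (dst, mem, TREE_TYPE (exp),
		   int_size_in_bytes (TREE_TYPE (exp)));
}
#endif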
1857 
1858 /* Similar, but load SRC into new pseudos in a format that looks like
1859    PARALLEL.  This can later be fed to emit_group_move to get things
1860    in the right place.  */
1861 
1862 rtx
1863 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1864 {
1865   rtvec vec;
1866   int i;
1867 
1868   vec = rtvec_alloc (XVECLEN (parallel, 0));
1869   emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1870 
1871   /* Convert the vector to look just like the original PARALLEL, except
1872      with the computed values.  */
1873   for (i = 0; i < XVECLEN (parallel, 0); i++)
1874     {
1875       rtx e = XVECEXP (parallel, 0, i);
1876       rtx d = XEXP (e, 0);
1877 
1878       if (d)
1879 	{
1880 	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1881 	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1882 	}
1883       RTVEC_ELT (vec, i) = e;
1884     }
1885 
1886   return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1887 }
1888 
1889 /* Emit code to move a block SRC to block DST, where SRC and DST are
1890    non-consecutive groups of registers, each represented by a PARALLEL.  */
1891 
1892 void
1893 emit_group_move (rtx dst, rtx src)
1894 {
1895   int i;
1896 
1897   gcc_assert (GET_CODE (src) == PARALLEL
1898 	      && GET_CODE (dst) == PARALLEL
1899 	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
1900 
1901   /* Skip first entry if NULL.  */
1902   for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1903     emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1904 		    XEXP (XVECEXP (src, 0, i), 0));
1905 }
1906 
1907 /* Move a group of registers represented by a PARALLEL into pseudos.  */
1908 
1909 rtx
1910 emit_group_move_into_temps (rtx src)
1911 {
1912   rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1913   int i;
1914 
1915   for (i = 0; i < XVECLEN (src, 0); i++)
1916     {
1917       rtx e = XVECEXP (src, 0, i);
1918       rtx d = XEXP (e, 0);
1919 
1920       if (d)
1921 	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1922       RTVEC_ELT (vec, i) = e;
1923     }
1924 
1925   return gen_rtx_PARALLEL (GET_MODE (src), vec);
1926 }
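
/* Illustrative sketch, not part of the original sources: a typical use of
   the two routines above is to park a hard-register group in pseudos
   across code that might clobber those hard registers, and move the value
   back just before it is needed again.  */
#if 0
static void
example_group_move (rtx group)
{
  rtx saved = emit_group_move_into_temps (group);

  /* ... emit code that may overwrite the hard registers in GROUP ...  */

  emit_group_move (group, saved);
}
#endif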
1927 
1928 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1929    where SRC is non-consecutive registers represented by a PARALLEL.
1930    SSIZE represents the total size of block ORIG_DST, or -1 if not
1931    known.  */
1932 
1933 void
1934 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1935 {
1936   rtx *tmps, dst;
1937   int start, finish, i;
1938   enum machine_mode m = GET_MODE (orig_dst);
1939 
1940   gcc_assert (GET_CODE (src) == PARALLEL);
1941 
1942   if (!SCALAR_INT_MODE_P (m)
1943       && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1944     {
1945       enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1946       if (imode == BLKmode)
1947         dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1948       else
1949         dst = gen_reg_rtx (imode);
1950       emit_group_store (dst, src, type, ssize);
1951       if (imode != BLKmode)
1952         dst = gen_lowpart (GET_MODE (orig_dst), dst);
1953       emit_move_insn (orig_dst, dst);
1954       return;
1955     }
1956 
1957   /* Check for a NULL entry, used to indicate that the parameter goes
1958      both on the stack and in registers.  */
1959   if (XEXP (XVECEXP (src, 0, 0), 0))
1960     start = 0;
1961   else
1962     start = 1;
1963   finish = XVECLEN (src, 0);
1964 
1965   tmps = XALLOCAVEC (rtx, finish);
1966 
1967   /* Copy the (probable) hard regs into pseudos.  */
1968   for (i = start; i < finish; i++)
1969     {
1970       rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1971       if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1972 	{
1973 	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
1974 	  emit_move_insn (tmps[i], reg);
1975 	}
1976       else
1977 	tmps[i] = reg;
1978     }
1979 
1980   /* If we won't be storing directly into memory, protect the real destination
1981      from strange tricks we might play.  */
1982   dst = orig_dst;
1983   if (GET_CODE (dst) == PARALLEL)
1984     {
1985       rtx temp;
1986 
1987       /* We can get a PARALLEL dst if there is a conditional expression in
1988 	 a return statement.  In that case, the dst and src are the same,
1989 	 so no action is necessary.  */
1990       if (rtx_equal_p (dst, src))
1991 	return;
1992 
1993       /* It is unclear if we can ever reach here, but we may as well handle
1994 	 it.  Allocate a temporary, and split this into a store/load to/from
1995 	 the temporary.  */
1996       temp = assign_stack_temp (GET_MODE (dst), ssize);
1997       emit_group_store (temp, src, type, ssize);
1998       emit_group_load (dst, temp, type, ssize);
1999       return;
2000     }
2001   else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2002     {
2003       enum machine_mode outer = GET_MODE (dst);
2004       enum machine_mode inner;
2005       HOST_WIDE_INT bytepos;
2006       bool done = false;
2007       rtx temp;
2008 
2009       if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2010 	dst = gen_reg_rtx (outer);
2011 
2012       /* Make life a bit easier for combine.  */
2013       /* If the first element of the vector is the low part
2014 	 of the destination mode, use a paradoxical subreg to
2015 	 initialize the destination.  */
2016       if (start < finish)
2017 	{
2018 	  inner = GET_MODE (tmps[start]);
2019 	  bytepos = subreg_lowpart_offset (inner, outer);
2020 	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2021 	    {
2022 	      temp = simplify_gen_subreg (outer, tmps[start],
2023 					  inner, 0);
2024 	      if (temp)
2025 		{
2026 		  emit_move_insn (dst, temp);
2027 		  done = true;
2028 		  start++;
2029 		}
2030 	    }
2031 	}
2032 
2033       /* If the first element wasn't the low part, try the last.  */
2034       if (!done
2035 	  && start < finish - 1)
2036 	{
2037 	  inner = GET_MODE (tmps[finish - 1]);
2038 	  bytepos = subreg_lowpart_offset (inner, outer);
2039 	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2040 	    {
2041 	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
2042 					  inner, 0);
2043 	      if (temp)
2044 		{
2045 		  emit_move_insn (dst, temp);
2046 		  done = true;
2047 		  finish--;
2048 		}
2049 	    }
2050 	}
2051 
2052       /* Otherwise, simply initialize the result to zero.  */
2053       if (!done)
2054         emit_move_insn (dst, CONST0_RTX (outer));
2055     }
2056 
2057   /* Process the pieces.  */
2058   for (i = start; i < finish; i++)
2059     {
2060       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2061       enum machine_mode mode = GET_MODE (tmps[i]);
2062       unsigned int bytelen = GET_MODE_SIZE (mode);
2063       unsigned int adj_bytelen;
2064       rtx dest = dst;
2065 
2066       /* Handle trailing fragments that run over the size of the struct.  */
2067       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2068 	adj_bytelen = ssize - bytepos;
2069       else
2070 	adj_bytelen = bytelen;
2071 
2072       if (GET_CODE (dst) == CONCAT)
2073 	{
2074 	  if (bytepos + adj_bytelen
2075 	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2076 	    dest = XEXP (dst, 0);
2077 	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2078 	    {
2079 	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2080 	      dest = XEXP (dst, 1);
2081 	    }
2082 	  else
2083 	    {
2084 	      enum machine_mode dest_mode = GET_MODE (dest);
2085 	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2086 
2087 	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2088 
2089 	      if (GET_MODE_ALIGNMENT (dest_mode)
2090 		  >= GET_MODE_ALIGNMENT (tmp_mode))
2091 		{
2092 		  dest = assign_stack_temp (dest_mode,
2093 					    GET_MODE_SIZE (dest_mode));
2094 		  emit_move_insn (adjust_address (dest,
2095 						  tmp_mode,
2096 						  bytepos),
2097 				  tmps[i]);
2098 		  dst = dest;
2099 		}
2100 	      else
2101 		{
2102 		  dest = assign_stack_temp (tmp_mode,
2103 					    GET_MODE_SIZE (tmp_mode));
2104 		  emit_move_insn (dest, tmps[i]);
2105 		  dst = adjust_address (dest, dest_mode, bytepos);
2106 		}
2107 	      break;
2108 	    }
2109 	}
2110 
2111       /* Handle trailing fragments that run over the size of the struct.  */
2112       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2113 	{
2114 	  /* store_bit_field always takes its value from the lsb.
2115 	     Move the fragment to the lsb if it's not already there.  */
2116 	  if (
2117 #ifdef BLOCK_REG_PADDING
2118 	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2119 	      == (BYTES_BIG_ENDIAN ? upward : downward)
2120 #else
2121 	      BYTES_BIG_ENDIAN
2122 #endif
2123 	      )
2124 	    {
2125 	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2126 	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2127 				      shift, tmps[i], 0);
2128 	    }
2129 
2130 	  /* Make sure not to write past the end of the struct.  */
2131 	  store_bit_field (dest,
2132 			   adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2133 			   bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2134 			   VOIDmode, tmps[i]);
2135 	}
2136 
2137       /* Optimize the access just a bit.  */
2138       else if (MEM_P (dest)
2139 	       && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2140 		   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2141 	       && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2142 	       && bytelen == GET_MODE_SIZE (mode))
2143 	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2144 
2145       else
2146 	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2147 			 0, 0, mode, tmps[i]);
2148     }
2149 
2150   /* Copy from the pseudo into the (probable) hard reg.  */
2151   if (orig_dst != dst)
2152     emit_move_insn (orig_dst, dst);
2153 }
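
/* Illustrative sketch, not part of the original sources: spilling a value
   returned in a non-consecutive register group SRC into a fresh stack
   slot so that it can then be addressed as ordinary BLKmode memory.  */
#if 0
static rtx
example_emit_group_store (rtx src, tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  rtx slot = assign_stack_temp (BLKmode, size);

  emit_group_store (slot, src, type, size);
  return slot;
}
#endif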
2154 
2155 /* Return a form of X that does not use a PARALLEL.  TYPE is the type
2156    of the value stored in X.  */
2157 
2158 rtx
2159 maybe_emit_group_store (rtx x, tree type)
2160 {
2161   enum machine_mode mode = TYPE_MODE (type);
2162   gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2163   if (GET_CODE (x) == PARALLEL)
2164     {
2165       rtx result = gen_reg_rtx (mode);
2166       emit_group_store (result, x, type, int_size_in_bytes (type));
2167       return result;
2168     }
2169   return x;
2170 }
2171 
2172 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2173 
2174    This is used on targets that return BLKmode values in registers.  */
2175 
2176 void
2177 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2178 {
2179   unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2180   rtx src = NULL, dst = NULL;
2181   unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2182   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2183   enum machine_mode mode = GET_MODE (srcreg);
2184   enum machine_mode tmode = GET_MODE (target);
2185   enum machine_mode copy_mode;
2186 
2187   /* BLKmode registers created in the back-end shouldn't have survived.  */
2188   gcc_assert (mode != BLKmode);
2189 
2190   /* If the structure doesn't take up a whole number of words, see whether
2191      SRCREG is padded on the left or on the right.  If it's on the left,
2192      set PADDING_CORRECTION to the number of bits to skip.
2193 
2194      In most ABIs, the structure will be returned at the least significant end of
2195      the register, which translates to right padding on little-endian
2196      targets and left padding on big-endian targets.  The opposite
2197      holds if the structure is returned at the most significant
2198      end of the register.  */
2199   if (bytes % UNITS_PER_WORD != 0
2200       && (targetm.calls.return_in_msb (type)
2201 	  ? !BYTES_BIG_ENDIAN
2202 	  : BYTES_BIG_ENDIAN))
2203     padding_correction
2204       = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2205 
2206   /* We can use a single move if we have an exact mode for the size.  */
2207   else if (MEM_P (target)
2208 	   && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2209 	       || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2210 	   && bytes == GET_MODE_SIZE (mode))
2211   {
2212     emit_move_insn (adjust_address (target, mode, 0), srcreg);
2213     return;
2214   }
2215 
2216   /* And if we additionally have the same mode for a register.  */
2217   else if (REG_P (target)
2218 	   && GET_MODE (target) == mode
2219 	   && bytes == GET_MODE_SIZE (mode))
2220   {
2221     emit_move_insn (target, srcreg);
2222     return;
2223   }
2224 
2225   /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2226      into a new pseudo which is a full word.  */
2227   if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2228     {
2229       srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2230       mode = word_mode;
2231     }
2232 
2233   /* Copy the structure BITSIZE bits at a time.  If the target lives in
2234      memory, take care of not reading/writing past its end by selecting
2235      a copy mode suited to BITSIZE.  This should always be possible given
2236      how it is computed.
2237 
2238      If the target lives in register, make sure not to select a copy mode
2239      larger than the mode of the register.
2240 
2241      We could probably emit more efficient code for machines which do not use
2242      strict alignment, but it doesn't seem worth the effort at the current
2243      time.  */
2244 
2245   copy_mode = word_mode;
2246   if (MEM_P (target))
2247     {
2248       enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2249       if (mem_mode != BLKmode)
2250 	copy_mode = mem_mode;
2251     }
2252   else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2253     copy_mode = tmode;
2254 
2255   for (bitpos = 0, xbitpos = padding_correction;
2256        bitpos < bytes * BITS_PER_UNIT;
2257        bitpos += bitsize, xbitpos += bitsize)
2258     {
2259       /* We need a new source operand each time xbitpos is on a
2260 	 word boundary and when xbitpos == padding_correction
2261 	 (the first time through).  */
2262       if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2263 	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2264 
2265       /* We need a new destination operand each time bitpos is on
2266 	 a word boundary.  */
2267       if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2268 	dst = target;
2269       else if (bitpos % BITS_PER_WORD == 0)
2270 	dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2271 
2272       /* Use xbitpos for the source extraction (right justified) and
2273 	 bitpos for the destination store (left justified).  */
2274       store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2275 		       extract_bit_field (src, bitsize,
2276 					  xbitpos % BITS_PER_WORD, 1,
2277 					  NULL_RTX, copy_mode, copy_mode));
2278     }
2279 }
2280 
2281 /* Copy BLKmode value SRC into a register of mode MODE.  Return the
2282    register if it contains any data, otherwise return null.
2283 
2284    This is used on targets that return BLKmode values in registers.  */
2285 
2286 rtx
2287 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2288 {
2289   int i, n_regs;
2290   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2291   unsigned int bitsize;
2292   rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2293   enum machine_mode dst_mode;
2294 
2295   gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2296 
2297   x = expand_normal (src);
2298 
2299   bytes = int_size_in_bytes (TREE_TYPE (src));
2300   if (bytes == 0)
2301     return NULL_RTX;
2302 
2303   /* If the structure doesn't take up a whole number of words, see
2304      whether the register value should be padded on the left or on
2305      the right.  Set PADDING_CORRECTION to the number of padding
2306      bits needed on the left side.
2307 
2308      In most ABIs, the structure will be returned at the least significant end of
2309      the register, which translates to right padding on little-endian
2310      targets and left padding on big-endian targets.  The opposite
2311      holds if the structure is returned at the most significant
2312      end of the register.  */
2313   if (bytes % UNITS_PER_WORD != 0
2314       && (targetm.calls.return_in_msb (TREE_TYPE (src))
2315 	  ? !BYTES_BIG_ENDIAN
2316 	  : BYTES_BIG_ENDIAN))
2317     padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2318 					   * BITS_PER_UNIT));
2319 
2320   n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2321   dst_words = XALLOCAVEC (rtx, n_regs);
2322   bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2323 
2324   /* Copy the structure BITSIZE bits at a time.  */
2325   for (bitpos = 0, xbitpos = padding_correction;
2326        bitpos < bytes * BITS_PER_UNIT;
2327        bitpos += bitsize, xbitpos += bitsize)
2328     {
2329       /* We need a new destination pseudo each time xbitpos is
2330 	 on a word boundary and when xbitpos == padding_correction
2331 	 (the first time through).  */
2332       if (xbitpos % BITS_PER_WORD == 0
2333 	  || xbitpos == padding_correction)
2334 	{
2335 	  /* Generate an appropriate register.  */
2336 	  dst_word = gen_reg_rtx (word_mode);
2337 	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2338 
2339 	  /* Clear the destination before we move anything into it.  */
2340 	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
2341 	}
2342 
2343       /* We need a new source operand each time bitpos is on a word
2344 	 boundary.  */
2345       if (bitpos % BITS_PER_WORD == 0)
2346 	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2347 
2348       /* Use bitpos for the source extraction (left justified) and
2349 	 xbitpos for the destination store (right justified).  */
2350       store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2351 		       0, 0, word_mode,
2352 		       extract_bit_field (src_word, bitsize,
2353 					  bitpos % BITS_PER_WORD, 1,
2354 					  NULL_RTX, word_mode, word_mode));
2355     }
2356 
2357   if (mode == BLKmode)
2358     {
2359       /* Find the smallest integer mode large enough to hold the
2360 	 entire structure.  */
2361       for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2362 	   mode != VOIDmode;
2363 	   mode = GET_MODE_WIDER_MODE (mode))
2364 	/* Have we found a large enough mode?  */
2365 	if (GET_MODE_SIZE (mode) >= bytes)
2366 	  break;
2367 
2368       /* A suitable mode should have been found.  */
2369       gcc_assert (mode != VOIDmode);
2370     }
2371 
2372   if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2373     dst_mode = word_mode;
2374   else
2375     dst_mode = mode;
2376   dst = gen_reg_rtx (dst_mode);
2377 
2378   for (i = 0; i < n_regs; i++)
2379     emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2380 
2381   if (mode != dst_mode)
2382     dst = gen_lowpart (mode, dst);
2383 
2384   return dst;
2385 }
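
/* Illustrative sketch, not part of the original sources: when expanding a
   return statement whose value RETVAL has BLKmode but the ABI returns it
   in a register, the expander can materialize the register image like
   this.  Passing BLKmode here is only an example; it asks the routine to
   pick a wide-enough integer mode itself, as described above.  */
#if 0
static rtx
example_copy_blkmode_to_reg (tree retval)
{
  return copy_blkmode_to_reg (BLKmode, retval);
}
#endif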
2386 
2387 /* Add a USE expression for REG to the (possibly empty) list pointed
2388    to by CALL_FUSAGE.  REG must denote a hard register.  */
2389 
2390 void
2391 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2392 {
2393   gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2394 
2395   *call_fusage
2396     = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2397 }
2398 
2399 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2400    starting at REGNO.  All of these registers must be hard registers.  */
2401 
2402 void
2403 use_regs (rtx *call_fusage, int regno, int nregs)
2404 {
2405   int i;
2406 
2407   gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2408 
2409   for (i = 0; i < nregs; i++)
2410     use_reg (call_fusage, regno_reg_rtx[regno + i]);
2411 }
2412 
2413 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2414    PARALLEL REGS.  This is for calls that pass values in multiple
2415    non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2416 
2417 void
2418 use_group_regs (rtx *call_fusage, rtx regs)
2419 {
2420   int i;
2421 
2422   for (i = 0; i < XVECLEN (regs, 0); i++)
2423     {
2424       rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2425 
2426       /* A NULL entry means the parameter goes both on the stack and in
2427 	 registers.  This can also be a MEM for targets that pass values
2428 	 partially on the stack and partially in registers.  */
2429       if (reg != 0 && REG_P (reg))
2430 	use_reg (call_fusage, reg);
2431     }
2432 }
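
/* Illustrative sketch, not part of the original sources: building up a
   CALL_FUSAGE list for a call whose first argument occupies two
   consecutive hard registers starting at 4 and whose second argument is
   passed as a register group.  The register numbers are hypothetical.  */
#if 0
static void
example_use_regs (rtx *call_fusage, rtx arg2_group)
{
  use_regs (call_fusage, 4, 2);
  use_group_regs (call_fusage, arg2_group);
}
#endif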
2433 
2434 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2435    assignment and the code of the expression on the RHS is CODE.  Return
2436    NULL otherwise.  */
2437 
2438 static gimple
2439 get_def_for_expr (tree name, enum tree_code code)
2440 {
2441   gimple def_stmt;
2442 
2443   if (TREE_CODE (name) != SSA_NAME)
2444     return NULL;
2445 
2446   def_stmt = get_gimple_for_ssa_name (name);
2447   if (!def_stmt
2448       || gimple_assign_rhs_code (def_stmt) != code)
2449     return NULL;
2450 
2451   return def_stmt;
2452 }
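
/* Illustrative sketch, not part of the original sources: a typical use is
   to peek through an SSA name and ask whether its defining statement has
   a particular RHS code, e.g. a multiplication feeding some other
   operation.  The helper name is hypothetical.  */
#if 0
static bool
example_is_mult_result (tree name)
{
  return get_def_for_expr (name, MULT_EXPR) != NULL;
}
#endif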
2453 
2454 #ifdef HAVE_conditional_move
2455 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2456    assignment and the class of the expression on the RHS is CLASS.  Return
2457    NULL otherwise.  */
2458 
2459 static gimple
2460 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2461 {
2462   gimple def_stmt;
2463 
2464   if (TREE_CODE (name) != SSA_NAME)
2465     return NULL;
2466 
2467   def_stmt = get_gimple_for_ssa_name (name);
2468   if (!def_stmt
2469       || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2470     return NULL;
2471 
2472   return def_stmt;
2473 }
2474 #endif
2475 
2476 
2477 /* Determine whether the LEN bytes generated by CONSTFUN can be
2478    stored to memory using several move instructions.  CONSTFUNDATA is
2479    a pointer which will be passed as argument in every CONSTFUN call.
2480    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2481    a memset operation and false if it's a copy of a constant string.
2482    Return nonzero if a call to store_by_pieces should succeed.  */
2483 
2484 int
2485 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2486 		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2487 		     void *constfundata, unsigned int align, bool memsetp)
2488 {
2489   unsigned HOST_WIDE_INT l;
2490   unsigned int max_size;
2491   HOST_WIDE_INT offset = 0;
2492   enum machine_mode mode;
2493   enum insn_code icode;
2494   int reverse;
2495   /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
2496   rtx cst ATTRIBUTE_UNUSED;
2497 
2498   if (len == 0)
2499     return 1;
2500 
2501   if (! (memsetp
2502 	 ? SET_BY_PIECES_P (len, align)
2503 	 : STORE_BY_PIECES_P (len, align)))
2504     return 0;
2505 
2506   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2507 
2508   /* We would first store what we can in the largest integer mode, then go to
2509      successively smaller modes.  */
2510 
2511   for (reverse = 0;
2512        reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2513        reverse++)
2514     {
2515       l = len;
2516       max_size = STORE_MAX_PIECES + 1;
2517       while (max_size > 1 && l > 0)
2518 	{
2519 	  mode = widest_int_mode_for_size (max_size);
2520 
2521 	  if (mode == VOIDmode)
2522 	    break;
2523 
2524 	  icode = optab_handler (mov_optab, mode);
2525 	  if (icode != CODE_FOR_nothing
2526 	      && align >= GET_MODE_ALIGNMENT (mode))
2527 	    {
2528 	      unsigned int size = GET_MODE_SIZE (mode);
2529 
2530 	      while (l >= size)
2531 		{
2532 		  if (reverse)
2533 		    offset -= size;
2534 
2535 		  cst = (*constfun) (constfundata, offset, mode);
2536 		  if (!targetm.legitimate_constant_p (mode, cst))
2537 		    return 0;
2538 
2539 		  if (!reverse)
2540 		    offset += size;
2541 
2542 		  l -= size;
2543 		}
2544 	    }
2545 
2546 	  max_size = GET_MODE_SIZE (mode);
2547 	}
2548 
2549       /* The code above should have handled everything.  */
2550       gcc_assert (!l);
2551     }
2552 
2553   return 1;
2554 }
2555 
2556 /* Generate several move instructions to store LEN bytes generated by
2557    CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2558    pointer which will be passed as argument in every CONSTFUN call.
2559    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2560    a memset operation and false if it's a copy of a constant string.
2561    If ENDP is 0, return TO; if ENDP is 1, return memory at the end, a la
2562    mempcpy; and if ENDP is 2, return memory at the end minus one byte, a la
2563    stpcpy.  */
2564 
2565 rtx
2566 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2567 		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2568 		 void *constfundata, unsigned int align, bool memsetp, int endp)
2569 {
2570   enum machine_mode to_addr_mode = get_address_mode (to);
2571   struct store_by_pieces_d data;
2572 
2573   if (len == 0)
2574     {
2575       gcc_assert (endp != 2);
2576       return to;
2577     }
2578 
2579   gcc_assert (memsetp
2580 	      ? SET_BY_PIECES_P (len, align)
2581 	      : STORE_BY_PIECES_P (len, align));
2582   data.constfun = constfun;
2583   data.constfundata = constfundata;
2584   data.len = len;
2585   data.to = to;
2586   store_by_pieces_1 (&data, align);
2587   if (endp)
2588     {
2589       rtx to1;
2590 
2591       gcc_assert (!data.reverse);
2592       if (data.autinc_to)
2593 	{
2594 	  if (endp == 2)
2595 	    {
2596 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2597 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2598 	      else
2599 		data.to_addr = copy_to_mode_reg (to_addr_mode,
2600 						 plus_constant (to_addr_mode,
2601 								data.to_addr,
2602 								-1));
2603 	    }
2604 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2605 					   data.offset);
2606 	}
2607       else
2608 	{
2609 	  if (endp == 2)
2610 	    --data.offset;
2611 	  to1 = adjust_address (data.to, QImode, data.offset);
2612 	}
2613       return to1;
2614     }
2615   else
2616     return data.to;
2617 }
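
/* Illustrative sketch, not part of the original sources: the constfun /
   can_store_by_pieces / store_by_pieces triple, here used to zero-fill a
   BLKmode MEM.  The callback returns the piece to store for each
   (offset, mode) pair; returning const0_rtx is trivially correct for
   every mode.  (clear_by_pieces below does essentially this internally;
   the function names here are hypothetical.)  */
#if 0
static rtx
example_zero_piece (void *data ATTRIBUTE_UNUSED,
		    HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

static rtx
example_store_zeros_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
			       unsigned int align)
{
  if (can_store_by_pieces (len, example_zero_piece, NULL, align, true))
    return store_by_pieces (to, len, example_zero_piece, NULL, align,
			    true, 0);
  return NULL_RTX;
}
#endif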
2618 
2619 /* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2620    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2621 
2622 static void
2623 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2624 {
2625   struct store_by_pieces_d data;
2626 
2627   if (len == 0)
2628     return;
2629 
2630   data.constfun = clear_by_pieces_1;
2631   data.constfundata = NULL;
2632   data.len = len;
2633   data.to = to;
2634   store_by_pieces_1 (&data, align);
2635 }
2636 
2637 /* Callback routine for clear_by_pieces.
2638    Return const0_rtx unconditionally.  */
2639 
2640 static rtx
2641 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2642 		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2643 		   enum machine_mode mode ATTRIBUTE_UNUSED)
2644 {
2645   return const0_rtx;
2646 }
2647 
2648 /* Subroutine of clear_by_pieces and store_by_pieces.
2649    Generate several move instructions to store LEN bytes of block TO.  (A MEM
2650    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2651 
2652 static void
2653 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2654 		   unsigned int align ATTRIBUTE_UNUSED)
2655 {
2656   enum machine_mode to_addr_mode = get_address_mode (data->to);
2657   rtx to_addr = XEXP (data->to, 0);
2658   unsigned int max_size = STORE_MAX_PIECES + 1;
2659   enum insn_code icode;
2660 
2661   data->offset = 0;
2662   data->to_addr = to_addr;
2663   data->autinc_to
2664     = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2665        || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2666 
2667   data->explicit_inc_to = 0;
2668   data->reverse
2669     = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2670   if (data->reverse)
2671     data->offset = data->len;
2672 
2673   /* If storing requires more than two move insns,
2674      copy addresses to registers (to make displacements shorter)
2675      and use post-increment if available.  */
2676   if (!data->autinc_to
2677       && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2678     {
2679       /* Determine the main mode we'll be using.
2680 	 MODE might not be used depending on the definitions of the
2681 	 USE_* macros below.  */
2682       enum machine_mode mode ATTRIBUTE_UNUSED
2683 	= widest_int_mode_for_size (max_size);
2684 
2685       if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2686 	{
2687 	  data->to_addr = copy_to_mode_reg (to_addr_mode,
2688 					    plus_constant (to_addr_mode,
2689 							   to_addr,
2690 							   data->len));
2691 	  data->autinc_to = 1;
2692 	  data->explicit_inc_to = -1;
2693 	}
2694 
2695       if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2696 	  && ! data->autinc_to)
2697 	{
2698 	  data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2699 	  data->autinc_to = 1;
2700 	  data->explicit_inc_to = 1;
2701 	}
2702 
2703       if ( !data->autinc_to && CONSTANT_P (to_addr))
2704 	data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2705     }
2706 
2707   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2708 
2709   /* First store what we can in the largest integer mode, then go to
2710      successively smaller modes.  */
2711 
2712   while (max_size > 1 && data->len > 0)
2713     {
2714       enum machine_mode mode = widest_int_mode_for_size (max_size);
2715 
2716       if (mode == VOIDmode)
2717 	break;
2718 
2719       icode = optab_handler (mov_optab, mode);
2720       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2721 	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2722 
2723       max_size = GET_MODE_SIZE (mode);
2724     }
2725 
2726   /* The code above should have handled everything.  */
2727   gcc_assert (!data->len);
2728 }
2729 
2730 /* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2731    with move instructions for mode MODE.  GENFUN is the gen_... function
2732    to make a move insn for that mode.  DATA has all the other info.  */
2733 
2734 static void
2735 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2736 		   struct store_by_pieces_d *data)
2737 {
2738   unsigned int size = GET_MODE_SIZE (mode);
2739   rtx to1, cst;
2740 
2741   while (data->len >= size)
2742     {
2743       if (data->reverse)
2744 	data->offset -= size;
2745 
2746       if (data->autinc_to)
2747 	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2748 					 data->offset);
2749       else
2750 	to1 = adjust_address (data->to, mode, data->offset);
2751 
2752       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2753 	emit_insn (gen_add2_insn (data->to_addr,
2754 				  gen_int_mode (-(HOST_WIDE_INT) size,
2755 						GET_MODE (data->to_addr))));
2756 
2757       cst = (*data->constfun) (data->constfundata, data->offset, mode);
2758       emit_insn ((*genfun) (to1, cst));
2759 
2760       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2761 	emit_insn (gen_add2_insn (data->to_addr,
2762 				  gen_int_mode (size,
2763 						GET_MODE (data->to_addr))));
2764 
2765       if (! data->reverse)
2766 	data->offset += size;
2767 
2768       data->len -= size;
2769     }
2770 }
2771 
2772 /* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2773    its length in bytes.  */
2774 
2775 rtx
2776 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2777 		     unsigned int expected_align, HOST_WIDE_INT expected_size,
2778 		     unsigned HOST_WIDE_INT min_size,
2779 		     unsigned HOST_WIDE_INT max_size,
2780 		     unsigned HOST_WIDE_INT probable_max_size)
2781 {
2782   enum machine_mode mode = GET_MODE (object);
2783   unsigned int align;
2784 
2785   gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2786 
2787   /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2788      just move a zero.  Otherwise, do this a piece at a time.  */
2789   if (mode != BLKmode
2790       && CONST_INT_P (size)
2791       && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2792     {
2793       rtx zero = CONST0_RTX (mode);
2794       if (zero != NULL)
2795 	{
2796 	  emit_move_insn (object, zero);
2797 	  return NULL;
2798 	}
2799 
2800       if (COMPLEX_MODE_P (mode))
2801 	{
2802 	  zero = CONST0_RTX (GET_MODE_INNER (mode));
2803 	  if (zero != NULL)
2804 	    {
2805 	      write_complex_part (object, zero, 0);
2806 	      write_complex_part (object, zero, 1);
2807 	      return NULL;
2808 	    }
2809 	}
2810     }
2811 
2812   if (size == const0_rtx)
2813     return NULL;
2814 
2815   align = MEM_ALIGN (object);
2816 
2817   if (CONST_INT_P (size)
2818       && CLEAR_BY_PIECES_P (INTVAL (size), align))
2819     clear_by_pieces (object, INTVAL (size), align);
2820   else if (set_storage_via_setmem (object, size, const0_rtx, align,
2821 				   expected_align, expected_size,
2822 				   min_size, max_size, probable_max_size))
2823     ;
2824   else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2825     return set_storage_via_libcall (object, size, const0_rtx,
2826 				    method == BLOCK_OP_TAILCALL);
2827   else
2828     gcc_unreachable ();
2829 
2830   return NULL;
2831 }
2832 
2833 rtx
2834 clear_storage (rtx object, rtx size, enum block_op_methods method)
2835 {
2836   unsigned HOST_WIDE_INT max, min = 0;
2837   if (GET_CODE (size) == CONST_INT)
2838     min = max = UINTVAL (size);
2839   else
2840     max = GET_MODE_MASK (GET_MODE (size));
2841   return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2842 }
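
/* Illustrative sketch, not part of the original sources: zeroing an
   NBYTES-byte BLKmode object.  The size is passed as an rtx;
   clear_storage_hints above then chooses between clear_by_pieces, a
   setmem pattern and a memset libcall.  */
#if 0
static void
example_clear_storage (rtx object, HOST_WIDE_INT nbytes)
{
  clear_storage (object, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif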
2843 
2844 
2845 /* A subroutine of clear_storage.  Expand a call to memset.
2846    Return the return value of memset, 0 otherwise.  */
2847 
2848 rtx
2849 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2850 {
2851   tree call_expr, fn, object_tree, size_tree, val_tree;
2852   enum machine_mode size_mode;
2853   rtx retval;
2854 
2855   /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2856      place those pseudos into a VAR_DECL and use them later.  */
2857 
2858   object = copy_addr_to_reg (XEXP (object, 0));
2859 
2860   size_mode = TYPE_MODE (sizetype);
2861   size = convert_to_mode (size_mode, size, 1);
2862   size = copy_to_mode_reg (size_mode, size);
2863 
2864   /* It is incorrect to use the libcall calling conventions to call
2865      memset in this context.  This could be a user call to memset and
2866      the user may wish to examine the return value from memset.  For
2867      targets where libcalls and normal calls have different conventions
2868      for returning pointers, we could end up generating incorrect code.  */
2869 
2870   object_tree = make_tree (ptr_type_node, object);
2871   if (!CONST_INT_P (val))
2872     val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2873   size_tree = make_tree (sizetype, size);
2874   val_tree = make_tree (integer_type_node, val);
2875 
2876   fn = clear_storage_libcall_fn (true);
2877   call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2878   CALL_EXPR_TAILCALL (call_expr) = tailcall;
2879 
2880   retval = expand_normal (call_expr);
2881 
2882   return retval;
2883 }
2884 
2885 /* A subroutine of set_storage_via_libcall.  Create the tree node
2886    for the function we use for block clears.  */
2887 
2888 tree block_clear_fn;
2889 
2890 void
2891 init_block_clear_fn (const char *asmspec)
2892 {
2893   if (!block_clear_fn)
2894     {
2895       tree fn, args;
2896 
2897       fn = get_identifier ("memset");
2898       args = build_function_type_list (ptr_type_node, ptr_type_node,
2899 				       integer_type_node, sizetype,
2900 				       NULL_TREE);
2901 
2902       fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2903       DECL_EXTERNAL (fn) = 1;
2904       TREE_PUBLIC (fn) = 1;
2905       DECL_ARTIFICIAL (fn) = 1;
2906       TREE_NOTHROW (fn) = 1;
2907       DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2908       DECL_VISIBILITY_SPECIFIED (fn) = 1;
2909 
2910       block_clear_fn = fn;
2911     }
2912 
2913   if (asmspec)
2914     set_user_assembler_name (block_clear_fn, asmspec);
2915 }
2916 
2917 static tree
2918 clear_storage_libcall_fn (int for_call)
2919 {
2920   static bool emitted_extern;
2921 
2922   if (!block_clear_fn)
2923     init_block_clear_fn (NULL);
2924 
2925   if (for_call && !emitted_extern)
2926     {
2927       emitted_extern = true;
2928       make_decl_rtl (block_clear_fn);
2929     }
2930 
2931   return block_clear_fn;
2932 }
2933 
2934 /* Expand a setmem pattern; return true if successful.  */
2935 
2936 bool
2937 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2938 			unsigned int expected_align, HOST_WIDE_INT expected_size,
2939 			unsigned HOST_WIDE_INT min_size,
2940 			unsigned HOST_WIDE_INT max_size,
2941 			unsigned HOST_WIDE_INT probable_max_size)
2942 {
2943   /* Try the most limited insn first, because there's no point
2944      including more than one in the machine description unless
2945      the more limited one has some advantage.  */
2946 
2947   enum machine_mode mode;
2948 
2949   if (expected_align < align)
2950     expected_align = align;
2951   if (expected_size != -1)
2952     {
2953       if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2954 	expected_size = max_size;
2955       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2956 	expected_size = min_size;
2957     }
2958 
2959   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2960        mode = GET_MODE_WIDER_MODE (mode))
2961     {
2962       enum insn_code code = direct_optab_handler (setmem_optab, mode);
2963 
2964       if (code != CODE_FOR_nothing
2965 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2966 	     here because if SIZE is less than the mode mask, as it is
2967 	     returned by the macro, it will definitely be less than the
2968 	     actual mode mask.  Since SIZE is within the Pmode address
2969 	     space, we limit MODE to Pmode.  */
2970 	  && ((CONST_INT_P (size)
2971 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2972 		   <= (GET_MODE_MASK (mode) >> 1)))
2973 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
2974 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2975 	{
2976 	  struct expand_operand ops[9];
2977 	  unsigned int nops;
2978 
2979 	  nops = insn_data[(int) code].n_generator_args;
2980 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2981 
2982 	  create_fixed_operand (&ops[0], object);
2983 	  /* The check above guarantees that this size conversion is valid.  */
2984 	  create_convert_operand_to (&ops[1], size, mode, true);
2985 	  create_convert_operand_from (&ops[2], val, byte_mode, true);
2986 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2987 	  if (nops >= 6)
2988 	    {
2989 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2990 	      create_integer_operand (&ops[5], expected_size);
2991 	    }
2992 	  if (nops >= 8)
2993 	    {
2994 	      create_integer_operand (&ops[6], min_size);
2995 	      /* If we can not represent the maximal size,
2996 		 make parameter NULL.  */
2997 	      if ((HOST_WIDE_INT) max_size != -1)
2998 	        create_integer_operand (&ops[7], max_size);
2999 	      else
3000 		create_fixed_operand (&ops[7], NULL);
3001 	    }
3002 	  if (nops == 9)
3003 	    {
3004 	      /* If we can not represent the maximal size,
3005 		 make parameter NULL.  */
3006 	      if ((HOST_WIDE_INT) probable_max_size != -1)
3007 	        create_integer_operand (&ops[8], probable_max_size);
3008 	      else
3009 		create_fixed_operand (&ops[8], NULL);
3010 	    }
3011 	  if (maybe_expand_insn (code, nops, ops))
3012 	    return true;
3013 	}
3014     }
3015 
3016   return false;
3017 }
3018 
3019 
3020 /* Write to one of the components of the complex value CPLX.  Write VAL to
3021    the real part if IMAG_P is false, and the imaginary part if it's true.  */
3022 
3023 static void
3024 write_complex_part (rtx cplx, rtx val, bool imag_p)
3025 {
3026   enum machine_mode cmode;
3027   enum machine_mode imode;
3028   unsigned ibitsize;
3029 
3030   if (GET_CODE (cplx) == CONCAT)
3031     {
3032       emit_move_insn (XEXP (cplx, imag_p), val);
3033       return;
3034     }
3035 
3036   cmode = GET_MODE (cplx);
3037   imode = GET_MODE_INNER (cmode);
3038   ibitsize = GET_MODE_BITSIZE (imode);
3039 
3040   /* For MEMs simplify_gen_subreg may generate an invalid new address
3041      because, e.g., the original address is considered mode-dependent
3042      by the target, which restricts simplify_subreg from invoking
3043      adjust_address_nv.  Instead of preparing fallback support for an
3044      invalid address, we call adjust_address_nv directly.  */
3045   if (MEM_P (cplx))
3046     {
3047       emit_move_insn (adjust_address_nv (cplx, imode,
3048 					 imag_p ? GET_MODE_SIZE (imode) : 0),
3049 		      val);
3050       return;
3051     }
3052 
3053   /* If the sub-object is at least word sized, then we know that subregging
3054      will work.  This special case is important, since store_bit_field
3055      wants to operate on integer modes, and there's rarely an OImode to
3056      correspond to TCmode.  */
3057   if (ibitsize >= BITS_PER_WORD
3058       /* For hard regs we have exact predicates.  Assume we can split
3059 	 the original object if it spans an even number of hard regs.
3060 	 This special case is important for SCmode on 64-bit platforms
3061 	 where the natural size of floating-point regs is 32-bit.  */
3062       || (REG_P (cplx)
3063 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3064 	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3065     {
3066       rtx part = simplify_gen_subreg (imode, cplx, cmode,
3067 				      imag_p ? GET_MODE_SIZE (imode) : 0);
3068       if (part)
3069         {
3070 	  emit_move_insn (part, val);
3071 	  return;
3072 	}
3073       else
3074 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3075 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3076     }
3077 
3078   store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3079 }
3080 
3081 /* Extract one of the components of the complex value CPLX.  Extract the
3082    real part if IMAG_P is false, and the imaginary part if it's true.  */
3083 
3084 static rtx
3085 read_complex_part (rtx cplx, bool imag_p)
3086 {
3087   enum machine_mode cmode, imode;
3088   unsigned ibitsize;
3089 
3090   if (GET_CODE (cplx) == CONCAT)
3091     return XEXP (cplx, imag_p);
3092 
3093   cmode = GET_MODE (cplx);
3094   imode = GET_MODE_INNER (cmode);
3095   ibitsize = GET_MODE_BITSIZE (imode);
3096 
3097   /* Special case reads from complex constants that got spilled to memory.  */
3098   if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3099     {
3100       tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3101       if (decl && TREE_CODE (decl) == COMPLEX_CST)
3102 	{
3103 	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3104 	  if (CONSTANT_CLASS_P (part))
3105 	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3106 	}
3107     }
3108 
3109   /* For MEMs simplify_gen_subreg may generate an invalid new address
3110      because, e.g., the original address is considered mode-dependent
3111      by the target, which restricts simplify_subreg from invoking
3112      adjust_address_nv.  Instead of preparing fallback support for an
3113      invalid address, we call adjust_address_nv directly.  */
3114   if (MEM_P (cplx))
3115     return adjust_address_nv (cplx, imode,
3116 			      imag_p ? GET_MODE_SIZE (imode) : 0);
3117 
3118   /* If the sub-object is at least word sized, then we know that subregging
3119      will work.  This special case is important, since extract_bit_field
3120      wants to operate on integer modes, and there's rarely an OImode to
3121      correspond to TCmode.  */
3122   if (ibitsize >= BITS_PER_WORD
3123       /* For hard regs we have exact predicates.  Assume we can split
3124 	 the original object if it spans an even number of hard regs.
3125 	 This special case is important for SCmode on 64-bit platforms
3126 	 where the natural size of floating-point regs is 32-bit.  */
3127       || (REG_P (cplx)
3128 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3129 	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3130     {
3131       rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3132 				     imag_p ? GET_MODE_SIZE (imode) : 0);
3133       if (ret)
3134         return ret;
3135       else
3136 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3137 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3138     }
3139 
3140   return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3141 			    true, NULL_RTX, imode, imode);
3142 }
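
/* Illustrative sketch only (not code used by the compiler itself): for an
   SCmode pseudo C, the two SFmode halves can be accessed with the pair of
   helpers above, e.g.

     rtx c  = gen_reg_rtx (SCmode);
     rtx re = read_complex_part (c, false);
     rtx im = read_complex_part (c, true);
     write_complex_part (c, CONST0_RTX (SFmode), true);

   reads RE and IM and then clears the imaginary half.  Which of the cases
   above is taken depends on whether C ends up as a CONCAT, a (hard or
   pseudo) register, or a MEM.  */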
3143 
3144 /* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
3145    NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
3146    represented in NEW_MODE.  If FORCE is true, this will never happen, as
3147    we'll force-create a SUBREG if needed.  */
3148 
3149 static rtx
3150 emit_move_change_mode (enum machine_mode new_mode,
3151 		       enum machine_mode old_mode, rtx x, bool force)
3152 {
3153   rtx ret;
3154 
3155   if (push_operand (x, GET_MODE (x)))
3156     {
3157       ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3158       MEM_COPY_ATTRIBUTES (ret, x);
3159     }
3160   else if (MEM_P (x))
3161     {
3162       /* We don't have to worry about changing the address since the
3163 	 size in bytes is supposed to be the same.  */
3164       if (reload_in_progress)
3165 	{
3166 	  /* Copy the MEM to change the mode and move any
3167 	     substitutions from the old MEM to the new one.  */
3168 	  ret = adjust_address_nv (x, new_mode, 0);
3169 	  copy_replacements (x, ret);
3170 	}
3171       else
3172 	ret = adjust_address (x, new_mode, 0);
3173     }
3174   else
3175     {
3176       /* Note that we do want simplify_subreg's behavior of validating
3177 	 that the new mode is ok for a hard register.  If we were to use
3178 	 simplify_gen_subreg, we would create the subreg, but would
3179 	 probably run into the target not being able to implement it.  */
3180       /* Except, of course, when FORCE is true, when this is exactly what
3181 	 we want.  Which is needed for CCmodes on some targets.  */
3182       if (force)
3183 	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3184       else
3185 	ret = simplify_subreg (new_mode, x, old_mode, 0);
3186     }
3187 
3188   return ret;
3189 }
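
/* For illustration (a sketch of typical results, not a guarantee): the
   caller below uses this to view both operands of a move in the integer
   mode of the same size, so that e.g. a DFmode MEM is re-viewed as a
   DImode MEM at the same address via adjust_address, while a DFmode
   pseudo becomes (subreg:DI (reg:DF ...) 0), assuming the target has a
   DImode of the same size as DFmode.  */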
3190 
3191 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
3192    an integer mode of the same size as MODE.  Returns the instruction
3193    emitted, or NULL if such a move could not be generated.  */
3194 
3195 static rtx
3196 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3197 {
3198   enum machine_mode imode;
3199   enum insn_code code;
3200 
3201   /* There must exist a mode of the exact size we require.  */
3202   imode = int_mode_for_mode (mode);
3203   if (imode == BLKmode)
3204     return NULL_RTX;
3205 
3206   /* The target must support moves in this mode.  */
3207   code = optab_handler (mov_optab, imode);
3208   if (code == CODE_FOR_nothing)
3209     return NULL_RTX;
3210 
3211   x = emit_move_change_mode (imode, mode, x, force);
3212   if (x == NULL_RTX)
3213     return NULL_RTX;
3214   y = emit_move_change_mode (imode, mode, y, force);
3215   if (y == NULL_RTX)
3216     return NULL_RTX;
3217   return emit_insn (GEN_FCN (code) (x, y));
3218 }
3219 
3220 /* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
3221    Return an equivalent MEM that does not use an auto-increment.  */
3222 
3223 rtx
3224 emit_move_resolve_push (enum machine_mode mode, rtx x)
3225 {
3226   enum rtx_code code = GET_CODE (XEXP (x, 0));
3227   HOST_WIDE_INT adjust;
3228   rtx temp;
3229 
3230   adjust = GET_MODE_SIZE (mode);
3231 #ifdef PUSH_ROUNDING
3232   adjust = PUSH_ROUNDING (adjust);
3233 #endif
3234   if (code == PRE_DEC || code == POST_DEC)
3235     adjust = -adjust;
3236   else if (code == PRE_MODIFY || code == POST_MODIFY)
3237     {
3238       rtx expr = XEXP (XEXP (x, 0), 1);
3239       HOST_WIDE_INT val;
3240 
3241       gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3242       gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3243       val = INTVAL (XEXP (expr, 1));
3244       if (GET_CODE (expr) == MINUS)
3245 	val = -val;
3246       gcc_assert (adjust == val || adjust == -val);
3247       adjust = val;
3248     }
3249 
3250   /* Do not use anti_adjust_stack, since we don't want to update
3251      stack_pointer_delta.  */
3252   temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3253 			      gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3254 			      0, OPTAB_LIB_WIDEN);
3255   if (temp != stack_pointer_rtx)
3256     emit_move_insn (stack_pointer_rtx, temp);
3257 
3258   switch (code)
3259     {
3260     case PRE_INC:
3261     case PRE_DEC:
3262     case PRE_MODIFY:
3263       temp = stack_pointer_rtx;
3264       break;
3265     case POST_INC:
3266     case POST_DEC:
3267     case POST_MODIFY:
3268       temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3269       break;
3270     default:
3271       gcc_unreachable ();
3272     }
3273 
3274   return replace_equiv_address (x, temp);
3275 }
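
/* Example (sizes are illustrative and target-dependent): on a machine
   whose stack grows downward and where DFmode is 8 bytes,

     (mem:DF (pre_dec (reg sp)))

   is resolved by emitting an explicit  sp = sp - 8  and returning

     (mem:DF (reg sp))

   while for a POST_DEC address the returned MEM is at  sp + 8, i.e. the
   location the store would have used before the pointer moved.  */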
3276 
3277 /* A subroutine of emit_move_complex.  Generate a move from Y into X.
3278    X is known to satisfy push_operand, and MODE is known to be complex.
3279    Returns the last instruction emitted.  */
3280 
3281 rtx
3282 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3283 {
3284   enum machine_mode submode = GET_MODE_INNER (mode);
3285   bool imag_first;
3286 
3287 #ifdef PUSH_ROUNDING
3288   unsigned int submodesize = GET_MODE_SIZE (submode);
3289 
3290   /* If we are writing to the stack but the machine cannot push exactly
3291      this many bytes (PUSH_ROUNDING changes the size), use move insns.  */
3292   if (PUSH_ROUNDING (submodesize) != submodesize)
3293     {
3294       x = emit_move_resolve_push (mode, x);
3295       return emit_move_insn (x, y);
3296     }
3297 #endif
3298 
3299   /* Note that the real part always precedes the imag part in memory
3300      regardless of the machine's endianness.  */
3301   switch (GET_CODE (XEXP (x, 0)))
3302     {
3303     case PRE_DEC:
3304     case POST_DEC:
3305       imag_first = true;
3306       break;
3307     case PRE_INC:
3308     case POST_INC:
3309       imag_first = false;
3310       break;
3311     default:
3312       gcc_unreachable ();
3313     }
3314 
3315   emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3316 		  read_complex_part (y, imag_first));
3317   return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3318 			 read_complex_part (y, !imag_first));
3319 }
3320 
3321 /* A subroutine of emit_move_complex.  Perform the move from Y to X
3322    via two moves of the parts.  Returns the last instruction emitted.  */
3323 
3324 rtx
3325 emit_move_complex_parts (rtx x, rtx y)
3326 {
3327   /* Show the output dies here.  This is necessary for SUBREGs
3328      of pseudos since we cannot track their lifetimes correctly;
3329      hard regs shouldn't appear here except as return values.  */
3330   if (!reload_completed && !reload_in_progress
3331       && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3332     emit_clobber (x);
3333 
3334   write_complex_part (x, read_complex_part (y, false), false);
3335   write_complex_part (x, read_complex_part (y, true), true);
3336 
3337   return get_last_insn ();
3338 }
3339 
3340 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3341    MODE is known to be complex.  Returns the last instruction emitted.  */
3342 
3343 static rtx
3344 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3345 {
3346   bool try_int;
3347 
3348   /* Need to take special care for pushes, to maintain proper ordering
3349      of the data, and possibly extra padding.  */
3350   if (push_operand (x, mode))
3351     return emit_move_complex_push (mode, x, y);
3352 
3353   /* See if we can coerce the target into moving both values at once, except
3354      for floating point where we favor moving as parts if this is easy.  */
3355   if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3356       && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3357       && !(REG_P (x)
3358 	   && HARD_REGISTER_P (x)
3359 	   && hard_regno_nregs[REGNO (x)][mode] == 1)
3360       && !(REG_P (y)
3361 	   && HARD_REGISTER_P (y)
3362 	   && hard_regno_nregs[REGNO (y)][mode] == 1))
3363     try_int = false;
3364   /* Not possible if the values are inherently not adjacent.  */
3365   else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3366     try_int = false;
3367   /* Is possible if both are registers (or subregs of registers).  */
3368   else if (register_operand (x, mode) && register_operand (y, mode))
3369     try_int = true;
3370   /* If one of the operands is a memory, and alignment constraints
3371      are friendly enough, we may be able to do combined memory operations.
3372      We do not attempt this if Y is a constant because that combination is
3373      usually handled better by the by-parts code below.  */
3374   else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3375 	   && (!STRICT_ALIGNMENT
3376 	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3377     try_int = true;
3378   else
3379     try_int = false;
3380 
3381   if (try_int)
3382     {
3383       rtx ret;
3384 
3385       /* For memory to memory moves, optimal behavior can be had with the
3386 	 existing block move logic.  */
3387       if (MEM_P (x) && MEM_P (y))
3388 	{
3389 	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3390 			   BLOCK_OP_NO_LIBCALL);
3391 	  return get_last_insn ();
3392 	}
3393 
3394       ret = emit_move_via_integer (mode, x, y, true);
3395       if (ret)
3396 	return ret;
3397     }
3398 
3399   return emit_move_complex_parts (x, y);
3400 }
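
/* For illustration: on a target without a DCmode move pattern, a DCmode
   register-to-register copy normally reaches emit_move_complex_parts,
   which emits a (clobber (reg:DC x)) followed by two DFmode part moves;
   the clobber tells dataflow that all of X is being set.  Two MEM
   operands, in contrast, may be handled as a single block move above.  */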
3401 
3402 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3403    MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3404 
3405 static rtx
3406 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3407 {
3408   rtx ret;
3409 
3410   /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3411   if (mode != CCmode)
3412     {
3413       enum insn_code code = optab_handler (mov_optab, CCmode);
3414       if (code != CODE_FOR_nothing)
3415 	{
3416 	  x = emit_move_change_mode (CCmode, mode, x, true);
3417 	  y = emit_move_change_mode (CCmode, mode, y, true);
3418 	  return emit_insn (GEN_FCN (code) (x, y));
3419 	}
3420     }
3421 
3422   /* Otherwise, find the MODE_INT mode of the same width.  */
3423   ret = emit_move_via_integer (mode, x, y, false);
3424   gcc_assert (ret != NULL);
3425   return ret;
3426 }
3427 
3428 /* Return true if word I of OP lies entirely in the
3429    undefined bits of a paradoxical subreg.  */
3430 
3431 static bool
3432 undefined_operand_subword_p (const_rtx op, int i)
3433 {
3434   enum machine_mode innermode, innermostmode;
3435   int offset;
3436   if (GET_CODE (op) != SUBREG)
3437     return false;
3438   innermode = GET_MODE (op);
3439   innermostmode = GET_MODE (SUBREG_REG (op));
3440   offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3441   /* The SUBREG_BYTE represents offset, as if the value were stored in
3442      memory, except for a paradoxical subreg where we define
3443      SUBREG_BYTE to be 0; undo this exception as in
3444      simplify_subreg.  */
3445   if (SUBREG_BYTE (op) == 0
3446       && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3447     {
3448       int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3449       if (WORDS_BIG_ENDIAN)
3450 	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3451       if (BYTES_BIG_ENDIAN)
3452 	offset += difference % UNITS_PER_WORD;
3453     }
3454   if (offset >= GET_MODE_SIZE (innermostmode)
3455       || offset <= -GET_MODE_SIZE (word_mode))
3456     return true;
3457   return false;
3458 }
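
/* Example (assuming a little-endian target with 64-bit words): in the
   paradoxical subreg  (subreg:TI (reg:DI r) 0)  word 0 comes from R, but
   word 1 lies entirely in the undefined upper half, so this predicate
   returns true for I == 1 and emit_move_multi_word below does not emit
   a move for that word.  */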
3459 
3460 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3461    MODE is any multi-word or full-word mode that lacks a move_insn
3462    pattern.  Note that you will get better code if you define such
3463    patterns, even if they must turn into multiple assembler instructions.  */
3464 
3465 static rtx
3466 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3467 {
3468   rtx last_insn = 0;
3469   rtx seq, inner;
3470   bool need_clobber;
3471   int i;
3472 
3473   gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3474 
3475   /* If X is a push on the stack, do the push now and replace
3476      X with a reference to the stack pointer.  */
3477   if (push_operand (x, mode))
3478     x = emit_move_resolve_push (mode, x);
3479 
3480   /* If we are in reload, see if either operand is a MEM whose address
3481      is scheduled for replacement.  */
3482   if (reload_in_progress && MEM_P (x)
3483       && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3484     x = replace_equiv_address_nv (x, inner);
3485   if (reload_in_progress && MEM_P (y)
3486       && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3487     y = replace_equiv_address_nv (y, inner);
3488 
3489   start_sequence ();
3490 
3491   need_clobber = false;
3492   for (i = 0;
3493        i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3494        i++)
3495     {
3496       rtx xpart = operand_subword (x, i, 1, mode);
3497       rtx ypart;
3498 
3499       /* Do not generate code for a move if it would come entirely
3500 	 from the undefined bits of a paradoxical subreg.  */
3501       if (undefined_operand_subword_p (y, i))
3502 	continue;
3503 
3504       ypart = operand_subword (y, i, 1, mode);
3505 
3506       /* If we can't get a part of Y, put Y into memory if it is a
3507 	 constant.  Otherwise, force it into a register.  Then we must
3508 	 be able to get a part of Y.  */
3509       if (ypart == 0 && CONSTANT_P (y))
3510 	{
3511 	  y = use_anchored_address (force_const_mem (mode, y));
3512 	  ypart = operand_subword (y, i, 1, mode);
3513 	}
3514       else if (ypart == 0)
3515 	ypart = operand_subword_force (y, i, mode);
3516 
3517       gcc_assert (xpart && ypart);
3518 
3519       need_clobber |= (GET_CODE (xpart) == SUBREG);
3520 
3521       last_insn = emit_move_insn (xpart, ypart);
3522     }
3523 
3524   seq = get_insns ();
3525   end_sequence ();
3526 
3527   /* Show the output dies here.  This is necessary for SUBREGs
3528      of pseudos since we cannot track their lifetimes correctly;
3529      hard regs shouldn't appear here except as return values.
3530      We never want to emit such a clobber after reload.  */
3531   if (x != y
3532       && ! (reload_in_progress || reload_completed)
3533       && need_clobber != 0)
3534     emit_clobber (x);
3535 
3536   emit_insn (seq);
3537 
3538   return last_insn;
3539 }
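
/* Sketch of the expansion above (assuming 64-bit words and no TImode
   move pattern): a TImode pseudo-to-pseudo copy becomes roughly

     (clobber (reg:TI x))
     (set (subreg:DI (reg:TI x) 0) (subreg:DI (reg:TI y) 0))
     (set (subreg:DI (reg:TI x) 8) (subreg:DI (reg:TI y) 8))

   where the clobber is emitted only because the destination words are
   SUBREGs of a pseudo whose lifetime cannot otherwise be tracked.  */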
3540 
3541 /* Low level part of emit_move_insn.
3542    Called just like emit_move_insn, but assumes X and Y
3543    are basically valid.  */
3544 
3545 rtx
3546 emit_move_insn_1 (rtx x, rtx y)
3547 {
3548   enum machine_mode mode = GET_MODE (x);
3549   enum insn_code code;
3550 
3551   gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3552 
3553   code = optab_handler (mov_optab, mode);
3554   if (code != CODE_FOR_nothing)
3555     return emit_insn (GEN_FCN (code) (x, y));
3556 
3557   /* Expand complex moves by moving real part and imag part.  */
3558   if (COMPLEX_MODE_P (mode))
3559     return emit_move_complex (mode, x, y);
3560 
3561   if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3562       || ALL_FIXED_POINT_MODE_P (mode))
3563     {
3564       rtx result = emit_move_via_integer (mode, x, y, true);
3565 
3566       /* If we can't find an integer mode, use multi words.  */
3567       if (result)
3568 	return result;
3569       else
3570 	return emit_move_multi_word (mode, x, y);
3571     }
3572 
3573   if (GET_MODE_CLASS (mode) == MODE_CC)
3574     return emit_move_ccmode (mode, x, y);
3575 
3576   /* Try using a move pattern for the corresponding integer mode.  This is
3577      only safe when simplify_subreg can convert MODE constants into integer
3578      constants.  At present, it can only do this reliably if the value
3579      fits within a HOST_WIDE_INT.  */
3580   if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3581     {
3582       rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3583 
3584       if (ret)
3585 	{
3586 	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3587 	    return ret;
3588 	}
3589     }
3590 
3591   return emit_move_multi_word (mode, x, y);
3592 }
3593 
3594 /* Generate code to copy Y into X.
3595    Both Y and X must have the same mode, except that
3596    Y can be a constant with VOIDmode.
3597    This mode cannot be BLKmode; use emit_block_move for that.
3598 
3599    Return the last instruction emitted.  */
3600 
3601 rtx
3602 emit_move_insn (rtx x, rtx y)
3603 {
3604   enum machine_mode mode = GET_MODE (x);
3605   rtx y_cst = NULL_RTX;
3606   rtx last_insn, set;
3607 
3608   gcc_assert (mode != BLKmode
3609 	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3610 
3611   if (CONSTANT_P (y))
3612     {
3613       if (optimize
3614 	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3615 	  && (last_insn = compress_float_constant (x, y)))
3616 	return last_insn;
3617 
3618       y_cst = y;
3619 
3620       if (!targetm.legitimate_constant_p (mode, y))
3621 	{
3622 	  y = force_const_mem (mode, y);
3623 
3624 	  /* If the target's cannot_force_const_mem prevented the spill,
3625 	     assume that the target's move expanders will also take care
3626 	     of the non-legitimate constant.  */
3627 	  if (!y)
3628 	    y = y_cst;
3629 	  else
3630 	    y = use_anchored_address (y);
3631 	}
3632     }
3633 
3634   /* If X or Y are memory references, verify that their addresses are valid
3635      for the machine.  */
3636   if (MEM_P (x)
3637       && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3638 					 MEM_ADDR_SPACE (x))
3639 	  && ! push_operand (x, GET_MODE (x))))
3640     x = validize_mem (x);
3641 
3642   if (MEM_P (y)
3643       && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3644 					MEM_ADDR_SPACE (y)))
3645     y = validize_mem (y);
3646 
3647   gcc_assert (mode != BLKmode);
3648 
3649   last_insn = emit_move_insn_1 (x, y);
3650 
3651   if (y_cst && REG_P (x)
3652       && (set = single_set (last_insn)) != NULL_RTX
3653       && SET_DEST (set) == x
3654       && ! rtx_equal_p (y_cst, SET_SRC (set)))
3655     set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3656 
3657   return last_insn;
3658 }
3659 
3660 /* If Y is representable exactly in a narrower mode, and the target can
3661    perform the extension directly from constant or memory, then emit the
3662    move as an extension.  */
3663 
3664 static rtx
3665 compress_float_constant (rtx x, rtx y)
3666 {
3667   enum machine_mode dstmode = GET_MODE (x);
3668   enum machine_mode orig_srcmode = GET_MODE (y);
3669   enum machine_mode srcmode;
3670   REAL_VALUE_TYPE r;
3671   int oldcost, newcost;
3672   bool speed = optimize_insn_for_speed_p ();
3673 
3674   REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3675 
3676   if (targetm.legitimate_constant_p (dstmode, y))
3677     oldcost = set_src_cost (y, speed);
3678   else
3679     oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3680 
3681   for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3682        srcmode != orig_srcmode;
3683        srcmode = GET_MODE_WIDER_MODE (srcmode))
3684     {
3685       enum insn_code ic;
3686       rtx trunc_y, last_insn;
3687 
3688       /* Skip if the target can't extend this way.  */
3689       ic = can_extend_p (dstmode, srcmode, 0);
3690       if (ic == CODE_FOR_nothing)
3691 	continue;
3692 
3693       /* Skip if the narrowed value isn't exact.  */
3694       if (! exact_real_truncate (srcmode, &r))
3695 	continue;
3696 
3697       trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3698 
3699       if (targetm.legitimate_constant_p (srcmode, trunc_y))
3700 	{
3701 	  /* Skip if the target needs extra instructions to perform
3702 	     the extension.  */
3703 	  if (!insn_operand_matches (ic, 1, trunc_y))
3704 	    continue;
3705 	  /* This is valid, but may not be cheaper than the original. */
3706 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3707 				  speed);
3708 	  if (oldcost < newcost)
3709 	    continue;
3710 	}
3711       else if (float_extend_from_mem[dstmode][srcmode])
3712 	{
3713 	  trunc_y = force_const_mem (srcmode, trunc_y);
3714 	  /* This is valid, but may not be cheaper than the original. */
3715 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3716 				  speed);
3717 	  if (oldcost < newcost)
3718 	    continue;
3719 	  trunc_y = validize_mem (trunc_y);
3720 	}
3721       else
3722 	continue;
3723 
3724       /* For CSE's benefit, force the compressed constant pool entry
3725 	 into a new pseudo.  This constant may be used in different modes,
3726 	 and if not, combine will put things back together for us.  */
3727       trunc_y = force_reg (srcmode, trunc_y);
3728 
3729       /* If x is a hard register, perform the extension into a pseudo,
3730 	 so that e.g. stack realignment code is aware of it.  */
3731       rtx target = x;
3732       if (REG_P (x) && HARD_REGISTER_P (x))
3733 	target = gen_reg_rtx (dstmode);
3734 
3735       emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3736       last_insn = get_last_insn ();
3737 
3738       if (REG_P (target))
3739 	set_unique_reg_note (last_insn, REG_EQUAL, y);
3740 
3741       if (target != x)
3742 	return emit_move_insn (x, target);
3743       return last_insn;
3744     }
3745 
3746   return NULL_RTX;
3747 }
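
/* Example (whether this fires is purely a question of target costs): a
   move of the DFmode constant 1.0 can be emitted as

     (set (reg:DF d) (float_extend:DF (const_double:SF 1.0)))

   or, if the target can only extend from memory, as a FLOAT_EXTEND of an
   SFmode constant-pool reference, provided the truncation to SFmode is
   exact and the extension is no more expensive than the original
   constant.  */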
3748 
3749 /* Pushing data onto the stack.  */
3750 
3751 /* Push a block of length SIZE (perhaps variable)
3752    and return an rtx to address the beginning of the block.
3753    The value may be virtual_outgoing_args_rtx.
3754 
3755    EXTRA is the number of bytes of padding to push in addition to SIZE.
3756    BELOW nonzero means this padding comes at low addresses;
3757    otherwise, the padding comes at high addresses.  */
3758 
3759 rtx
3760 push_block (rtx size, int extra, int below)
3761 {
3762   rtx temp;
3763 
3764   size = convert_modes (Pmode, ptr_mode, size, 1);
3765   if (CONSTANT_P (size))
3766     anti_adjust_stack (plus_constant (Pmode, size, extra));
3767   else if (REG_P (size) && extra == 0)
3768     anti_adjust_stack (size);
3769   else
3770     {
3771       temp = copy_to_mode_reg (Pmode, size);
3772       if (extra != 0)
3773 	temp = expand_binop (Pmode, add_optab, temp,
3774 			     gen_int_mode (extra, Pmode),
3775 			     temp, 0, OPTAB_LIB_WIDEN);
3776       anti_adjust_stack (temp);
3777     }
3778 
3779 #ifndef STACK_GROWS_DOWNWARD
3780   if (0)
3781 #else
3782   if (1)
3783 #endif
3784     {
3785       temp = virtual_outgoing_args_rtx;
3786       if (extra != 0 && below)
3787 	temp = plus_constant (Pmode, temp, extra);
3788     }
3789   else
3790     {
3791       if (CONST_INT_P (size))
3792 	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3793 			      -INTVAL (size) - (below ? 0 : extra));
3794       else if (extra != 0 && !below)
3795 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3796 			     negate_rtx (Pmode, plus_constant (Pmode, size,
3797 							       extra)));
3798       else
3799 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3800 			     negate_rtx (Pmode, size));
3801     }
3802 
3803   return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3804 }
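
/* For illustration: on a machine whose stack grows downward,
   push_block (GEN_INT (16), 0, 0) emits a 16-byte stack adjustment and
   returns virtual_outgoing_args_rtx as the address of the new block;
   if the stack grows upward, the returned address instead lies SIZE
   bytes below virtual_outgoing_args_rtx.  */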
3805 
3806 /* A utility routine that returns the base of an auto-inc memory, or NULL.  */
3807 
3808 static rtx
3809 mem_autoinc_base (rtx mem)
3810 {
3811   if (MEM_P (mem))
3812     {
3813       rtx addr = XEXP (mem, 0);
3814       if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3815 	return XEXP (addr, 0);
3816     }
3817   return NULL;
3818 }
3819 
3820 /* A utility routine used here, in reload, and in try_split.  The insns
3821    after PREV up to and including LAST are known to adjust the stack,
3822    with a final value of END_ARGS_SIZE.  Iterate backward from LAST
3823    placing notes as appropriate.  PREV may be NULL, indicating the
3824    entire insn sequence prior to LAST should be scanned.
3825 
3826    The set of allowed stack pointer modifications is small:
3827      (1) One or more auto-inc style memory references (aka pushes),
3828      (2) One or more addition/subtraction with the SP as destination,
3829      (3) A single move insn with the SP as destination,
3830      (4) A call_pop insn,
3831      (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3832 
3833    Insns in the sequence that do not modify the SP are ignored,
3834    except for noreturn calls.
3835 
3836    The return value is the amount of adjustment that can be trivially
3837    verified, via immediate operand or auto-inc.  If the adjustment
3838    cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN.  */
3839 
3840 HOST_WIDE_INT
3841 find_args_size_adjust (rtx insn)
3842 {
3843   rtx dest, set, pat;
3844   int i;
3845 
3846   pat = PATTERN (insn);
3847   set = NULL;
3848 
3849   /* Look for a call_pop pattern.  */
3850   if (CALL_P (insn))
3851     {
3852       /* We have to allow non-call_pop patterns for the case
3853 	 of emit_single_push_insn of a TLS address.  */
3854       if (GET_CODE (pat) != PARALLEL)
3855 	return 0;
3856 
3857       /* All call_pop have a stack pointer adjust in the parallel.
3858 	 The call itself is always first, and the stack adjust is
3859 	 usually last, so search from the end.  */
3860       for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3861 	{
3862 	  set = XVECEXP (pat, 0, i);
3863 	  if (GET_CODE (set) != SET)
3864 	    continue;
3865 	  dest = SET_DEST (set);
3866 	  if (dest == stack_pointer_rtx)
3867 	    break;
3868 	}
3869       /* We'd better have found the stack pointer adjust.  */
3870       if (i == 0)
3871 	return 0;
3872       /* Fall through to process the extracted SET and DEST
3873 	 as if it was a standalone insn.  */
3874     }
3875   else if (GET_CODE (pat) == SET)
3876     set = pat;
3877   else if ((set = single_set (insn)) != NULL)
3878     ;
3879   else if (GET_CODE (pat) == PARALLEL)
3880     {
3881       /* ??? Some older ports use a parallel with a stack adjust
3882 	 and a store for a PUSH_ROUNDING pattern, rather than a
3883 	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
3884       /* ??? See h8300 and m68k, pushqi1.  */
3885       for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3886 	{
3887 	  set = XVECEXP (pat, 0, i);
3888 	  if (GET_CODE (set) != SET)
3889 	    continue;
3890 	  dest = SET_DEST (set);
3891 	  if (dest == stack_pointer_rtx)
3892 	    break;
3893 
3894 	  /* We do not expect an auto-inc of the sp in the parallel.  */
3895 	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3896 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3897 			       != stack_pointer_rtx);
3898 	}
3899       if (i < 0)
3900 	return 0;
3901     }
3902   else
3903     return 0;
3904 
3905   dest = SET_DEST (set);
3906 
3907   /* Look for direct modifications of the stack pointer.  */
3908   if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3909     {
3910       /* Look for a trivial adjustment, otherwise assume nothing.  */
3911       /* Note that the SPU restore_stack_block pattern refers to
3912 	 the stack pointer in V4SImode.  Consider that non-trivial.  */
3913       if (SCALAR_INT_MODE_P (GET_MODE (dest))
3914 	  && GET_CODE (SET_SRC (set)) == PLUS
3915 	  && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3916 	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3917 	return INTVAL (XEXP (SET_SRC (set), 1));
3918       /* ??? Reload can generate no-op moves, which will be cleaned
3919 	 up later.  Recognize it and continue searching.  */
3920       else if (rtx_equal_p (dest, SET_SRC (set)))
3921 	return 0;
3922       else
3923 	return HOST_WIDE_INT_MIN;
3924     }
3925   else
3926     {
3927       rtx mem, addr;
3928 
3929       /* Otherwise only think about autoinc patterns.  */
3930       if (mem_autoinc_base (dest) == stack_pointer_rtx)
3931 	{
3932 	  mem = dest;
3933 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3934 			       != stack_pointer_rtx);
3935 	}
3936       else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3937 	mem = SET_SRC (set);
3938       else
3939 	return 0;
3940 
3941       addr = XEXP (mem, 0);
3942       switch (GET_CODE (addr))
3943 	{
3944 	case PRE_INC:
3945 	case POST_INC:
3946 	  return GET_MODE_SIZE (GET_MODE (mem));
3947 	case PRE_DEC:
3948 	case POST_DEC:
3949 	  return -GET_MODE_SIZE (GET_MODE (mem));
3950 	case PRE_MODIFY:
3951 	case POST_MODIFY:
3952 	  addr = XEXP (addr, 1);
3953 	  gcc_assert (GET_CODE (addr) == PLUS);
3954 	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3955 	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3956 	  return INTVAL (XEXP (addr, 1));
3957 	default:
3958 	  gcc_unreachable ();
3959 	}
3960     }
3961 }
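
/* Examples of the trivially verified adjustments described above
   (sizes assume a 4-byte SImode):

     (set (reg sp) (plus (reg sp) (const_int -16)))   ->  -16
     (set (mem:SI (pre_dec (reg sp))) (reg r0))       ->  -4
     (set (mem:SI (post_inc (reg sp))) (reg r0))      ->   4
     (set (reg sp) (reg fp))                          ->  HOST_WIDE_INT_MIN

   Insns that do not reference the stack pointer at all yield 0.  */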
3962 
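/* Scan the insns after PREV up to and including LAST, which are known to
   adjust the stack as described in the comment before
   find_args_size_adjust, and attach to each stack-adjusting insn (and to
   noreturn calls when !ACCUMULATE_OUTGOING_ARGS) a REG_ARGS_SIZE note
   recording the args size in effect after that insn; the size after LAST
   is END_ARGS_SIZE.  Return the args size before the first scanned insn,
   or INT_MIN if some adjustment could not be determined.  */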
3963 int
3964 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3965 {
3966   int args_size = end_args_size;
3967   bool saw_unknown = false;
3968   rtx insn;
3969 
3970   for (insn = last; insn != prev; insn = PREV_INSN (insn))
3971     {
3972       HOST_WIDE_INT this_delta;
3973 
3974       if (!NONDEBUG_INSN_P (insn))
3975 	continue;
3976 
3977       this_delta = find_args_size_adjust (insn);
3978       if (this_delta == 0)
3979 	{
3980 	  if (!CALL_P (insn)
3981 	      || ACCUMULATE_OUTGOING_ARGS
3982 	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3983 	    continue;
3984 	}
3985 
3986       gcc_assert (!saw_unknown);
3987       if (this_delta == HOST_WIDE_INT_MIN)
3988 	saw_unknown = true;
3989 
3990       add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3991 #ifdef STACK_GROWS_DOWNWARD
3992       this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3993 #endif
3994       args_size -= this_delta;
3995     }
3996 
3997   return saw_unknown ? INT_MIN : args_size;
3998 }
3999 
4000 #ifdef PUSH_ROUNDING
4001 /* Emit single push insn.  */
4002 
4003 static void
4004 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
4005 {
4006   rtx dest_addr;
4007   unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4008   rtx dest;
4009   enum insn_code icode;
4010 
4011   stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4012   /* If there is a push pattern, use it.  Otherwise fall back to the old
4013      way of handing a MEM representing the push to the move expander.  */
4014   icode = optab_handler (push_optab, mode);
4015   if (icode != CODE_FOR_nothing)
4016     {
4017       struct expand_operand ops[1];
4018 
4019       create_input_operand (&ops[0], x, mode);
4020       if (maybe_expand_insn (icode, 1, ops))
4021 	return;
4022     }
4023   if (GET_MODE_SIZE (mode) == rounded_size)
4024     dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4025   /* If we are to pad downward, adjust the stack pointer first and
4026      then store X into the stack location using an offset.  This is
4027      because emit_move_insn does not know how to pad; it does not have
4028      access to type.  */
4029   else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4030     {
4031       unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4032       HOST_WIDE_INT offset;
4033 
4034       emit_move_insn (stack_pointer_rtx,
4035 		      expand_binop (Pmode,
4036 #ifdef STACK_GROWS_DOWNWARD
4037 				    sub_optab,
4038 #else
4039 				    add_optab,
4040 #endif
4041 				    stack_pointer_rtx,
4042 				    gen_int_mode (rounded_size, Pmode),
4043 				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
4044 
4045       offset = (HOST_WIDE_INT) padding_size;
4046 #ifdef STACK_GROWS_DOWNWARD
4047       if (STACK_PUSH_CODE == POST_DEC)
4048 	/* We have already decremented the stack pointer, so get the
4049 	   previous value.  */
4050 	offset += (HOST_WIDE_INT) rounded_size;
4051 #else
4052       if (STACK_PUSH_CODE == POST_INC)
4053 	/* We have already incremented the stack pointer, so get the
4054 	   previous value.  */
4055 	offset -= (HOST_WIDE_INT) rounded_size;
4056 #endif
4057       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4058 				gen_int_mode (offset, Pmode));
4059     }
4060   else
4061     {
4062 #ifdef STACK_GROWS_DOWNWARD
4063       /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
4064       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4065 				gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4066 					      Pmode));
4067 #else
4068       /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
4069       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4070 				gen_int_mode (rounded_size, Pmode));
4071 #endif
4072       dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4073     }
4074 
4075   dest = gen_rtx_MEM (mode, dest_addr);
4076 
4077   if (type != 0)
4078     {
4079       set_mem_attributes (dest, type, 1);
4080 
4081       if (cfun->tail_call_marked)
4082 	/* Function incoming arguments may overlap with sibling call
4083 	   outgoing arguments and we cannot allow reordering of reads
4084 	   from function arguments with stores to outgoing arguments
4085 	   of sibling calls.  */
4086 	set_mem_alias_set (dest, 0);
4087     }
4088   emit_move_insn (dest, x);
4089 }
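
/* In the common case where GET_MODE_SIZE (MODE) already equals the
   rounded push size and there is no push_optab pattern, the store
   emitted above is simply

     (set (mem:MODE (pre_dec (reg sp))) X)

   (with pre_dec replaced by whatever STACK_PUSH_CODE is); the padding
   paths only matter when the machine pushes more bytes than the value
   occupies.  */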
4090 
4091 /* Emit and annotate a single push insn.  */
4092 
4093 static void
4094 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
4095 {
4096   int delta, old_delta = stack_pointer_delta;
4097   rtx prev = get_last_insn ();
4098   rtx last;
4099 
4100   emit_single_push_insn_1 (mode, x, type);
4101 
4102   last = get_last_insn ();
4103 
4104   /* Notice the common case where we emitted exactly one insn.  */
4105   if (PREV_INSN (last) == prev)
4106     {
4107       add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4108       return;
4109     }
4110 
4111   delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4112   gcc_assert (delta == INT_MIN || delta == old_delta);
4113 }
4114 #endif
4115 
4116 /* Generate code to push X onto the stack, assuming it has mode MODE and
4117    type TYPE.
4118    MODE is redundant except when X is a CONST_INT (since they don't
4119    carry mode info).
4120    SIZE is an rtx for the size of data to be copied (in bytes),
4121    needed only if X is BLKmode.
4122 
4123    ALIGN (in bits) is maximum alignment we can assume.
4124 
4125    If PARTIAL and REG are both nonzero, then copy that many of the first
4126    bytes of X into registers starting with REG, and push the rest of X.
4127    The amount of space pushed is decreased by PARTIAL bytes.
4128    REG must be a hard register in this case.
4129    If REG is zero but PARTIAL is not, take all other actions for an
4130    argument partially in registers, but do not actually load any
4131    registers.
4132 
4133    EXTRA is the amount in bytes of extra space to leave next to this arg.
4134    This is ignored if an argument block has already been allocated.
4135 
4136    On a machine that lacks real push insns, ARGS_ADDR is the address of
4137    the bottom of the argument block for this call.  We use indexing off there
4138    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
4139    argument block has not been preallocated.
4140 
4141    ARGS_SO_FAR is the size of args previously pushed for this call.
4142 
4143    REG_PARM_STACK_SPACE is nonzero if functions require stack space
4144    for arguments passed in registers.  If nonzero, it will be the number
4145    of bytes required.  */
4146 
4147 void
4148 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4149 		unsigned int align, int partial, rtx reg, int extra,
4150 		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4151 		rtx alignment_pad)
4152 {
4153   rtx xinner;
4154   enum direction stack_direction
4155 #ifdef STACK_GROWS_DOWNWARD
4156     = downward;
4157 #else
4158     = upward;
4159 #endif
4160 
4161   /* Decide where to pad the argument: `downward' for below,
4162      `upward' for above, or `none' for don't pad it.
4163      Default is below for small data on big-endian machines; else above.  */
4164   enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4165 
4166   /* Invert direction if stack is post-decrement.
4167      FIXME: why?  */
4168   if (STACK_PUSH_CODE == POST_DEC)
4169     if (where_pad != none)
4170       where_pad = (where_pad == downward ? upward : downward);
4171 
4172   xinner = x;
4173 
4174   if (mode == BLKmode
4175       || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4176     {
4177       /* Copy a block into the stack, entirely or partially.  */
4178 
4179       rtx temp;
4180       int used;
4181       int offset;
4182       int skip;
4183 
4184       offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4185       used = partial - offset;
4186 
4187       if (mode != BLKmode)
4188 	{
4189 	  /* A value is to be stored in an insufficiently aligned
4190 	     stack slot; copy via a suitably aligned slot if
4191 	     necessary.  */
4192 	  size = GEN_INT (GET_MODE_SIZE (mode));
4193 	  if (!MEM_P (xinner))
4194 	    {
4195 	      temp = assign_temp (type, 1, 1);
4196 	      emit_move_insn (temp, xinner);
4197 	      xinner = temp;
4198 	    }
4199 	}
4200 
4201       gcc_assert (size);
4202 
4203       /* USED is now the # of bytes we need not copy to the stack
4204 	 because registers will take care of them.  */
4205 
4206       if (partial != 0)
4207 	xinner = adjust_address (xinner, BLKmode, used);
4208 
4209       /* If the partial register-part of the arg counts in its stack size,
4210 	 skip the part of stack space corresponding to the registers.
4211 	 Otherwise, start copying to the beginning of the stack space,
4212 	 by setting SKIP to 0.  */
4213       skip = (reg_parm_stack_space == 0) ? 0 : used;
4214 
4215 #ifdef PUSH_ROUNDING
4216       /* Do it with several push insns if that doesn't take lots of insns
4217 	 and if there is no difficulty with push insns that skip bytes
4218 	 on the stack for alignment purposes.  */
4219       if (args_addr == 0
4220 	  && PUSH_ARGS
4221 	  && CONST_INT_P (size)
4222 	  && skip == 0
4223 	  && MEM_ALIGN (xinner) >= align
4224 	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4225 	  /* Here we avoid the case of a structure whose weak alignment
4226 	     forces many pushes of a small amount of data,
4227 	     and such small pushes do rounding that causes trouble.  */
4228 	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4229 	      || align >= BIGGEST_ALIGNMENT
4230 	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4231 		  == (align / BITS_PER_UNIT)))
4232 	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4233 	{
4234 	  /* Push padding now if padding above and stack grows down,
4235 	     or if padding below and stack grows up.
4236 	     But if space already allocated, this has already been done.  */
4237 	  if (extra && args_addr == 0
4238 	      && where_pad != none && where_pad != stack_direction)
4239 	    anti_adjust_stack (GEN_INT (extra));
4240 
4241 	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4242 	}
4243       else
4244 #endif /* PUSH_ROUNDING  */
4245 	{
4246 	  rtx target;
4247 
4248 	  /* Otherwise make space on the stack and copy the data
4249 	     to the address of that space.  */
4250 
4251 	  /* Deduct words put into registers from the size we must copy.  */
4252 	  if (partial != 0)
4253 	    {
4254 	      if (CONST_INT_P (size))
4255 		size = GEN_INT (INTVAL (size) - used);
4256 	      else
4257 		size = expand_binop (GET_MODE (size), sub_optab, size,
4258 				     gen_int_mode (used, GET_MODE (size)),
4259 				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
4260 	    }
4261 
4262 	  /* Get the address of the stack space.
4263 	     In this case, we do not deal with EXTRA separately.
4264 	     A single stack adjust will do.  */
4265 	  if (! args_addr)
4266 	    {
4267 	      temp = push_block (size, extra, where_pad == downward);
4268 	      extra = 0;
4269 	    }
4270 	  else if (CONST_INT_P (args_so_far))
4271 	    temp = memory_address (BLKmode,
4272 				   plus_constant (Pmode, args_addr,
4273 						  skip + INTVAL (args_so_far)));
4274 	  else
4275 	    temp = memory_address (BLKmode,
4276 				   plus_constant (Pmode,
4277 						  gen_rtx_PLUS (Pmode,
4278 								args_addr,
4279 								args_so_far),
4280 						  skip));
4281 
4282 	  if (!ACCUMULATE_OUTGOING_ARGS)
4283 	    {
4284 	      /* If the source is referenced relative to the stack pointer,
4285 		 copy it to another register to stabilize it.  We do not need
4286 		 to do this if we know that we won't be changing sp.  */
4287 
4288 	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4289 		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4290 		temp = copy_to_reg (temp);
4291 	    }
4292 
4293 	  target = gen_rtx_MEM (BLKmode, temp);
4294 
4295 	  /* We do *not* set_mem_attributes here, because incoming arguments
4296 	     may overlap with sibling call outgoing arguments and we cannot
4297 	     allow reordering of reads from function arguments with stores
4298 	     to outgoing arguments of sibling calls.  We do, however, want
4299 	     to record the alignment of the stack slot.  */
4300 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4301 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4302 	  set_mem_align (target, align);
4303 
4304 	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4305 	}
4306     }
4307   else if (partial > 0)
4308     {
4309       /* Scalar partly in registers.  */
4310 
4311       int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4312       int i;
4313       int not_stack;
4314       /* # bytes of start of argument
4315 	 that we must make space for but need not store.  */
4316       int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4317       int args_offset = INTVAL (args_so_far);
4318       int skip;
4319 
4320       /* Push padding now if padding above and stack grows down,
4321 	 or if padding below and stack grows up.
4322 	 But if space already allocated, this has already been done.  */
4323       if (extra && args_addr == 0
4324 	  && where_pad != none && where_pad != stack_direction)
4325 	anti_adjust_stack (GEN_INT (extra));
4326 
4327       /* If we make space by pushing it, we might as well push
4328 	 the real data.  Otherwise, we can leave OFFSET nonzero
4329 	 and leave the space uninitialized.  */
4330       if (args_addr == 0)
4331 	offset = 0;
4332 
4333       /* Now NOT_STACK gets the number of words that we don't need to
4334 	 allocate on the stack.  Convert OFFSET to words too.  */
4335       not_stack = (partial - offset) / UNITS_PER_WORD;
4336       offset /= UNITS_PER_WORD;
4337 
4338       /* If the partial register-part of the arg counts in its stack size,
4339 	 skip the part of stack space corresponding to the registers.
4340 	 Otherwise, start copying to the beginning of the stack space,
4341 	 by setting SKIP to 0.  */
4342       skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4343 
4344       if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4345 	x = validize_mem (force_const_mem (mode, x));
4346 
4347       /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4348 	 SUBREGs of such registers are not allowed.  */
4349       if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4350 	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4351 	x = copy_to_reg (x);
4352 
4353       /* Loop over all the words allocated on the stack for this arg.  */
4354       /* We can do it by words, because any scalar bigger than a word
4355 	 has a size a multiple of a word.  */
4356 #ifndef PUSH_ARGS_REVERSED
4357       for (i = not_stack; i < size; i++)
4358 #else
4359       for (i = size - 1; i >= not_stack; i--)
4360 #endif
4361 	if (i >= not_stack + offset)
4362 	  emit_push_insn (operand_subword_force (x, i, mode),
4363 			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4364 			  0, args_addr,
4365 			  GEN_INT (args_offset + ((i - not_stack + skip)
4366 						  * UNITS_PER_WORD)),
4367 			  reg_parm_stack_space, alignment_pad);
4368     }
4369   else
4370     {
4371       rtx addr;
4372       rtx dest;
4373 
4374       /* Push padding now if padding above and stack grows down,
4375 	 or if padding below and stack grows up.
4376 	 But if space already allocated, this has already been done.  */
4377       if (extra && args_addr == 0
4378 	  && where_pad != none && where_pad != stack_direction)
4379 	anti_adjust_stack (GEN_INT (extra));
4380 
4381 #ifdef PUSH_ROUNDING
4382       if (args_addr == 0 && PUSH_ARGS)
4383 	emit_single_push_insn (mode, x, type);
4384       else
4385 #endif
4386 	{
4387 	  if (CONST_INT_P (args_so_far))
4388 	    addr
4389 	      = memory_address (mode,
4390 				plus_constant (Pmode, args_addr,
4391 					       INTVAL (args_so_far)));
4392 	  else
4393 	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4394 						       args_so_far));
4395 	  dest = gen_rtx_MEM (mode, addr);
4396 
4397 	  /* We do *not* set_mem_attributes here, because incoming arguments
4398 	     may overlap with sibling call outgoing arguments and we cannot
4399 	     allow reordering of reads from function arguments with stores
4400 	     to outgoing arguments of sibling calls.  We do, however, want
4401 	     to record the alignment of the stack slot.  */
4402 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4403 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4404 	  set_mem_align (dest, align);
4405 
4406 	  emit_move_insn (dest, x);
4407 	}
4408     }
4409 
4410   /* If part should go in registers, copy that part
4411      into the appropriate registers.  Do this now, at the end,
4412      since mem-to-mem copies above may do function calls.  */
4413   if (partial > 0 && reg != 0)
4414     {
4415       /* Handle calls that pass values in multiple non-contiguous locations.
4416 	 The Irix 6 ABI has examples of this.  */
4417       if (GET_CODE (reg) == PARALLEL)
4418 	emit_group_load (reg, x, type, -1);
4419       else
4420 	{
4421 	  gcc_assert (partial % UNITS_PER_WORD == 0);
4422 	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4423 	}
4424     }
4425 
4426   if (extra && args_addr == 0 && where_pad == stack_direction)
4427     anti_adjust_stack (GEN_INT (extra));
4428 
4429   if (alignment_pad && args_addr == 0)
4430     anti_adjust_stack (alignment_pad);
4431 }
4432 
4433 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4434    operations.  */
4435 
4436 static rtx
4437 get_subtarget (rtx x)
4438 {
4439   return (optimize
4440           || x == 0
4441 	   /* Only registers can be subtargets.  */
4442 	   || !REG_P (x)
4443 	   /* Don't use hard regs to avoid extending their life.  */
4444 	   || REGNO (x) < FIRST_PSEUDO_REGISTER
4445 	  ? 0 : x);
4446 }
4447 
4448 /* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
4449    FIELD is a bitfield.  Returns true if the optimization was successful,
4450    and there's nothing else to do.  */
4451 
4452 static bool
4453 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4454 				 unsigned HOST_WIDE_INT bitpos,
4455 				 unsigned HOST_WIDE_INT bitregion_start,
4456 				 unsigned HOST_WIDE_INT bitregion_end,
4457 				 enum machine_mode mode1, rtx str_rtx,
4458 				 tree to, tree src)
4459 {
4460   enum machine_mode str_mode = GET_MODE (str_rtx);
4461   unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4462   tree op0, op1;
4463   rtx value, result;
4464   optab binop;
4465   gimple srcstmt;
4466   enum tree_code code;
4467 
4468   if (mode1 != VOIDmode
4469       || bitsize >= BITS_PER_WORD
4470       || str_bitsize > BITS_PER_WORD
4471       || TREE_SIDE_EFFECTS (to)
4472       || TREE_THIS_VOLATILE (to))
4473     return false;
4474 
4475   STRIP_NOPS (src);
4476   if (TREE_CODE (src) != SSA_NAME)
4477     return false;
4478   if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4479     return false;
4480 
4481   srcstmt = get_gimple_for_ssa_name (src);
4482   if (!srcstmt
4483       || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4484     return false;
4485 
4486   code = gimple_assign_rhs_code (srcstmt);
4487 
4488   op0 = gimple_assign_rhs1 (srcstmt);
4489 
4490   /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4491      to find its initialization.  Hopefully the initialization will
4492      be from a bitfield load.  */
4493   if (TREE_CODE (op0) == SSA_NAME)
4494     {
4495       gimple op0stmt = get_gimple_for_ssa_name (op0);
4496 
4497       /* We want to eventually have OP0 be the same as TO, which
4498 	 should be a bitfield.  */
4499       if (!op0stmt
4500 	  || !is_gimple_assign (op0stmt)
4501 	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4502 	return false;
4503       op0 = gimple_assign_rhs1 (op0stmt);
4504     }
4505 
4506   op1 = gimple_assign_rhs2 (srcstmt);
4507 
4508   if (!operand_equal_p (to, op0, 0))
4509     return false;
4510 
4511   if (MEM_P (str_rtx))
4512     {
4513       unsigned HOST_WIDE_INT offset1;
4514 
4515       if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4516 	str_mode = word_mode;
4517       str_mode = get_best_mode (bitsize, bitpos,
4518 				bitregion_start, bitregion_end,
4519 				MEM_ALIGN (str_rtx), str_mode, 0);
4520       if (str_mode == VOIDmode)
4521 	return false;
4522       str_bitsize = GET_MODE_BITSIZE (str_mode);
4523 
4524       offset1 = bitpos;
4525       bitpos %= str_bitsize;
4526       offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4527       str_rtx = adjust_address (str_rtx, str_mode, offset1);
4528     }
4529   else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4530     return false;
4531 
4532   /* If the bit field covers the whole REG/MEM, store_field
4533      will likely generate better code.  */
4534   if (bitsize >= str_bitsize)
4535     return false;
4536 
4537   /* We can't handle fields split across multiple entities.  */
4538   if (bitpos + bitsize > str_bitsize)
4539     return false;
4540 
4541   if (BYTES_BIG_ENDIAN)
4542     bitpos = str_bitsize - bitpos - bitsize;
4543 
4544   switch (code)
4545     {
4546     case PLUS_EXPR:
4547     case MINUS_EXPR:
4548       /* For now, just optimize the case of the topmost bitfield,
4549 	 where we don't need to do any masking, and also
4550 	 1-bit bitfields, where xor can be used.
4551 	 We might win by one instruction for the other bitfields
4552 	 too if insv/extv instructions aren't used, so that
4553 	 can be added later.  */
4554       if (bitpos + bitsize != str_bitsize
4555 	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4556 	break;
4557 
4558       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4559       value = convert_modes (str_mode,
4560 			     TYPE_MODE (TREE_TYPE (op1)), value,
4561 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4562 
4563       /* We may be accessing data outside the field, which means
4564 	 we can alias adjacent data.  */
4565       if (MEM_P (str_rtx))
4566 	{
4567 	  str_rtx = shallow_copy_rtx (str_rtx);
4568 	  set_mem_alias_set (str_rtx, 0);
4569 	  set_mem_expr (str_rtx, 0);
4570 	}
4571 
4572       binop = code == PLUS_EXPR ? add_optab : sub_optab;
4573       if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4574 	{
4575 	  value = expand_and (str_mode, value, const1_rtx, NULL);
4576 	  binop = xor_optab;
4577 	}
4578       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4579       result = expand_binop (str_mode, binop, str_rtx,
4580 			     value, str_rtx, 1, OPTAB_WIDEN);
4581       if (result != str_rtx)
4582 	emit_move_insn (str_rtx, result);
4583       return true;
4584 
4585     case BIT_IOR_EXPR:
4586     case BIT_XOR_EXPR:
4587       if (TREE_CODE (op1) != INTEGER_CST)
4588 	break;
4589       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4590       value = convert_modes (str_mode,
4591 			     TYPE_MODE (TREE_TYPE (op1)), value,
4592 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4593 
4594       /* We may be accessing data outside the field, which means
4595 	 we can alias adjacent data.  */
4596       if (MEM_P (str_rtx))
4597 	{
4598 	  str_rtx = shallow_copy_rtx (str_rtx);
4599 	  set_mem_alias_set (str_rtx, 0);
4600 	  set_mem_expr (str_rtx, 0);
4601 	}
4602 
4603       binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4604       if (bitpos + bitsize != str_bitsize)
4605 	{
4606 	  rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4607 				   str_mode);
4608 	  value = expand_and (str_mode, value, mask, NULL_RTX);
4609 	}
4610       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4611       result = expand_binop (str_mode, binop, str_rtx,
4612 			     value, str_rtx, 1, OPTAB_WIDEN);
4613       if (result != str_rtx)
4614 	emit_move_insn (str_rtx, result);
4615       return true;
4616 
4617     default:
4618       break;
4619     }
4620 
4621   return false;
4622 }
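
/* A hypothetical source-level example of the transformation above: given

     struct S { unsigned int pad : 24; unsigned int f : 8; } s;
     s.f += x;

   and assuming F lands in the most significant bits of its 32-bit word,
   no masking is needed, so the increment is done by shifting X into
   place and adding it to the containing word instead of an
   extract/modify/insert sequence.  */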
4623 
4624 /* In the C++ memory model, consecutive bit fields in a structure are
4625    considered one memory location.
4626 
4627    Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4628    returns the bit range of consecutive bits in which this COMPONENT_REF
4629    belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
4630    and *OFFSET may be adjusted in the process.
4631 
4632    If the access does not need to be restricted, 0 is returned in both
4633    *BITSTART and *BITEND.  */
4634 
4635 static void
4636 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4637 	       unsigned HOST_WIDE_INT *bitend,
4638 	       tree exp,
4639 	       HOST_WIDE_INT *bitpos,
4640 	       tree *offset)
4641 {
4642   HOST_WIDE_INT bitoffset;
4643   tree field, repr;
4644 
4645   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4646 
4647   field = TREE_OPERAND (exp, 1);
4648   repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4649   /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4650      need to limit the range we can access.  */
4651   if (!repr)
4652     {
4653       *bitstart = *bitend = 0;
4654       return;
4655     }
4656 
4657   /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4658      part of a larger bit field, then the representative does not serve any
4659      useful purpose.  This can occur in Ada.  */
4660   if (handled_component_p (TREE_OPERAND (exp, 0)))
4661     {
4662       enum machine_mode rmode;
4663       HOST_WIDE_INT rbitsize, rbitpos;
4664       tree roffset;
4665       int unsignedp;
4666       int volatilep = 0;
4667       get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4668 			   &roffset, &rmode, &unsignedp, &volatilep, false);
4669       if ((rbitpos % BITS_PER_UNIT) != 0)
4670 	{
4671 	  *bitstart = *bitend = 0;
4672 	  return;
4673 	}
4674     }
4675 
4676   /* Compute the adjustment to bitpos from the offset of the field
4677      relative to the representative.  DECL_FIELD_OFFSET of field and
4678      repr are the same by construction if they are not constants,
4679      see finish_bitfield_layout.  */
4680   if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4681       && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4682     bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4683 		 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4684   else
4685     bitoffset = 0;
4686   bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4687 		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4688 
4689   /* If the adjustment is larger than bitpos, we would have a negative bit
4690      position for the lower bound and this may wreak havoc later.  Adjust
4691      offset and bitpos to make the lower bound non-negative in that case.  */
4692   if (bitoffset > *bitpos)
4693     {
4694       HOST_WIDE_INT adjust = bitoffset - *bitpos;
4695       gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4696 
4697       *bitpos += adjust;
4698       if (*offset == NULL_TREE)
4699 	*offset = size_int (-adjust / BITS_PER_UNIT);
4700       else
4701 	*offset
4702 	  = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4703       *bitstart = 0;
4704     }
4705   else
4706     *bitstart = *bitpos - bitoffset;
4707 
4708   *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4709 }
4710 
4711 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4712    in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
4713    DECL_RTL was not set yet, return NORTL.  */
4714 
4715 static inline bool
4716 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4717 {
4718   if (TREE_CODE (addr) != ADDR_EXPR)
4719     return false;
4720 
4721   tree base = TREE_OPERAND (addr, 0);
4722 
4723   if (!DECL_P (base)
4724       || TREE_ADDRESSABLE (base)
4725       || DECL_MODE (base) == BLKmode)
4726     return false;
4727 
4728   if (!DECL_RTL_SET_P (base))
4729     return nortl;
4730 
4731   return (!MEM_P (DECL_RTL (base)));
4732 }
4733 
4734 /* Returns true if the MEM_REF REF refers to an object that does not
4735    reside in memory and has non-BLKmode.  */
4736 
4737 static inline bool
4738 mem_ref_refers_to_non_mem_p (tree ref)
4739 {
4740   tree base = TREE_OPERAND (ref, 0);
4741   return addr_expr_of_non_mem_decl_p_1 (base, false);
4742 }
4743 
4744 /* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
4745    is true, try generating a nontemporal store.  */
4746 
4747 void
4748 expand_assignment (tree to, tree from, bool nontemporal)
4749 {
4750   rtx to_rtx = 0;
4751   rtx result;
4752   enum machine_mode mode;
4753   unsigned int align;
4754   enum insn_code icode;
4755 
4756   /* Don't crash if the lhs of the assignment was erroneous.  */
4757   if (TREE_CODE (to) == ERROR_MARK)
4758     {
4759       expand_normal (from);
4760       return;
4761     }
4762 
4763   /* Optimize away no-op moves without side-effects.  */
4764   if (operand_equal_p (to, from, 0))
4765     return;
4766 
4767   /* Handle misaligned stores.  */
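  /* As an illustration (user code, not part of GCC): for

       struct __attribute__ ((packed)) P { char c; int i; };
       void f (struct P *p, int v) { p->i = v; }

     the store to p->i may be under-aligned for SImode.  Targets that
     provide a movmisalign<mode> pattern expand it directly; otherwise we
     fall back to store_bit_field below.  */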
4768   mode = TYPE_MODE (TREE_TYPE (to));
4769   if ((TREE_CODE (to) == MEM_REF
4770        || TREE_CODE (to) == TARGET_MEM_REF)
4771       && mode != BLKmode
4772       && !mem_ref_refers_to_non_mem_p (to)
4773       && ((align = get_object_alignment (to))
4774 	  < GET_MODE_ALIGNMENT (mode))
4775       && (((icode = optab_handler (movmisalign_optab, mode))
4776 	   != CODE_FOR_nothing)
4777 	  || SLOW_UNALIGNED_ACCESS (mode, align)))
4778     {
4779       rtx reg, mem;
4780 
4781       reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4782       reg = force_not_mem (reg);
4783       mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4784 
4785       if (icode != CODE_FOR_nothing)
4786 	{
4787 	  struct expand_operand ops[2];
4788 
4789 	  create_fixed_operand (&ops[0], mem);
4790 	  create_input_operand (&ops[1], reg, mode);
4791 	  /* The movmisalign<mode> pattern cannot fail, else the assignment
4792 	     would silently be omitted.  */
4793 	  expand_insn (icode, 2, ops);
4794 	}
4795       else
4796 	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4797       return;
4798     }
4799 
4800   /* Assignment of a structure component needs special treatment
4801      if the structure component's rtx is not simply a MEM.
4802      Assignment of an array element at a constant index, and assignment of
4803      an array element in an unaligned packed structure field, has the same
4804      problem.  Same for (partially) storing into a non-memory object.  */
4805   if (handled_component_p (to)
4806       || (TREE_CODE (to) == MEM_REF
4807 	  && mem_ref_refers_to_non_mem_p (to))
4808       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4809     {
4810       enum machine_mode mode1;
4811       HOST_WIDE_INT bitsize, bitpos;
4812       unsigned HOST_WIDE_INT bitregion_start = 0;
4813       unsigned HOST_WIDE_INT bitregion_end = 0;
4814       tree offset;
4815       int unsignedp;
4816       int volatilep = 0;
4817       tree tem;
4818 
4819       push_temp_slots ();
4820       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4821 				 &unsignedp, &volatilep, true);
4822 
4823       /* Make sure bitpos is not negative, it can wreak havoc later.  */
4824       if (bitpos < 0)
4825 	{
4826 	  gcc_assert (offset == NULL_TREE);
4827 	  offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4828 					? 3 : exact_log2 (BITS_PER_UNIT)));
4829 	  bitpos &= BITS_PER_UNIT - 1;
4830 	}
4831 
4832       if (TREE_CODE (to) == COMPONENT_REF
4833 	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4834 	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4835       /* The C++ memory model naturally applies to byte-aligned fields.
4836 	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4837 	 BITSIZE are not byte-aligned, there is no need to limit the range
4838 	 we can access.  This can occur with packed structures in Ada.  */
4839       else if (bitsize > 0
4840 	       && bitsize % BITS_PER_UNIT == 0
4841 	       && bitpos % BITS_PER_UNIT == 0)
4842 	{
4843 	  bitregion_start = bitpos;
4844 	  bitregion_end = bitpos + bitsize - 1;
4845 	}
4846 
4847       to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4848 
4849       /* If the field has a mode, we want to access it in the
4850 	 field's mode, not the computed mode.
4851 	 If a MEM has VOIDmode (external with incomplete type),
4852 	 use BLKmode for it instead.  */
4853       if (MEM_P (to_rtx))
4854 	{
4855 	  if (mode1 != VOIDmode)
4856 	    to_rtx = adjust_address (to_rtx, mode1, 0);
4857 	  else if (GET_MODE (to_rtx) == VOIDmode)
4858 	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
4859 	}
4860 
4861       if (offset != 0)
4862 	{
4863 	  enum machine_mode address_mode;
4864 	  rtx offset_rtx;
4865 
4866 	  if (!MEM_P (to_rtx))
4867 	    {
4868 	      /* We can get constant negative offsets into arrays with broken
4869 		 user code.  Translate this to a trap instead of ICEing.  */
4870 	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4871 	      expand_builtin_trap ();
4872 	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4873 	    }
4874 
4875 	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4876 	  address_mode = get_address_mode (to_rtx);
4877 	  if (GET_MODE (offset_rtx) != address_mode)
4878 	    offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4879 
4880 	  /* The check for a constant address in TO_RTX not having VOIDmode
4881 	     is probably no longer necessary.  */
4882 	  if (MEM_P (to_rtx)
4883 	      && GET_MODE (to_rtx) == BLKmode
4884 	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4885 	      && bitsize > 0
4886 	      && (bitpos % bitsize) == 0
4887 	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4888 	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4889 	    {
4890 	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4891 	      bitregion_start = 0;
4892 	      if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4893 		bitregion_end -= bitpos;
4894 	      bitpos = 0;
4895 	    }
4896 
4897 	  to_rtx = offset_address (to_rtx, offset_rtx,
4898 				   highest_pow2_factor_for_target (to,
4899 				   				   offset));
4900 	}
4901 
4902       /* No action is needed if the target is not a memory and the field
4903 	 lies completely outside that target.  This can occur if the source
4904 	 code contains an out-of-bounds access to a small array.  */
4905       if (!MEM_P (to_rtx)
4906 	  && GET_MODE (to_rtx) != BLKmode
4907 	  && (unsigned HOST_WIDE_INT) bitpos
4908 	     >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4909 	{
4910 	  expand_normal (from);
4911 	  result = NULL;
4912 	}
4913       /* Handle expand_expr of a complex value returning a CONCAT.  */
4914       else if (GET_CODE (to_rtx) == CONCAT)
4915 	{
4916 	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4917 	  if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4918 	      && bitpos == 0
4919 	      && bitsize == mode_bitsize)
4920 	    result = store_expr (from, to_rtx, false, nontemporal);
4921 	  else if (bitsize == mode_bitsize / 2
4922 		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
4923 	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4924 				 nontemporal);
4925 	  else if (bitpos + bitsize <= mode_bitsize / 2)
4926 	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4927 				  bitregion_start, bitregion_end,
4928 				  mode1, from,
4929 				  get_alias_set (to), nontemporal);
4930 	  else if (bitpos >= mode_bitsize / 2)
4931 	    result = store_field (XEXP (to_rtx, 1), bitsize,
4932 				  bitpos - mode_bitsize / 2,
4933 				  bitregion_start, bitregion_end,
4934 				  mode1, from,
4935 				  get_alias_set (to), nontemporal);
4936 	  else if (bitpos == 0 && bitsize == mode_bitsize)
4937 	    {
4938 	      rtx from_rtx;
4939 	      result = expand_normal (from);
4940 	      from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4941 					      TYPE_MODE (TREE_TYPE (from)), 0);
4942 	      emit_move_insn (XEXP (to_rtx, 0),
4943 			      read_complex_part (from_rtx, false));
4944 	      emit_move_insn (XEXP (to_rtx, 1),
4945 			      read_complex_part (from_rtx, true));
4946 	    }
4947 	  else
4948 	    {
4949 	      rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4950 					    GET_MODE_SIZE (GET_MODE (to_rtx)));
4951 	      write_complex_part (temp, XEXP (to_rtx, 0), false);
4952 	      write_complex_part (temp, XEXP (to_rtx, 1), true);
4953 	      result = store_field (temp, bitsize, bitpos,
4954 				    bitregion_start, bitregion_end,
4955 				    mode1, from,
4956 				    get_alias_set (to), nontemporal);
4957 	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4958 	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4959 	    }
4960 	}
4961       else
4962 	{
4963 	  if (MEM_P (to_rtx))
4964 	    {
4965 	      /* If the field is at offset zero, we could have been given the
4966 		 DECL_RTX of the parent struct.  Don't munge it.  */
4967 	      to_rtx = shallow_copy_rtx (to_rtx);
4968 	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4969 	      if (volatilep)
4970 		MEM_VOLATILE_P (to_rtx) = 1;
4971 	    }
4972 
4973 	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
4974 					       bitregion_start, bitregion_end,
4975 					       mode1,
4976 					       to_rtx, to, from))
4977 	    result = NULL;
4978 	  else
4979 	    result = store_field (to_rtx, bitsize, bitpos,
4980 				  bitregion_start, bitregion_end,
4981 				  mode1, from,
4982 				  get_alias_set (to), nontemporal);
4983 	}
4984 
4985       if (result)
4986 	preserve_temp_slots (result);
4987       pop_temp_slots ();
4988       return;
4989     }
4990 
4991   /* If the rhs is a function call and its value is not an aggregate,
4992      call the function before we start to compute the lhs.
4993      This is needed for correct code for cases such as
4994      val = setjmp (buf) on machines where reference to val
4995      requires loading up part of an address in a separate insn.
4996 
4997      Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4998      since it might be a promoted variable where the zero- or sign- extension
4999      needs to be done.  Handling this in the normal way is safe because no
5000      computation is done before the call.  The same is true for SSA names.  */
5001   if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5002       && COMPLETE_TYPE_P (TREE_TYPE (from))
5003       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5004       && ! (((TREE_CODE (to) == VAR_DECL
5005 	      || TREE_CODE (to) == PARM_DECL
5006 	      || TREE_CODE (to) == RESULT_DECL)
5007 	     && REG_P (DECL_RTL (to)))
5008 	    || TREE_CODE (to) == SSA_NAME))
5009     {
5010       rtx value;
5011 
5012       push_temp_slots ();
5013       value = expand_normal (from);
5014       if (to_rtx == 0)
5015 	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5016 
5017       /* Handle calls that return values in multiple non-contiguous locations.
5018 	 The Irix 6 ABI has examples of this.  */
5019       if (GET_CODE (to_rtx) == PARALLEL)
5020 	{
5021 	  if (GET_CODE (value) == PARALLEL)
5022 	    emit_group_move (to_rtx, value);
5023 	  else
5024 	    emit_group_load (to_rtx, value, TREE_TYPE (from),
5025 			     int_size_in_bytes (TREE_TYPE (from)));
5026 	}
5027       else if (GET_CODE (value) == PARALLEL)
5028 	emit_group_store (to_rtx, value, TREE_TYPE (from),
5029 			  int_size_in_bytes (TREE_TYPE (from)));
5030       else if (GET_MODE (to_rtx) == BLKmode)
5031 	{
5032 	  /* Handle calls that return BLKmode values in registers.  */
5033 	  if (REG_P (value))
5034 	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5035 	  else
5036 	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5037 	}
5038       else
5039 	{
5040 	  if (POINTER_TYPE_P (TREE_TYPE (to)))
5041 	    value = convert_memory_address_addr_space
5042 		      (GET_MODE (to_rtx), value,
5043 		       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5044 
5045 	  emit_move_insn (to_rtx, value);
5046 	}
5047       preserve_temp_slots (to_rtx);
5048       pop_temp_slots ();
5049       return;
5050     }
5051 
5052   /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
5053   to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5054 
5055   /* Don't move directly into a return register.  */
5056   if (TREE_CODE (to) == RESULT_DECL
5057       && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5058     {
5059       rtx temp;
5060 
5061       push_temp_slots ();
5062 
5063       /* If the source is itself a return value, it still is in a pseudo at
5064 	 this point so we can move it back to the return register directly.  */
5065       if (REG_P (to_rtx)
5066 	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5067 	  && TREE_CODE (from) != CALL_EXPR)
5068 	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5069       else
5070 	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5071 
5072       /* Handle calls that return values in multiple non-contiguous locations.
5073 	 The Irix 6 ABI has examples of this.  */
5074       if (GET_CODE (to_rtx) == PARALLEL)
5075 	{
5076 	  if (GET_CODE (temp) == PARALLEL)
5077 	    emit_group_move (to_rtx, temp);
5078 	  else
5079 	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
5080 			     int_size_in_bytes (TREE_TYPE (from)));
5081 	}
5082       else if (temp)
5083 	emit_move_insn (to_rtx, temp);
5084 
5085       preserve_temp_slots (to_rtx);
5086       pop_temp_slots ();
5087       return;
5088     }
5089 
5090   /* In case we are returning the contents of an object which overlaps
5091      the place the value is being stored, use a safe function when copying
5092      a value through a pointer into a structure value return block.  */
5093   if (TREE_CODE (to) == RESULT_DECL
5094       && TREE_CODE (from) == INDIRECT_REF
5095       && ADDR_SPACE_GENERIC_P
5096 	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5097       && refs_may_alias_p (to, from)
5098       && cfun->returns_struct
5099       && !cfun->returns_pcc_struct)
5100     {
5101       rtx from_rtx, size;
5102 
5103       push_temp_slots ();
5104       size = expr_size (from);
5105       from_rtx = expand_normal (from);
5106 
5107       emit_library_call (memmove_libfunc, LCT_NORMAL,
5108 			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5109 			 XEXP (from_rtx, 0), Pmode,
5110 			 convert_to_mode (TYPE_MODE (sizetype),
5111 					  size, TYPE_UNSIGNED (sizetype)),
5112 			 TYPE_MODE (sizetype));
5113 
5114       preserve_temp_slots (to_rtx);
5115       pop_temp_slots ();
5116       return;
5117     }
5118 
5119   /* Compute FROM and store the value in the rtx we got.  */
5120 
5121   push_temp_slots ();
5122   result = store_expr (from, to_rtx, 0, nontemporal);
5123   preserve_temp_slots (result);
5124   pop_temp_slots ();
5125   return;
5126 }
5127 
5128 /* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
5129    succeeded, false otherwise.  */
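/* Note (illustrative): when a target provides a storent<mode> pattern it
   typically expands to a non-temporal, cache-bypassing store (for example
   MOVNTI-style moves on x86).  */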
5130 
5131 bool
5132 emit_storent_insn (rtx to, rtx from)
5133 {
5134   struct expand_operand ops[2];
5135   enum machine_mode mode = GET_MODE (to);
5136   enum insn_code code = optab_handler (storent_optab, mode);
5137 
5138   if (code == CODE_FOR_nothing)
5139     return false;
5140 
5141   create_fixed_operand (&ops[0], to);
5142   create_input_operand (&ops[1], from, mode);
5143   return maybe_expand_insn (code, 2, ops);
5144 }
5145 
5146 /* Generate code for computing expression EXP,
5147    and storing the value into TARGET.
5148 
5149    If the mode is BLKmode then we may return TARGET itself.
5150    It turns out that in BLKmode it doesn't cause a problem,
5151    because C has no operators that could combine two different
5152    assignments into the same BLKmode object with different values
5153    with no sequence point.  Will other languages need this to
5154    be more thorough?
5155 
5156    If CALL_PARAM_P is nonzero, this is a store into a call param on the
5157    stack, and block moves may need to be treated specially.
5158 
5159    If NONTEMPORAL is true, try using a nontemporal store instruction.  */
5160 
5161 rtx
5162 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5163 {
5164   rtx temp;
5165   rtx alt_rtl = NULL_RTX;
5166   location_t loc = curr_insn_location ();
5167 
5168   if (VOID_TYPE_P (TREE_TYPE (exp)))
5169     {
5170       /* C++ can generate ?: expressions with a throw expression in one
5171 	 branch and an rvalue in the other. Here, we resolve attempts to
5172 	 store the throw expression's nonexistent result.  */
5173       gcc_assert (!call_param_p);
5174       expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5175       return NULL_RTX;
5176     }
5177   if (TREE_CODE (exp) == COMPOUND_EXPR)
5178     {
5179       /* Perform first part of compound expression, then assign from second
5180 	 part.  */
5181       expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5182 		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5183       return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5184 			 nontemporal);
5185     }
5186   else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5187     {
5188       /* For conditional expression, get safe form of the target.  Then
5189 	 test the condition, doing the appropriate assignment on either
5190 	 side.  This avoids the creation of unnecessary temporaries.
5191 	 For non-BLKmode, it is more efficient not to do this.  */
5192 
5193       rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5194 
5195       do_pending_stack_adjust ();
5196       NO_DEFER_POP;
5197       jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5198       store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5199 		  nontemporal);
5200       emit_jump_insn (gen_jump (lab2));
5201       emit_barrier ();
5202       emit_label (lab1);
5203       store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5204 		  nontemporal);
5205       emit_label (lab2);
5206       OK_DEFER_POP;
5207 
5208       return NULL_RTX;
5209     }
5210   else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5211     /* If this is a scalar in a register that is stored in a wider mode
5212        than the declared mode, compute the result into its declared mode
5213        and then convert to the wider mode.  Our value is the computed
5214        expression.  */
5215     {
5216       rtx inner_target = 0;
5217 
5218       /* We can do the conversion inside EXP, which will often result
5219 	 in some optimizations.  Do the conversion in two steps: first
5220 	 change the signedness, if needed, then the extend.  But don't
5221 	 do this if the type of EXP is a subtype of something else
5222 	 since then the conversion might involve more than just
5223 	 converting modes.  */
5224       if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5225 	  && TREE_TYPE (TREE_TYPE (exp)) == 0
5226 	  && GET_MODE_PRECISION (GET_MODE (target))
5227 	     == TYPE_PRECISION (TREE_TYPE (exp)))
5228 	{
5229 	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
5230 	      != SUBREG_PROMOTED_UNSIGNED_P (target))
5231 	    {
5232 	      /* Some types, e.g. Fortran's logical*4, won't have a signed
5233 		 version, so use the mode instead.  */
5234 	      tree ntype
5235 		= (signed_or_unsigned_type_for
5236 		   (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5237 	      if (ntype == NULL)
5238 		ntype = lang_hooks.types.type_for_mode
5239 		  (TYPE_MODE (TREE_TYPE (exp)),
5240 		   SUBREG_PROMOTED_UNSIGNED_P (target));
5241 
5242 	      exp = fold_convert_loc (loc, ntype, exp);
5243 	    }
5244 
5245 	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5246 				  (GET_MODE (SUBREG_REG (target)),
5247 				   SUBREG_PROMOTED_UNSIGNED_P (target)),
5248 				  exp);
5249 
5250 	  inner_target = SUBREG_REG (target);
5251 	}
5252 
5253       temp = expand_expr (exp, inner_target, VOIDmode,
5254 			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5255 
5256       /* If TEMP is a VOIDmode constant, use convert_modes to make
5257 	 sure that we properly convert it.  */
5258       if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5259 	{
5260 	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5261 				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5262 	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5263 			        GET_MODE (target), temp,
5264 			        SUBREG_PROMOTED_UNSIGNED_P (target));
5265 	}
5266 
5267       convert_move (SUBREG_REG (target), temp,
5268 		    SUBREG_PROMOTED_UNSIGNED_P (target));
5269 
5270       return NULL_RTX;
5271     }
5272   else if ((TREE_CODE (exp) == STRING_CST
5273 	    || (TREE_CODE (exp) == MEM_REF
5274 		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5275 		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5276 		   == STRING_CST
5277 		&& integer_zerop (TREE_OPERAND (exp, 1))))
5278 	   && !nontemporal && !call_param_p
5279 	   && MEM_P (target))
5280     {
5281       /* Optimize initialization of an array with a STRING_CST.  */
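      /* Illustrative example (user code; the exact trees are front-end
	 dependent):

	   char buf[16] = "abc";

	 the leading bytes are stored by pieces from the string literal
	 and the remaining bytes of BUF are cleared below.  */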
5282       HOST_WIDE_INT exp_len, str_copy_len;
5283       rtx dest_mem;
5284       tree str = TREE_CODE (exp) == STRING_CST
5285 		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5286 
5287       exp_len = int_expr_size (exp);
5288       if (exp_len <= 0)
5289 	goto normal_expr;
5290 
5291       if (TREE_STRING_LENGTH (str) <= 0)
5292 	goto normal_expr;
5293 
5294       str_copy_len = strlen (TREE_STRING_POINTER (str));
5295       if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5296 	goto normal_expr;
5297 
5298       str_copy_len = TREE_STRING_LENGTH (str);
5299       if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5300 	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5301 	{
5302 	  str_copy_len += STORE_MAX_PIECES - 1;
5303 	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
5304 	}
5305       str_copy_len = MIN (str_copy_len, exp_len);
5306       if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5307 				CONST_CAST (char *, TREE_STRING_POINTER (str)),
5308 				MEM_ALIGN (target), false))
5309 	goto normal_expr;
5310 
5311       dest_mem = target;
5312 
5313       dest_mem = store_by_pieces (dest_mem,
5314 				  str_copy_len, builtin_strncpy_read_str,
5315 				  CONST_CAST (char *,
5316 					      TREE_STRING_POINTER (str)),
5317 				  MEM_ALIGN (target), false,
5318 				  exp_len > str_copy_len ? 1 : 0);
5319       if (exp_len > str_copy_len)
5320 	clear_storage (adjust_address (dest_mem, BLKmode, 0),
5321 		       GEN_INT (exp_len - str_copy_len),
5322 		       BLOCK_OP_NORMAL);
5323       return NULL_RTX;
5324     }
5325   else
5326     {
5327       rtx tmp_target;
5328 
5329   normal_expr:
5330       /* If we want to use a nontemporal store, force the value to
5331 	 register first.  */
5332       tmp_target = nontemporal ? NULL_RTX : target;
5333       temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5334 			       (call_param_p
5335 				? EXPAND_STACK_PARM : EXPAND_NORMAL),
5336 			       &alt_rtl, false);
5337     }
5338 
5339   /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5340      the same as that of TARGET, adjust the constant.  This is needed, for
5341      example, in case it is a CONST_DOUBLE and we want only a word-sized
5342      value.  */
5343   if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5344       && TREE_CODE (exp) != ERROR_MARK
5345       && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5346     temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5347 			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5348 
5349   /* If value was not generated in the target, store it there.
5350      Convert the value to TARGET's type first if necessary and emit the
5351      pending incrementations that have been queued when expanding EXP.
5352      Note that we cannot emit the whole queue blindly because this will
5353      effectively disable the POST_INC optimization later.
5354 
5355      If TEMP and TARGET compare equal according to rtx_equal_p, but
5356      one or both of them are volatile memory refs, we have to distinguish
5357      two cases:
5358      - expand_expr has used TARGET.  In this case, we must not generate
5359        another copy.  This can be detected by TARGET being equal according
5360        to == .
5361      - expand_expr has not used TARGET - that means that the source just
5362        happens to have the same RTX form.  Since temp will have been created
5363        by expand_expr, it will compare unequal according to == .
5364        We must generate a copy in this case, to reach the correct number
5365        of volatile memory references.  */
5366 
5367   if ((! rtx_equal_p (temp, target)
5368        || (temp != target && (side_effects_p (temp)
5369 			      || side_effects_p (target))))
5370       && TREE_CODE (exp) != ERROR_MARK
5371       /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5372 	 but TARGET is not valid memory reference, TEMP will differ
5373 	 from TARGET although it is really the same location.  */
5374       && !(alt_rtl
5375 	   && rtx_equal_p (alt_rtl, target)
5376 	   && !side_effects_p (alt_rtl)
5377 	   && !side_effects_p (target))
5378       /* If there's nothing to copy, don't bother.  Don't call
5379 	 expr_size unless necessary, because some front ends' (e.g. C++)
5380 	 expr_size hook must not be given objects that are not
5381 	 supposed to be bit-copied or bit-initialized.  */
5382       && expr_size (exp) != const0_rtx)
5383     {
5384       if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5385 	{
5386 	  if (GET_MODE (target) == BLKmode)
5387 	    {
5388 	      /* Handle calls that return BLKmode values in registers.  */
5389 	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5390 		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5391 	      else
5392 		store_bit_field (target,
5393 				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5394 				 0, 0, 0, GET_MODE (temp), temp);
5395 	    }
5396 	  else
5397 	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5398 	}
5399 
5400       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5401 	{
5402 	  /* Handle copying a string constant into an array.  The string
5403 	     constant may be shorter than the array.  So copy just the string's
5404 	     actual length, and clear the rest.  First get the size of the data
5405 	     type of the string, which is actually the size of the target.  */
5406 	  rtx size = expr_size (exp);
5407 
5408 	  if (CONST_INT_P (size)
5409 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
5410 	    emit_block_move (target, temp, size,
5411 			     (call_param_p
5412 			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5413 	  else
5414 	    {
5415 	      enum machine_mode pointer_mode
5416 		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5417 	      enum machine_mode address_mode = get_address_mode (target);
5418 
5419 	      /* Compute the size of the data to copy from the string.  */
5420 	      tree copy_size
5421 		= size_binop_loc (loc, MIN_EXPR,
5422 				  make_tree (sizetype, size),
5423 				  size_int (TREE_STRING_LENGTH (exp)));
5424 	      rtx copy_size_rtx
5425 		= expand_expr (copy_size, NULL_RTX, VOIDmode,
5426 			       (call_param_p
5427 				? EXPAND_STACK_PARM : EXPAND_NORMAL));
5428 	      rtx label = 0;
5429 
5430 	      /* Copy that much.  */
5431 	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5432 					       TYPE_UNSIGNED (sizetype));
5433 	      emit_block_move (target, temp, copy_size_rtx,
5434 			       (call_param_p
5435 				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5436 
5437 	      /* Figure out how much is left in TARGET that we have to clear.
5438 		 Do all calculations in pointer_mode.  */
5439 	      if (CONST_INT_P (copy_size_rtx))
5440 		{
5441 		  size = plus_constant (address_mode, size,
5442 					-INTVAL (copy_size_rtx));
5443 		  target = adjust_address (target, BLKmode,
5444 					   INTVAL (copy_size_rtx));
5445 		}
5446 	      else
5447 		{
5448 		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5449 				       copy_size_rtx, NULL_RTX, 0,
5450 				       OPTAB_LIB_WIDEN);
5451 
5452 		  if (GET_MODE (copy_size_rtx) != address_mode)
5453 		    copy_size_rtx = convert_to_mode (address_mode,
5454 						     copy_size_rtx,
5455 						     TYPE_UNSIGNED (sizetype));
5456 
5457 		  target = offset_address (target, copy_size_rtx,
5458 					   highest_pow2_factor (copy_size));
5459 		  label = gen_label_rtx ();
5460 		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5461 					   GET_MODE (size), 0, label);
5462 		}
5463 
5464 	      if (size != const0_rtx)
5465 		clear_storage (target, size, BLOCK_OP_NORMAL);
5466 
5467 	      if (label)
5468 		emit_label (label);
5469 	    }
5470 	}
5471       /* Handle calls that return values in multiple non-contiguous locations.
5472 	 The Irix 6 ABI has examples of this.  */
5473       else if (GET_CODE (target) == PARALLEL)
5474 	{
5475 	  if (GET_CODE (temp) == PARALLEL)
5476 	    emit_group_move (target, temp);
5477 	  else
5478 	    emit_group_load (target, temp, TREE_TYPE (exp),
5479 			     int_size_in_bytes (TREE_TYPE (exp)));
5480 	}
5481       else if (GET_CODE (temp) == PARALLEL)
5482 	emit_group_store (target, temp, TREE_TYPE (exp),
5483 			  int_size_in_bytes (TREE_TYPE (exp)));
5484       else if (GET_MODE (temp) == BLKmode)
5485 	emit_block_move (target, temp, expr_size (exp),
5486 			 (call_param_p
5487 			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5488       /* If we emit a nontemporal store, there is nothing else to do.  */
5489       else if (nontemporal && emit_storent_insn (target, temp))
5490 	;
5491       else
5492 	{
5493 	  temp = force_operand (temp, target);
5494 	  if (temp != target)
5495 	    emit_move_insn (target, temp);
5496 	}
5497     }
5498 
5499   return NULL_RTX;
5500 }
5501 
5502 /* Return true if field F of structure TYPE is a flexible array.  */
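/* Illustrative example (user code): in

     struct msg { int len; char data[]; };

   DATA is a flexible array member: it is the last field, its array type
   has a zero lower bound and no upper bound, and the enclosing struct
   still has a known constant size.  */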
5503 
5504 static bool
5505 flexible_array_member_p (const_tree f, const_tree type)
5506 {
5507   const_tree tf;
5508 
5509   tf = TREE_TYPE (f);
5510   return (DECL_CHAIN (f) == NULL
5511 	  && TREE_CODE (tf) == ARRAY_TYPE
5512 	  && TYPE_DOMAIN (tf)
5513 	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5514 	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5515 	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5516 	  && int_size_in_bytes (type) >= 0);
5517 }
5518 
5519 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5520    must have in order for it to completely initialize a value of type TYPE.
5521    Return -1 if the number isn't known.
5522 
5523    If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
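/* For example (illustrative only), given

     struct pt { int x, y; };
     struct pt arr[4];

   a call with !FOR_CTOR_P estimates 4 * 2 = 8 scalars for ARR's type,
   while with FOR_CTOR_P it returns 4, the number of top-level elements
   a complete constructor for ARR must provide.  */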
5524 
5525 static HOST_WIDE_INT
5526 count_type_elements (const_tree type, bool for_ctor_p)
5527 {
5528   switch (TREE_CODE (type))
5529     {
5530     case ARRAY_TYPE:
5531       {
5532 	tree nelts;
5533 
5534 	nelts = array_type_nelts (type);
5535 	if (nelts && tree_fits_uhwi_p (nelts))
5536 	  {
5537 	    unsigned HOST_WIDE_INT n;
5538 
5539 	    n = tree_to_uhwi (nelts) + 1;
5540 	    if (n == 0 || for_ctor_p)
5541 	      return n;
5542 	    else
5543 	      return n * count_type_elements (TREE_TYPE (type), false);
5544 	  }
5545 	return for_ctor_p ? -1 : 1;
5546       }
5547 
5548     case RECORD_TYPE:
5549       {
5550 	unsigned HOST_WIDE_INT n;
5551 	tree f;
5552 
5553 	n = 0;
5554 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5555 	  if (TREE_CODE (f) == FIELD_DECL)
5556 	    {
5557 	      if (!for_ctor_p)
5558 		n += count_type_elements (TREE_TYPE (f), false);
5559 	      else if (!flexible_array_member_p (f, type))
5560 		/* Don't count flexible arrays, which are not supposed
5561 		   to be initialized.  */
5562 		n += 1;
5563 	    }
5564 
5565 	return n;
5566       }
5567 
5568     case UNION_TYPE:
5569     case QUAL_UNION_TYPE:
5570       {
5571 	tree f;
5572 	HOST_WIDE_INT n, m;
5573 
5574 	gcc_assert (!for_ctor_p);
5575 	/* Estimate the number of scalars in each field and pick the
5576 	   maximum.  Other estimates would do instead; the idea is simply
5577 	   to make sure that the estimate is not sensitive to the ordering
5578 	   of the fields.  */
5579 	n = 1;
5580 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5581 	  if (TREE_CODE (f) == FIELD_DECL)
5582 	    {
5583 	      m = count_type_elements (TREE_TYPE (f), false);
5584 	      /* If the field doesn't span the whole union, add an extra
5585 		 scalar for the rest.  */
5586 	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5587 				    TYPE_SIZE (type)) != 1)
5588 		m++;
5589 	      if (n < m)
5590 		n = m;
5591 	    }
5592 	return n;
5593       }
5594 
5595     case COMPLEX_TYPE:
5596       return 2;
5597 
5598     case VECTOR_TYPE:
5599       return TYPE_VECTOR_SUBPARTS (type);
5600 
5601     case INTEGER_TYPE:
5602     case REAL_TYPE:
5603     case FIXED_POINT_TYPE:
5604     case ENUMERAL_TYPE:
5605     case BOOLEAN_TYPE:
5606     case POINTER_TYPE:
5607     case OFFSET_TYPE:
5608     case REFERENCE_TYPE:
5609     case NULLPTR_TYPE:
5610       return 1;
5611 
5612     case ERROR_MARK:
5613       return 0;
5614 
5615     case VOID_TYPE:
5616     case METHOD_TYPE:
5617     case FUNCTION_TYPE:
5618     case LANG_TYPE:
5619     default:
5620       gcc_unreachable ();
5621     }
5622 }
5623 
5624 /* Helper for categorize_ctor_elements.  Identical interface.  */
5625 
5626 static bool
5627 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5628 			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
5629 {
5630   unsigned HOST_WIDE_INT idx;
5631   HOST_WIDE_INT nz_elts, init_elts, num_fields;
5632   tree value, purpose, elt_type;
5633 
5634   /* Whether CTOR is a valid constant initializer, in accordance with what
5635      initializer_constant_valid_p does.  If inferred from the constructor
5636      elements, true until proven otherwise.  */
5637   bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5638   bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5639 
5640   nz_elts = 0;
5641   init_elts = 0;
5642   num_fields = 0;
5643   elt_type = NULL_TREE;
5644 
5645   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5646     {
5647       HOST_WIDE_INT mult = 1;
5648 
5649       if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5650 	{
5651 	  tree lo_index = TREE_OPERAND (purpose, 0);
5652 	  tree hi_index = TREE_OPERAND (purpose, 1);
5653 
5654 	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5655 	    mult = (tree_to_uhwi (hi_index)
5656 		    - tree_to_uhwi (lo_index) + 1);
5657 	}
5658       num_fields += mult;
5659       elt_type = TREE_TYPE (value);
5660 
5661       switch (TREE_CODE (value))
5662 	{
5663 	case CONSTRUCTOR:
5664 	  {
5665 	    HOST_WIDE_INT nz = 0, ic = 0;
5666 
5667 	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5668 							   p_complete);
5669 
5670 	    nz_elts += mult * nz;
5671  	    init_elts += mult * ic;
5672 
5673 	    if (const_from_elts_p && const_p)
5674 	      const_p = const_elt_p;
5675 	  }
5676 	  break;
5677 
5678 	case INTEGER_CST:
5679 	case REAL_CST:
5680 	case FIXED_CST:
5681 	  if (!initializer_zerop (value))
5682 	    nz_elts += mult;
5683 	  init_elts += mult;
5684 	  break;
5685 
5686 	case STRING_CST:
5687 	  nz_elts += mult * TREE_STRING_LENGTH (value);
5688 	  init_elts += mult * TREE_STRING_LENGTH (value);
5689 	  break;
5690 
5691 	case COMPLEX_CST:
5692 	  if (!initializer_zerop (TREE_REALPART (value)))
5693 	    nz_elts += mult;
5694 	  if (!initializer_zerop (TREE_IMAGPART (value)))
5695 	    nz_elts += mult;
5696 	  init_elts += mult;
5697 	  break;
5698 
5699 	case VECTOR_CST:
5700 	  {
5701 	    unsigned i;
5702 	    for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5703 	      {
5704 		tree v = VECTOR_CST_ELT (value, i);
5705 		if (!initializer_zerop (v))
5706 		  nz_elts += mult;
5707 		init_elts += mult;
5708 	      }
5709 	  }
5710 	  break;
5711 
5712 	default:
5713 	  {
5714 	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5715 	    nz_elts += mult * tc;
5716 	    init_elts += mult * tc;
5717 
5718 	    if (const_from_elts_p && const_p)
5719 	      const_p = initializer_constant_valid_p (value, elt_type)
5720 			!= NULL_TREE;
5721 	  }
5722 	  break;
5723 	}
5724     }
5725 
5726   if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5727 						num_fields, elt_type))
5728     *p_complete = false;
5729 
5730   *p_nz_elts += nz_elts;
5731   *p_init_elts += init_elts;
5732 
5733   return const_p;
5734 }
5735 
5736 /* Examine CTOR to discover:
5737    * how many scalar fields are set to nonzero values,
5738      and place it in *P_NZ_ELTS;
5739    * how many scalar fields in total are in CTOR,
5740      and place it in *P_INIT_ELTS;
5741    * whether the constructor is complete -- in the sense that every
5742      meaningful byte is explicitly given a value --
5743      and place it in *P_COMPLETE.
5744 
5745    Return whether or not CTOR is a valid static constant initializer, the same
5746    as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
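/* As an illustration (user code): for the initializer { 0, 5, 0, 7 } of
   an int[4], *P_NZ_ELTS is 2, *P_INIT_ELTS is 4 and *P_COMPLETE is true;
   for { [1] = 5, [3] = 7 } alone, *P_COMPLETE becomes false because not
   every element is explicitly initialized.  */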
5747 
5748 bool
5749 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5750 			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
5751 {
5752   *p_nz_elts = 0;
5753   *p_init_elts = 0;
5754   *p_complete = true;
5755 
5756   return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5757 }
5758 
5759 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5760    of which had type LAST_TYPE.  Each element was itself a complete
5761    initializer, in the sense that every meaningful byte was explicitly
5762    given a value.  Return true if the same is true for the constructor
5763    as a whole.  */
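/* For instance (illustrative only), for

     union u { char c; double d; };
     union u x = { .c = 'a' };

   the single initialized member does not span the whole union, so the
   constructor is not complete at this level.  */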
5764 
5765 bool
5766 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5767 			  const_tree last_type)
5768 {
5769   if (TREE_CODE (type) == UNION_TYPE
5770       || TREE_CODE (type) == QUAL_UNION_TYPE)
5771     {
5772       if (num_elts == 0)
5773 	return false;
5774 
5775       gcc_assert (num_elts == 1 && last_type);
5776 
5777       /* ??? We could look at each element of the union, and find the
5778 	 largest element.  Which would avoid comparing the size of the
5779 	 initialized element against any tail padding in the union.
5780 	 Doesn't seem worth the effort...  */
5781       return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5782     }
5783 
5784   return count_type_elements (type, true) == num_elts;
5785 }
5786 
5787 /* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
5788 
5789 static int
5790 mostly_zeros_p (const_tree exp)
5791 {
5792   if (TREE_CODE (exp) == CONSTRUCTOR)
5793     {
5794       HOST_WIDE_INT nz_elts, init_elts;
5795       bool complete_p;
5796 
5797       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5798       return !complete_p || nz_elts < init_elts / 4;
5799     }
5800 
5801   return initializer_zerop (exp);
5802 }
5803 
5804 /* Return 1 if EXP contains all zeros.  */
5805 
5806 static int
5807 all_zeros_p (const_tree exp)
5808 {
5809   if (TREE_CODE (exp) == CONSTRUCTOR)
5810     {
5811       HOST_WIDE_INT nz_elts, init_elts;
5812       bool complete_p;
5813 
5814       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5815       return nz_elts == 0;
5816     }
5817 
5818   return initializer_zerop (exp);
5819 }
5820 
5821 /* Helper function for store_constructor.
5822    TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5823    CLEARED is as for store_constructor.
5824    ALIAS_SET is the alias set to use for any stores.
5825 
5826    This provides a recursive shortcut back to store_constructor when it isn't
5827    necessary to go through store_field.  This is so that we can pass through
5828    the cleared field to let store_constructor know that we may not have to
5829    clear a substructure if the outer structure has already been cleared.  */
5830 
5831 static void
5832 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5833 			 HOST_WIDE_INT bitpos, enum machine_mode mode,
5834 			 tree exp, int cleared, alias_set_type alias_set)
5835 {
5836   if (TREE_CODE (exp) == CONSTRUCTOR
5837       /* We can only call store_constructor recursively if the size and
5838 	 bit position are on a byte boundary.  */
5839       && bitpos % BITS_PER_UNIT == 0
5840       && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5841       /* If we have a nonzero bitpos for a register target, then we just
5842 	 let store_field do the bitfield handling.  This is unlikely to
5843 	 generate unnecessary clear instructions anyway.  */
5844       && (bitpos == 0 || MEM_P (target)))
5845     {
5846       if (MEM_P (target))
5847 	target
5848 	  = adjust_address (target,
5849 			    GET_MODE (target) == BLKmode
5850 			    || 0 != (bitpos
5851 				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
5852 			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5853 
5854 
5855       /* Update the alias set, if required.  */
5856       if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5857 	  && MEM_ALIAS_SET (target) != 0)
5858 	{
5859 	  target = copy_rtx (target);
5860 	  set_mem_alias_set (target, alias_set);
5861 	}
5862 
5863       store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5864     }
5865   else
5866     store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5867 }
5868 
5869 
5870 /* Returns the number of FIELD_DECLs in TYPE.  */
5871 
5872 static int
5873 fields_length (const_tree type)
5874 {
5875   tree t = TYPE_FIELDS (type);
5876   int count = 0;
5877 
5878   for (; t; t = DECL_CHAIN (t))
5879     if (TREE_CODE (t) == FIELD_DECL)
5880       ++count;
5881 
5882   return count;
5883 }
5884 
5885 
5886 /* Store the value of constructor EXP into the rtx TARGET.
5887    TARGET is either a REG or a MEM; we know it cannot conflict, since
5888    safe_from_p has been called.
5889    CLEARED is true if TARGET is known to have been zero'd.
5890    SIZE is the number of bytes of TARGET we are allowed to modify: this
5891    may not be the same as the size of EXP if we are assigning to a field
5892    which has been packed to exclude padding bits.  */
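/* A sketch of the kind of initializer this expands (user code; the exact
   trees reaching here depend on earlier gimplification):

     struct pt { int x, y; } p = { 1, 2 };
     int v[8] = { [0] = 1, [7] = 1 };

   For the array case the mostly-zeros/missing-elements heuristic below
   clears the whole object first and then stores only the nonzero
   elements.  */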
5893 
5894 static void
5895 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5896 {
5897   tree type = TREE_TYPE (exp);
5898 #ifdef WORD_REGISTER_OPERATIONS
5899   HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5900 #endif
5901 
5902   switch (TREE_CODE (type))
5903     {
5904     case RECORD_TYPE:
5905     case UNION_TYPE:
5906     case QUAL_UNION_TYPE:
5907       {
5908 	unsigned HOST_WIDE_INT idx;
5909 	tree field, value;
5910 
5911 	/* If size is zero or the target is already cleared, do nothing.  */
5912 	if (size == 0 || cleared)
5913 	  cleared = 1;
5914 	/* We either clear the aggregate or indicate the value is dead.  */
5915 	else if ((TREE_CODE (type) == UNION_TYPE
5916 		  || TREE_CODE (type) == QUAL_UNION_TYPE)
5917 		 && ! CONSTRUCTOR_ELTS (exp))
5918 	  /* If the constructor is empty, clear the union.  */
5919 	  {
5920 	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5921 	    cleared = 1;
5922 	  }
5923 
5924 	/* If we are building a static constructor into a register,
5925 	   set the initial value as zero so we can fold the value into
5926 	   a constant.  But if more than one register is involved,
5927 	   this probably loses.  */
5928 	else if (REG_P (target) && TREE_STATIC (exp)
5929 		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5930 	  {
5931 	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5932 	    cleared = 1;
5933 	  }
5934 
5935         /* If the constructor has fewer fields than the structure or
5936 	   if we are initializing the structure to mostly zeros, clear
5937 	   the whole structure first.  Don't do this if TARGET is a
5938 	   register whose mode size isn't equal to SIZE since
5939 	   clear_storage can't handle this case.  */
5940 	else if (size > 0
5941 		 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5942 		      != fields_length (type))
5943 		     || mostly_zeros_p (exp))
5944 		 && (!REG_P (target)
5945 		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5946 			 == size)))
5947 	  {
5948 	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5949 	    cleared = 1;
5950 	  }
5951 
5952 	if (REG_P (target) && !cleared)
5953 	  emit_clobber (target);
5954 
5955 	/* Store each element of the constructor into the
5956 	   corresponding field of TARGET.  */
5957 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5958 	  {
5959 	    enum machine_mode mode;
5960 	    HOST_WIDE_INT bitsize;
5961 	    HOST_WIDE_INT bitpos = 0;
5962 	    tree offset;
5963 	    rtx to_rtx = target;
5964 
5965 	    /* Just ignore missing fields.  We cleared the whole
5966 	       structure, above, if any fields are missing.  */
5967 	    if (field == 0)
5968 	      continue;
5969 
5970 	    if (cleared && initializer_zerop (value))
5971 	      continue;
5972 
5973 	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
5974 	      bitsize = tree_to_uhwi (DECL_SIZE (field));
5975 	    else
5976 	      bitsize = -1;
5977 
5978 	    mode = DECL_MODE (field);
5979 	    if (DECL_BIT_FIELD (field))
5980 	      mode = VOIDmode;
5981 
5982 	    offset = DECL_FIELD_OFFSET (field);
5983 	    if (tree_fits_shwi_p (offset)
5984 		&& tree_fits_shwi_p (bit_position (field)))
5985 	      {
5986 		bitpos = int_bit_position (field);
5987 		offset = 0;
5988 	      }
5989 	    else
5990 	      bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
5991 
5992 	    if (offset)
5993 	      {
5994 	        enum machine_mode address_mode;
5995 		rtx offset_rtx;
5996 
5997 		offset
5998 		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5999 						    make_tree (TREE_TYPE (exp),
6000 							       target));
6001 
6002 		offset_rtx = expand_normal (offset);
6003 		gcc_assert (MEM_P (to_rtx));
6004 
6005 		address_mode = get_address_mode (to_rtx);
6006 		if (GET_MODE (offset_rtx) != address_mode)
6007 		  offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6008 
6009 		to_rtx = offset_address (to_rtx, offset_rtx,
6010 					 highest_pow2_factor (offset));
6011 	      }
6012 
6013 #ifdef WORD_REGISTER_OPERATIONS
6014 	    /* If this initializes a field that is smaller than a
6015 	       word, at the start of a word, try to widen it to a full
6016 	       word.  This special case allows us to output C++ member
6017 	       function initializations in a form that the optimizers
6018 	       can understand.  */
6019 	    if (REG_P (target)
6020 		&& bitsize < BITS_PER_WORD
6021 		&& bitpos % BITS_PER_WORD == 0
6022 		&& GET_MODE_CLASS (mode) == MODE_INT
6023 		&& TREE_CODE (value) == INTEGER_CST
6024 		&& exp_size >= 0
6025 		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6026 	      {
6027 		tree type = TREE_TYPE (value);
6028 
6029 		if (TYPE_PRECISION (type) < BITS_PER_WORD)
6030 		  {
6031 		    type = lang_hooks.types.type_for_mode
6032 		      (word_mode, TYPE_UNSIGNED (type));
6033 		    value = fold_convert (type, value);
6034 		  }
6035 
6036 		if (BYTES_BIG_ENDIAN)
6037 		  value
6038 		   = fold_build2 (LSHIFT_EXPR, type, value,
6039 				   build_int_cst (type,
6040 						  BITS_PER_WORD - bitsize));
6041 		bitsize = BITS_PER_WORD;
6042 		mode = word_mode;
6043 	      }
6044 #endif
6045 
6046 	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6047 		&& DECL_NONADDRESSABLE_P (field))
6048 	      {
6049 		to_rtx = copy_rtx (to_rtx);
6050 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6051 	      }
6052 
6053 	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
6054 				     value, cleared,
6055 				     get_alias_set (TREE_TYPE (field)));
6056 	  }
6057 	break;
6058       }
6059     case ARRAY_TYPE:
6060       {
6061 	tree value, index;
6062 	unsigned HOST_WIDE_INT i;
6063 	int need_to_clear;
6064 	tree domain;
6065 	tree elttype = TREE_TYPE (type);
6066 	int const_bounds_p;
6067 	HOST_WIDE_INT minelt = 0;
6068 	HOST_WIDE_INT maxelt = 0;
6069 
6070 	domain = TYPE_DOMAIN (type);
6071 	const_bounds_p = (TYPE_MIN_VALUE (domain)
6072 			  && TYPE_MAX_VALUE (domain)
6073 			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6074 			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6075 
6076 	/* If we have constant bounds for the range of the type, get them.  */
6077 	if (const_bounds_p)
6078 	  {
6079 	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6080 	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6081 	  }
6082 
6083 	/* If the constructor has fewer elements than the array, clear
6084            the whole array first.  Similarly if this is a static
6085            constructor of a non-BLKmode object.  */
6086 	if (cleared)
6087 	  need_to_clear = 0;
6088 	else if (REG_P (target) && TREE_STATIC (exp))
6089 	  need_to_clear = 1;
6090 	else
6091 	  {
6092 	    unsigned HOST_WIDE_INT idx;
6093 	    tree index, value;
6094 	    HOST_WIDE_INT count = 0, zero_count = 0;
6095 	    need_to_clear = ! const_bounds_p;
6096 
6097 	    /* This loop is a more accurate version of the loop in
6098 	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
6099 	       is also needed to check for missing elements.  */
6100 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6101 	      {
6102 		HOST_WIDE_INT this_node_count;
6103 
6104 		if (need_to_clear)
6105 		  break;
6106 
6107 		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6108 		  {
6109 		    tree lo_index = TREE_OPERAND (index, 0);
6110 		    tree hi_index = TREE_OPERAND (index, 1);
6111 
6112 		    if (! tree_fits_uhwi_p (lo_index)
6113 			|| ! tree_fits_uhwi_p (hi_index))
6114 		      {
6115 			need_to_clear = 1;
6116 			break;
6117 		      }
6118 
6119 		    this_node_count = (tree_to_uhwi (hi_index)
6120 				       - tree_to_uhwi (lo_index) + 1);
6121 		  }
6122 		else
6123 		  this_node_count = 1;
6124 
6125 		count += this_node_count;
6126 		if (mostly_zeros_p (value))
6127 		  zero_count += this_node_count;
6128 	      }
6129 
6130 	    /* Clear the entire array first if there are any missing
6131 	       elements, or if the incidence of zero elements is >=
6132 	       75%.  */
6133 	    if (! need_to_clear
6134 		&& (count < maxelt - minelt + 1
6135 		    || 4 * zero_count >= 3 * count))
6136 	      need_to_clear = 1;
6137 	  }
6138 
6139 	if (need_to_clear && size > 0)
6140 	  {
6141 	    if (REG_P (target))
6142 	      emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
6143 	    else
6144 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6145 	    cleared = 1;
6146 	  }
6147 
6148 	if (!cleared && REG_P (target))
6149 	  /* Inform later passes that the old value is dead.  */
6150 	  emit_clobber (target);
6151 
6152 	/* Store each element of the constructor into the
6153 	   corresponding element of TARGET, determined by counting the
6154 	   elements.  */
6155 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6156 	  {
6157 	    enum machine_mode mode;
6158 	    HOST_WIDE_INT bitsize;
6159 	    HOST_WIDE_INT bitpos;
6160 	    rtx xtarget = target;
6161 
6162 	    if (cleared && initializer_zerop (value))
6163 	      continue;
6164 
6165 	    mode = TYPE_MODE (elttype);
6166 	    if (mode == BLKmode)
6167 	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6168 			 ? tree_to_uhwi (TYPE_SIZE (elttype))
6169 			 : -1);
6170 	    else
6171 	      bitsize = GET_MODE_BITSIZE (mode);
6172 
6173 	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6174 	      {
6175 		tree lo_index = TREE_OPERAND (index, 0);
6176 		tree hi_index = TREE_OPERAND (index, 1);
6177 		rtx index_r, pos_rtx;
6178 		HOST_WIDE_INT lo, hi, count;
6179 		tree position;
6180 
6181 		/* If the range is constant and "small", unroll the loop.  */
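		/* E.g. a GNU range designator in user code,

		     int a[64] = { [4 ... 7] = -1 };

		   yields a RANGE_EXPR index and is handled here.  */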
6182 		if (const_bounds_p
6183 		    && tree_fits_shwi_p (lo_index)
6184 		    && tree_fits_shwi_p (hi_index)
6185 		    && (lo = tree_to_shwi (lo_index),
6186 			hi = tree_to_shwi (hi_index),
6187 			count = hi - lo + 1,
6188 			(!MEM_P (target)
6189 			 || count <= 2
6190 			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6191 			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6192 				 <= 40 * 8)))))
6193 		  {
6194 		    lo -= minelt;  hi -= minelt;
6195 		    for (; lo <= hi; lo++)
6196 		      {
6197 			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6198 
6199 			if (MEM_P (target)
6200 			    && !MEM_KEEP_ALIAS_SET_P (target)
6201 			    && TREE_CODE (type) == ARRAY_TYPE
6202 			    && TYPE_NONALIASED_COMPONENT (type))
6203 			  {
6204 			    target = copy_rtx (target);
6205 			    MEM_KEEP_ALIAS_SET_P (target) = 1;
6206 			  }
6207 
6208 			store_constructor_field
6209 			  (target, bitsize, bitpos, mode, value, cleared,
6210 			   get_alias_set (elttype));
6211 		      }
6212 		  }
6213 		else
6214 		  {
6215 		    rtx loop_start = gen_label_rtx ();
6216 		    rtx loop_end = gen_label_rtx ();
6217 		    tree exit_cond;
6218 
6219 		    expand_normal (hi_index);
6220 
6221 		    index = build_decl (EXPR_LOCATION (exp),
6222 					VAR_DECL, NULL_TREE, domain);
6223 		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6224 		    SET_DECL_RTL (index, index_r);
6225 		    store_expr (lo_index, index_r, 0, false);
6226 
6227 		    /* Build the head of the loop.  */
6228 		    do_pending_stack_adjust ();
6229 		    emit_label (loop_start);
6230 
6231 		    /* Assign value to element index.  */
6232 		    position =
6233 		      fold_convert (ssizetype,
6234 				    fold_build2 (MINUS_EXPR,
6235 						 TREE_TYPE (index),
6236 						 index,
6237 						 TYPE_MIN_VALUE (domain)));
6238 
6239 		    position =
6240 			size_binop (MULT_EXPR, position,
6241 				    fold_convert (ssizetype,
6242 						  TYPE_SIZE_UNIT (elttype)));
6243 
6244 		    pos_rtx = expand_normal (position);
6245 		    xtarget = offset_address (target, pos_rtx,
6246 					      highest_pow2_factor (position));
6247 		    xtarget = adjust_address (xtarget, mode, 0);
6248 		    if (TREE_CODE (value) == CONSTRUCTOR)
6249 		      store_constructor (value, xtarget, cleared,
6250 					 bitsize / BITS_PER_UNIT);
6251 		    else
6252 		      store_expr (value, xtarget, 0, false);
6253 
6254 		    /* Generate a conditional jump to exit the loop.  */
6255 		    exit_cond = build2 (LT_EXPR, integer_type_node,
6256 					index, hi_index);
6257 		    jumpif (exit_cond, loop_end, -1);
6258 
6259 		    /* Update the loop counter, and jump to the head of
6260 		       the loop.  */
6261 		    expand_assignment (index,
6262 				       build2 (PLUS_EXPR, TREE_TYPE (index),
6263 					       index, integer_one_node),
6264 				       false);
6265 
6266 		    emit_jump (loop_start);
6267 
6268 		    /* Build the end of the loop.  */
6269 		    emit_label (loop_end);
6270 		  }
6271 	      }
6272 	    else if ((index != 0 && ! tree_fits_shwi_p (index))
6273 		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6274 	      {
6275 		tree position;
6276 
6277 		if (index == 0)
6278 		  index = ssize_int (1);
6279 
6280 		if (minelt)
6281 		  index = fold_convert (ssizetype,
6282 					fold_build2 (MINUS_EXPR,
6283 						     TREE_TYPE (index),
6284 						     index,
6285 						     TYPE_MIN_VALUE (domain)));
6286 
6287 		position =
6288 		  size_binop (MULT_EXPR, index,
6289 			      fold_convert (ssizetype,
6290 					    TYPE_SIZE_UNIT (elttype)));
6291 		xtarget = offset_address (target,
6292 					  expand_normal (position),
6293 					  highest_pow2_factor (position));
6294 		xtarget = adjust_address (xtarget, mode, 0);
6295 		store_expr (value, xtarget, 0, false);
6296 	      }
6297 	    else
6298 	      {
6299 		if (index != 0)
6300 		  bitpos = ((tree_to_shwi (index) - minelt)
6301 			    * tree_to_uhwi (TYPE_SIZE (elttype)));
6302 		else
6303 		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6304 
6305 		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6306 		    && TREE_CODE (type) == ARRAY_TYPE
6307 		    && TYPE_NONALIASED_COMPONENT (type))
6308 		  {
6309 		    target = copy_rtx (target);
6310 		    MEM_KEEP_ALIAS_SET_P (target) = 1;
6311 		  }
6312 		store_constructor_field (target, bitsize, bitpos, mode, value,
6313 					 cleared, get_alias_set (elttype));
6314 	      }
6315 	  }
6316 	break;
6317       }
6318 
6319     case VECTOR_TYPE:
6320       {
6321 	unsigned HOST_WIDE_INT idx;
6322 	constructor_elt *ce;
6323 	int i;
6324 	int need_to_clear;
6325 	int icode = CODE_FOR_nothing;
6326 	tree elttype = TREE_TYPE (type);
6327 	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6328 	enum machine_mode eltmode = TYPE_MODE (elttype);
6329 	HOST_WIDE_INT bitsize;
6330 	HOST_WIDE_INT bitpos;
6331 	rtvec vector = NULL;
6332 	unsigned n_elts;
6333 	alias_set_type alias;
6334 
6335 	gcc_assert (eltmode != BLKmode);
6336 
6337 	n_elts = TYPE_VECTOR_SUBPARTS (type);
6338 	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6339 	  {
6340 	    enum machine_mode mode = GET_MODE (target);
6341 
6342 	    icode = (int) optab_handler (vec_init_optab, mode);
6343 	    /* Don't use vec_init<mode> if some elements have VECTOR_TYPE.  */
6344 	    if (icode != CODE_FOR_nothing)
6345 	      {
6346 		tree value;
6347 
6348 		FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6349 		  if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6350 		    {
6351 		      icode = CODE_FOR_nothing;
6352 		      break;
6353 		    }
6354 	      }
6355 	    if (icode != CODE_FOR_nothing)
6356 	      {
6357 		unsigned int i;
6358 
6359 		vector = rtvec_alloc (n_elts);
6360 		for (i = 0; i < n_elts; i++)
6361 		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6362 	      }
6363 	  }
6364 
6365 	/* If the constructor has fewer elements than the vector,
6366 	   clear the whole vector first.  Similarly if this is a static
6367 	   constructor of a non-BLKmode object.  */
6368 	if (cleared)
6369 	  need_to_clear = 0;
6370 	else if (REG_P (target) && TREE_STATIC (exp))
6371 	  need_to_clear = 1;
6372 	else
6373 	  {
6374 	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6375 	    tree value;
6376 
6377 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6378 	      {
6379 		int n_elts_here = tree_to_uhwi
6380 		  (int_const_binop (TRUNC_DIV_EXPR,
6381 				    TYPE_SIZE (TREE_TYPE (value)),
6382 				    TYPE_SIZE (elttype)));
6383 
6384 		count += n_elts_here;
6385 		if (mostly_zeros_p (value))
6386 		  zero_count += n_elts_here;
6387 	      }
6388 
6389 	    /* Clear the entire vector first if there are any missing elements,
6390 	       or if the incidence of zero elements is >= 75%.  */
6391 	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6392 	  }
6393 
6394 	if (need_to_clear && size > 0 && !vector)
6395 	  {
6396 	    if (REG_P (target))
6397 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6398 	    else
6399 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6400 	    cleared = 1;
6401 	  }
6402 
6403 	/* Inform later passes that the old value is dead.  */
6404 	if (!cleared && !vector && REG_P (target))
6405 	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6406 
6407         if (MEM_P (target))
6408 	  alias = MEM_ALIAS_SET (target);
6409 	else
6410 	  alias = get_alias_set (elttype);
6411 
6412         /* Store each element of the constructor into the corresponding
6413 	   element of TARGET, determined by counting the elements.  */
6414 	for (idx = 0, i = 0;
6415 	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6416 	     idx++, i += bitsize / elt_size)
6417 	  {
6418 	    HOST_WIDE_INT eltpos;
6419 	    tree value = ce->value;
6420 
6421 	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6422 	    if (cleared && initializer_zerop (value))
6423 	      continue;
6424 
6425 	    if (ce->index)
6426 	      eltpos = tree_to_uhwi (ce->index);
6427 	    else
6428 	      eltpos = i;
6429 
6430 	    if (vector)
6431 	      {
6432 		/* vec_init<mode> should not be used if there are VECTOR_TYPE
6433 		   elements.  */
6434 		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6435 		RTVEC_ELT (vector, eltpos)
6436 		  = expand_normal (value);
6437 	      }
6438 	    else
6439 	      {
6440 		enum machine_mode value_mode =
6441 		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6442 		  ? TYPE_MODE (TREE_TYPE (value))
6443 		  : eltmode;
6444 		bitpos = eltpos * elt_size;
6445 		store_constructor_field (target, bitsize, bitpos, value_mode,
6446 					 value, cleared, alias);
6447 	      }
6448 	  }
6449 
6450 	if (vector)
6451 	  emit_insn (GEN_FCN (icode)
6452 		     (target,
6453 		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
6454 	break;
6455       }
6456 
6457     default:
6458       gcc_unreachable ();
6459     }
6460 }
6461 
6462 /* Store the value of EXP (an expression tree)
6463    into a subfield of TARGET which has mode MODE and occupies
6464    BITSIZE bits, starting BITPOS bits from the start of TARGET.
6465    If MODE is VOIDmode, it means that we are storing into a bit-field.
6466 
6467    BITREGION_START is the bitpos of the first bitfield in this region.
6468    BITREGION_END is the bitpos of the ending bitfield in this region.
6469    These two fields are 0 if the C++ memory model does not apply,
6470    or we are not interested in keeping track of bitfield regions.
6471 
6472    Always return const0_rtx unless we have something particular to
6473    return.
6474 
6475    ALIAS_SET is the alias set for the destination.  This value will
6476    (in general) be different from that for TARGET, since TARGET is a
6477    reference to the containing structure.
6478 
6479    If NONTEMPORAL is true, try generating a nontemporal store.  */
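/* A minimal illustrative sketch, assuming a typical little-endian
   bit-field layout (the struct and variable names are made up):

       struct S { unsigned int a : 3, b : 5; } s;
       s.b = x;

   reaches this function with BITSIZE == 5, BITPOS == 3 and MODE ==
   VOIDmode, so the bit-field path below ends up in store_bit_field.  */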
6480 
6481 static rtx
6482 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6483 	     unsigned HOST_WIDE_INT bitregion_start,
6484 	     unsigned HOST_WIDE_INT bitregion_end,
6485 	     enum machine_mode mode, tree exp,
6486 	     alias_set_type alias_set, bool nontemporal)
6487 {
6488   if (TREE_CODE (exp) == ERROR_MARK)
6489     return const0_rtx;
6490 
6491   /* If we have nothing to store, do nothing unless the expression has
6492      side-effects.  */
6493   if (bitsize == 0)
6494     return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6495 
6496   if (GET_CODE (target) == CONCAT)
6497     {
6498       /* We're storing into a struct containing a single __complex.  */
6499 
6500       gcc_assert (!bitpos);
6501       return store_expr (exp, target, 0, nontemporal);
6502     }
6503 
6504   /* If the structure is in a register or if the component
6505      is a bit field, we cannot use addressing to access it.
6506      Use bit-field techniques or SUBREG to store in it.  */
6507 
6508   if (mode == VOIDmode
6509       || (mode != BLKmode && ! direct_store[(int) mode]
6510 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6511 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6512       || REG_P (target)
6513       || GET_CODE (target) == SUBREG
6514       /* If the field isn't aligned enough to store as an ordinary memref,
6515 	 store it as a bit field.  */
6516       || (mode != BLKmode
6517 	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6518 		|| bitpos % GET_MODE_ALIGNMENT (mode))
6519 	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6520 	      || (bitpos % BITS_PER_UNIT != 0)))
6521       || (bitsize >= 0 && mode != BLKmode
6522 	  && GET_MODE_BITSIZE (mode) > bitsize)
6523       /* If the RHS and field are a constant size and the size of the
6524 	 RHS isn't the same size as the bitfield, we must use bitfield
6525 	 operations.  */
6526       || (bitsize >= 0
6527 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6528 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6529       /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6530          decl we must use bitfield operations.  */
6531       || (bitsize >= 0
6532 	  && TREE_CODE (exp) == MEM_REF
6533 	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6534 	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6535 	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6536 	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6537     {
6538       rtx temp;
6539       gimple nop_def;
6540 
6541       /* If EXP is a NOP_EXPR of precision less than its mode, then that
6542 	 implies a mask operation.  If the precision is the same size as
6543 	 the field we're storing into, that mask is redundant.  This is
6544 	 particularly common with bit field assignments generated by the
6545 	 C front end.  */
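      /* For instance, with s.b = x and B a 5-bit bit-field, the front
	 end first converts X to the 5-bit field type; since exactly
	 BITSIZE == 5 bits are stored anyway, that conversion's masking
	 is redundant and the wider X is expanded instead (provided X's
	 type has at least BITSIZE bits of precision).  */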
6546       nop_def = get_def_for_expr (exp, NOP_EXPR);
6547       if (nop_def)
6548 	{
6549 	  tree type = TREE_TYPE (exp);
6550 	  if (INTEGRAL_TYPE_P (type)
6551 	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6552 	      && bitsize == TYPE_PRECISION (type))
6553 	    {
6554 	      tree op = gimple_assign_rhs1 (nop_def);
6555 	      type = TREE_TYPE (op);
6556 	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6557 		exp = op;
6558 	    }
6559 	}
6560 
6561       temp = expand_normal (exp);
6562 
6563       /* If BITSIZE is narrower than the size of the type of EXP
6564 	 we will be narrowing TEMP.  Normally, what's wanted are the
6565 	 low-order bits.  However, if EXP's type is a record and this is a
6566 	 big-endian machine, we want the upper BITSIZE bits.  */
6567       if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6568 	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6569 	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6570 	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6571 			     GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6572 			     NULL_RTX, 1);
6573 
6574       /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
6575       if (mode != VOIDmode && mode != BLKmode
6576 	  && mode != TYPE_MODE (TREE_TYPE (exp)))
6577 	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6578 
6579       /* If the modes of TEMP and TARGET are both BLKmode, both
6580 	 must be in memory and BITPOS must be aligned on a byte
6581 	 boundary.  If so, we simply do a block copy.  Likewise
6582 	 for a BLKmode-like TARGET.  */
6583       if (GET_MODE (temp) == BLKmode
6584 	  && (GET_MODE (target) == BLKmode
6585 	      || (MEM_P (target)
6586 		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6587 		  && (bitpos % BITS_PER_UNIT) == 0
6588 		  && (bitsize % BITS_PER_UNIT) == 0)))
6589 	{
6590 	  gcc_assert (MEM_P (target) && MEM_P (temp)
6591 		      && (bitpos % BITS_PER_UNIT) == 0);
6592 
6593 	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6594 	  emit_block_move (target, temp,
6595 			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6596 				    / BITS_PER_UNIT),
6597 			   BLOCK_OP_NORMAL);
6598 
6599 	  return const0_rtx;
6600 	}
6601 
6602       /* Handle calls that return values in multiple non-contiguous locations.
6603 	 The Irix 6 ABI has examples of this.  */
6604       if (GET_CODE (temp) == PARALLEL)
6605 	{
6606 	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6607 	  rtx temp_target;
6608 	  if (mode == BLKmode || mode == VOIDmode)
6609 	    mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6610 	  temp_target = gen_reg_rtx (mode);
6611 	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6612 	  temp = temp_target;
6613 	}
6614       else if (mode == BLKmode)
6615 	{
6616 	  /* Handle calls that return BLKmode values in registers.  */
6617 	  if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6618 	    {
6619 	      rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6620 	      copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6621 	      temp = temp_target;
6622 	    }
6623 	  else
6624 	    {
6625 	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6626 	      rtx temp_target;
6627 	      mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6628 	      temp_target = gen_reg_rtx (mode);
6629 	      temp_target
6630 	        = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6631 				     temp_target, mode, mode);
6632 	      temp = temp_target;
6633 	    }
6634 	}
6635 
6636       /* Store the value in the bitfield.  */
6637       store_bit_field (target, bitsize, bitpos,
6638 		       bitregion_start, bitregion_end,
6639 		       mode, temp);
6640 
6641       return const0_rtx;
6642     }
6643   else
6644     {
6645       /* Now build a reference to just the desired component.  */
6646       rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6647 
6648       if (to_rtx == target)
6649 	to_rtx = copy_rtx (to_rtx);
6650 
6651       if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6652 	set_mem_alias_set (to_rtx, alias_set);
6653 
6654       return store_expr (exp, to_rtx, 0, nontemporal);
6655     }
6656 }
6657 
6658 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6659    an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6660    codes and find the ultimate containing object, which we return.
6661 
6662    We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6663    bit position, and *PUNSIGNEDP to the signedness of the field.
6664    If the position of the field is variable, we store a tree
6665    giving the variable offset (in units) in *POFFSET.
6666    This offset is in addition to the bit position.
6667    If the position is not variable, we store 0 in *POFFSET.
6668 
6669    If any of the extraction expressions is volatile,
6670    we store 1 in *PVOLATILEP.  Otherwise we don't change that.
6671 
6672    If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6673    Otherwise, it is a mode that can be used to access the field.
6674 
6675    If the field describes a variable-sized object, *PMODE is set to
6676    BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
6677    this case, but the address of the object can be found.
6678 
6679    If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6680    look through nodes that serve as markers of a greater alignment than
6681    the one that can be deduced from the expression.  These nodes make it
6682    possible for front-ends to prevent temporaries from being created by
6683    the middle-end on alignment considerations.  For that purpose, the
6684    normal operating mode at high-level is to always pass FALSE so that
6685    the ultimate containing object is really returned; moreover, the
6686    associated predicate handled_component_p will always return TRUE
6687    on these nodes, thus indicating that they are essentially handled
6688    by get_inner_reference.  TRUE should only be passed when the caller
6689    is scanning the expression in order to build another representation
6690    and specifically knows how to handle these nodes; as such, this is
6691    the normal operating mode in the RTL expanders.  */
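/* A minimal worked example, assuming a 32-bit int, 8-bit char and no
   extra padding (the names below are only for illustration): for an
   access like

       struct S { int pad; char c[8]; } *p;
       ... p->c[3] ...

   the ultimate containing object handed back is the dereference of P
   (a MEM_REF), with *PBITSIZE = 8, *PBITPOS = 32 + 3 * 8 = 56,
   *POFFSET = NULL_TREE (the position is constant) and *PMODE = QImode.  */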
6692 
6693 tree
6694 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6695 		     HOST_WIDE_INT *pbitpos, tree *poffset,
6696 		     enum machine_mode *pmode, int *punsignedp,
6697 		     int *pvolatilep, bool keep_aligning)
6698 {
6699   tree size_tree = 0;
6700   enum machine_mode mode = VOIDmode;
6701   bool blkmode_bitfield = false;
6702   tree offset = size_zero_node;
6703   double_int bit_offset = double_int_zero;
6704 
6705   /* First get the mode, signedness, and size.  We do this from just the
6706      outermost expression.  */
6707   *pbitsize = -1;
6708   if (TREE_CODE (exp) == COMPONENT_REF)
6709     {
6710       tree field = TREE_OPERAND (exp, 1);
6711       size_tree = DECL_SIZE (field);
6712       if (flag_strict_volatile_bitfields > 0
6713 	  && TREE_THIS_VOLATILE (exp)
6714 	  && DECL_BIT_FIELD_TYPE (field)
6715 	  && DECL_MODE (field) != BLKmode)
6716 	/* Volatile bitfields should be accessed in the mode of the
6717 	     field's type, not the mode computed based on the bit
6718 	     size.  */
6719 	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6720       else if (!DECL_BIT_FIELD (field))
6721 	mode = DECL_MODE (field);
6722       else if (DECL_MODE (field) == BLKmode)
6723 	blkmode_bitfield = true;
6724 
6725       *punsignedp = DECL_UNSIGNED (field);
6726     }
6727   else if (TREE_CODE (exp) == BIT_FIELD_REF)
6728     {
6729       size_tree = TREE_OPERAND (exp, 1);
6730       *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6731 		     || TYPE_UNSIGNED (TREE_TYPE (exp)));
6732 
6733       /* For vector types, with the correct size of access, use the mode of
6734 	 inner type.  */
6735       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6736 	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6737 	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6738         mode = TYPE_MODE (TREE_TYPE (exp));
6739     }
6740   else
6741     {
6742       mode = TYPE_MODE (TREE_TYPE (exp));
6743       *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6744 
6745       if (mode == BLKmode)
6746 	size_tree = TYPE_SIZE (TREE_TYPE (exp));
6747       else
6748 	*pbitsize = GET_MODE_BITSIZE (mode);
6749     }
6750 
6751   if (size_tree != 0)
6752     {
6753       if (! tree_fits_uhwi_p (size_tree))
6754 	mode = BLKmode, *pbitsize = -1;
6755       else
6756 	*pbitsize = tree_to_uhwi (size_tree);
6757     }
6758 
6759   /* Compute cumulative bit-offset for nested component-refs and array-refs,
6760      and find the ultimate containing object.  */
6761   while (1)
6762     {
6763       switch (TREE_CODE (exp))
6764 	{
6765 	case BIT_FIELD_REF:
6766 	  bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
6767 	  break;
6768 
6769 	case COMPONENT_REF:
6770 	  {
6771 	    tree field = TREE_OPERAND (exp, 1);
6772 	    tree this_offset = component_ref_field_offset (exp);
6773 
6774 	    /* If this field hasn't been filled in yet, don't go past it.
6775 	       This should only happen when folding expressions made during
6776 	       type construction.  */
6777 	    if (this_offset == 0)
6778 	      break;
6779 
6780 	    offset = size_binop (PLUS_EXPR, offset, this_offset);
6781 	    bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
6782 
6783 	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
6784 	  }
6785 	  break;
6786 
6787 	case ARRAY_REF:
6788 	case ARRAY_RANGE_REF:
6789 	  {
6790 	    tree index = TREE_OPERAND (exp, 1);
6791 	    tree low_bound = array_ref_low_bound (exp);
6792 	    tree unit_size = array_ref_element_size (exp);
6793 
6794 	    /* We assume all arrays have sizes that are a multiple of a byte.
6795 	       First subtract the lower bound, if any, in the type of the
6796 	       index, then convert to sizetype and multiply by the size of
6797 	       the array element.  */
6798 	    if (! integer_zerop (low_bound))
6799 	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6800 				   index, low_bound);
6801 
6802 	    offset = size_binop (PLUS_EXPR, offset,
6803 			         size_binop (MULT_EXPR,
6804 					     fold_convert (sizetype, index),
6805 					     unit_size));
6806 	  }
6807 	  break;
6808 
6809 	case REALPART_EXPR:
6810 	  break;
6811 
6812 	case IMAGPART_EXPR:
6813 	  bit_offset += double_int::from_uhwi (*pbitsize);
6814 	  break;
6815 
6816 	case VIEW_CONVERT_EXPR:
6817 	  if (keep_aligning && STRICT_ALIGNMENT
6818 	      && (TYPE_ALIGN (TREE_TYPE (exp))
6819 	       > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6820 	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6821 		  < BIGGEST_ALIGNMENT)
6822 	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6823 		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6824 	    goto done;
6825 	  break;
6826 
6827 	case MEM_REF:
6828 	  /* Hand back the decl for MEM[&decl, off].  */
6829 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6830 	    {
6831 	      tree off = TREE_OPERAND (exp, 1);
6832 	      if (!integer_zerop (off))
6833 		{
6834 		  double_int boff, coff = mem_ref_offset (exp);
6835 		  boff = coff.lshift (BITS_PER_UNIT == 8
6836 				      ? 3 : exact_log2 (BITS_PER_UNIT));
6837 		  bit_offset += boff;
6838 		}
6839 	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6840 	    }
6841 	  goto done;
6842 
6843 	default:
6844 	  goto done;
6845 	}
6846 
6847       /* If any reference in the chain is volatile, the effect is volatile.  */
6848       if (TREE_THIS_VOLATILE (exp))
6849 	*pvolatilep = 1;
6850 
6851       exp = TREE_OPERAND (exp, 0);
6852     }
6853  done:
6854 
6855   /* If OFFSET is constant, see if we can return the whole thing as a
6856      constant bit position.  Make sure to handle overflow during
6857      this conversion.  */
6858   if (TREE_CODE (offset) == INTEGER_CST)
6859     {
6860       double_int tem = tree_to_double_int (offset);
6861       tem = tem.sext (TYPE_PRECISION (sizetype));
6862       tem = tem.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
6863       tem += bit_offset;
6864       if (tem.fits_shwi ())
6865 	{
6866 	  *pbitpos = tem.to_shwi ();
6867 	  *poffset = offset = NULL_TREE;
6868 	}
6869     }
6870 
6871   /* Otherwise, split it up.  */
6872   if (offset)
6873     {
6874       /* Avoid returning a negative bitpos as this may wreak havoc later.  */
6875       if (bit_offset.is_negative ())
6876         {
6877 	  double_int mask
6878 	    = double_int::mask (BITS_PER_UNIT == 8
6879 			       ? 3 : exact_log2 (BITS_PER_UNIT));
6880 	  double_int tem = bit_offset.and_not (mask);
6881 	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6882 	     Subtract it from BIT_OFFSET and add it (scaled) to OFFSET.  */
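	  /* A worked instance, assuming BITS_PER_UNIT == 8: for a
	     BIT_OFFSET of -5, MASK is 7 and TEM is -8, so BIT_OFFSET
	     becomes -5 - (-8) = 3 and OFFSET is decreased by one byte
	     (TEM >> 3 == -1), leaving a non-negative bitpos.  */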
6883 	  bit_offset -= tem;
6884 	  tem = tem.arshift (BITS_PER_UNIT == 8
6885 			     ? 3 : exact_log2 (BITS_PER_UNIT),
6886 			     HOST_BITS_PER_DOUBLE_INT);
6887 	  offset = size_binop (PLUS_EXPR, offset,
6888 			       double_int_to_tree (sizetype, tem));
6889 	}
6890 
6891       *pbitpos = bit_offset.to_shwi ();
6892       *poffset = offset;
6893     }
6894 
6895   /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
6896   if (mode == VOIDmode
6897       && blkmode_bitfield
6898       && (*pbitpos % BITS_PER_UNIT) == 0
6899       && (*pbitsize % BITS_PER_UNIT) == 0)
6900     *pmode = BLKmode;
6901   else
6902     *pmode = mode;
6903 
6904   return exp;
6905 }
6906 
6907 /* Return a tree of sizetype representing the size, in bytes, of the element
6908    of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
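/* For instance, for an ARRAY_REF into "double a[10]" with no explicit
   operand 3, this is simply TYPE_SIZE_UNIT of double, i.e. a sizetype
   constant 8 on typical targets.  */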
6909 
6910 tree
6911 array_ref_element_size (tree exp)
6912 {
6913   tree aligned_size = TREE_OPERAND (exp, 3);
6914   tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6915   location_t loc = EXPR_LOCATION (exp);
6916 
6917   /* If a size was specified in the ARRAY_REF, it's the size measured
6918      in alignment units of the element type.  So multiply by that value.  */
6919   if (aligned_size)
6920     {
6921       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6922 	 sizetype from another type of the same width and signedness.  */
6923       if (TREE_TYPE (aligned_size) != sizetype)
6924 	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6925       return size_binop_loc (loc, MULT_EXPR, aligned_size,
6926 			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
6927     }
6928 
6929   /* Otherwise, take the size from that of the element type.  Substitute
6930      any PLACEHOLDER_EXPR that we have.  */
6931   else
6932     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6933 }
6934 
6935 /* Return a tree representing the lower bound of the array mentioned in
6936    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6937 
6938 tree
6939 array_ref_low_bound (tree exp)
6940 {
6941   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6942 
6943   /* If a lower bound is specified in EXP, use it.  */
6944   if (TREE_OPERAND (exp, 2))
6945     return TREE_OPERAND (exp, 2);
6946 
6947   /* Otherwise, if there is a domain type and it has a lower bound, use it,
6948      substituting for a PLACEHOLDER_EXPR as needed.  */
6949   if (domain_type && TYPE_MIN_VALUE (domain_type))
6950     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6951 
6952   /* Otherwise, return a zero of the appropriate type.  */
6953   return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6954 }
6955 
6956 /* Returns true if REF is an array reference to an array at the end of
6957    a structure.  If this is the case, the array may be allocated larger
6958    than its upper bound implies.  */
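/* An illustrative case (names invented for the example): given

       struct S { int n; int tail[1]; } *p;

   a reference like p->tail[i] satisfies this predicate, since TAIL is
   the last field and the base is a pointer dereference rather than a
   declared object; for a declared "struct S s;" the walk ends at the
   VAR_DECL and false is returned.  */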
6959 
6960 bool
6961 array_at_struct_end_p (tree ref)
6962 {
6963   if (TREE_CODE (ref) != ARRAY_REF
6964       && TREE_CODE (ref) != ARRAY_RANGE_REF)
6965     return false;
6966 
6967   while (handled_component_p (ref))
6968     {
6969       /* If the reference chain contains a component reference to a
6970          non-union type and another field follows, the reference
6971 	 is not at the end of a structure.  */
6972       if (TREE_CODE (ref) == COMPONENT_REF
6973 	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6974 	{
6975 	  tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6976 	  while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6977 	    nextf = DECL_CHAIN (nextf);
6978 	  if (nextf)
6979 	    return false;
6980 	}
6981 
6982       ref = TREE_OPERAND (ref, 0);
6983     }
6984 
6985   /* If the reference is based on a declared entity, the size of the array
6986      is constrained by its given domain.  */
6987   if (DECL_P (ref))
6988     return false;
6989 
6990   return true;
6991 }
6992 
6993 /* Return a tree representing the upper bound of the array mentioned in
6994    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6995 
6996 tree
6997 array_ref_up_bound (tree exp)
6998 {
6999   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7000 
7001   /* If there is a domain type and it has an upper bound, use it, substituting
7002      for a PLACEHOLDER_EXPR as needed.  */
7003   if (domain_type && TYPE_MAX_VALUE (domain_type))
7004     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7005 
7006   /* Otherwise fail.  */
7007   return NULL_TREE;
7008 }
7009 
7010 /* Return a tree representing the offset, in bytes, of the field referenced
7011    by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
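/* For example (a sketch of the first case below): if operand 2 of the
   COMPONENT_REF is 2 and DECL_OFFSET_ALIGN (FIELD) is 64 bits, with
   BITS_PER_UNIT == 8, the result is the sizetype constant
   2 * (64 / 8) == 16 bytes.  */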
7012 
7013 tree
7014 component_ref_field_offset (tree exp)
7015 {
7016   tree aligned_offset = TREE_OPERAND (exp, 2);
7017   tree field = TREE_OPERAND (exp, 1);
7018   location_t loc = EXPR_LOCATION (exp);
7019 
7020   /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7021      in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
7022      value.  */
7023   if (aligned_offset)
7024     {
7025       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7026 	 sizetype from another type of the same width and signedness.  */
7027       if (TREE_TYPE (aligned_offset) != sizetype)
7028 	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7029       return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7030 			     size_int (DECL_OFFSET_ALIGN (field)
7031 				       / BITS_PER_UNIT));
7032     }
7033 
7034   /* Otherwise, take the offset from that of the field.  Substitute
7035      any PLACEHOLDER_EXPR that we have.  */
7036   else
7037     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7038 }
7039 
7040 /* Alignment in bits the TARGET of an assignment may be assumed to have.  */
7041 
7042 static unsigned HOST_WIDE_INT
7043 target_align (const_tree target)
7044 {
7045   /* We might have a chain of nested references with intermediate misaligning
7046      bitfield components, so we need to recurse to find out.  */
7047 
7048   unsigned HOST_WIDE_INT this_align, outer_align;
7049 
7050   switch (TREE_CODE (target))
7051     {
7052     case BIT_FIELD_REF:
7053       return 1;
7054 
7055     case COMPONENT_REF:
7056       this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7057       outer_align = target_align (TREE_OPERAND (target, 0));
7058       return MIN (this_align, outer_align);
7059 
7060     case ARRAY_REF:
7061     case ARRAY_RANGE_REF:
7062       this_align = TYPE_ALIGN (TREE_TYPE (target));
7063       outer_align = target_align (TREE_OPERAND (target, 0));
7064       return MIN (this_align, outer_align);
7065 
7066     CASE_CONVERT:
7067     case NON_LVALUE_EXPR:
7068     case VIEW_CONVERT_EXPR:
7069       this_align = TYPE_ALIGN (TREE_TYPE (target));
7070       outer_align = target_align (TREE_OPERAND (target, 0));
7071       return MAX (this_align, outer_align);
7072 
7073     default:
7074       return TYPE_ALIGN (TREE_TYPE (target));
7075     }
7076 }
7077 
7078 
7079 /* Given an rtx VALUE that may contain additions and multiplications, return
7080    an equivalent value that just refers to a register, memory, or constant.
7081    This is done by generating instructions to perform the arithmetic and
7082    returning a pseudo-register containing the value.
7083 
7084    The returned value may be a REG, SUBREG, MEM or constant.  */
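/* A minimal sketch of its effect: for VALUE == (plus:SI (reg:SI N)
   (const_int 4)) and TARGET == 0 (the pseudo number N is arbitrary
   here), an add insn is emitted into a fresh pseudo and that REG is
   returned; a plain REG, MEM or constant is returned unchanged.  */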
7085 
7086 rtx
7087 force_operand (rtx value, rtx target)
7088 {
7089   rtx op1, op2;
7090   /* Use subtarget as the target for operand 0 of a binary operation.  */
7091   rtx subtarget = get_subtarget (target);
7092   enum rtx_code code = GET_CODE (value);
7093 
7094   /* Check for subreg applied to an expression produced by loop optimizer.  */
7095   if (code == SUBREG
7096       && !REG_P (SUBREG_REG (value))
7097       && !MEM_P (SUBREG_REG (value)))
7098     {
7099       value
7100 	= simplify_gen_subreg (GET_MODE (value),
7101 			       force_reg (GET_MODE (SUBREG_REG (value)),
7102 					  force_operand (SUBREG_REG (value),
7103 							 NULL_RTX)),
7104 			       GET_MODE (SUBREG_REG (value)),
7105 			       SUBREG_BYTE (value));
7106       code = GET_CODE (value);
7107     }
7108 
7109   /* Check for a PIC address load.  */
7110   if ((code == PLUS || code == MINUS)
7111       && XEXP (value, 0) == pic_offset_table_rtx
7112       && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7113 	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
7114 	  || GET_CODE (XEXP (value, 1)) == CONST))
7115     {
7116       if (!subtarget)
7117 	subtarget = gen_reg_rtx (GET_MODE (value));
7118       emit_move_insn (subtarget, value);
7119       return subtarget;
7120     }
7121 
7122   if (ARITHMETIC_P (value))
7123     {
7124       op2 = XEXP (value, 1);
7125       if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7126 	subtarget = 0;
7127       if (code == MINUS && CONST_INT_P (op2))
7128 	{
7129 	  code = PLUS;
7130 	  op2 = negate_rtx (GET_MODE (value), op2);
7131 	}
7132 
7133       /* Check for an addition with OP2 a constant integer and our first
7134          operand a PLUS of a virtual register and something else.  In that
7135          case, we want to emit the sum of the virtual register and the
7136          constant first and then add the other value.  This allows virtual
7137          register instantiation to simply modify the constant rather than
7138          creating another one around this addition.  */
7139       if (code == PLUS && CONST_INT_P (op2)
7140 	  && GET_CODE (XEXP (value, 0)) == PLUS
7141 	  && REG_P (XEXP (XEXP (value, 0), 0))
7142 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7143 	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7144 	{
7145 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
7146 					  XEXP (XEXP (value, 0), 0), op2,
7147 					  subtarget, 0, OPTAB_LIB_WIDEN);
7148 	  return expand_simple_binop (GET_MODE (value), code, temp,
7149 				      force_operand (XEXP (XEXP (value,
7150 								 0), 1), 0),
7151 				      target, 0, OPTAB_LIB_WIDEN);
7152 	}
7153 
7154       op1 = force_operand (XEXP (value, 0), subtarget);
7155       op2 = force_operand (op2, NULL_RTX);
7156       switch (code)
7157 	{
7158 	case MULT:
7159 	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
7160 	case DIV:
7161 	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
7162 	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
7163 					target, 1, OPTAB_LIB_WIDEN);
7164 	  else
7165 	    return expand_divmod (0,
7166 				  FLOAT_MODE_P (GET_MODE (value))
7167 				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
7168 				  GET_MODE (value), op1, op2, target, 0);
7169 	case MOD:
7170 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7171 				target, 0);
7172 	case UDIV:
7173 	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7174 				target, 1);
7175 	case UMOD:
7176 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7177 				target, 1);
7178 	case ASHIFTRT:
7179 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7180 				      target, 0, OPTAB_LIB_WIDEN);
7181 	default:
7182 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7183 				      target, 1, OPTAB_LIB_WIDEN);
7184 	}
7185     }
7186   if (UNARY_P (value))
7187     {
7188       if (!target)
7189 	target = gen_reg_rtx (GET_MODE (value));
7190       op1 = force_operand (XEXP (value, 0), NULL_RTX);
7191       switch (code)
7192 	{
7193 	case ZERO_EXTEND:
7194 	case SIGN_EXTEND:
7195 	case TRUNCATE:
7196 	case FLOAT_EXTEND:
7197 	case FLOAT_TRUNCATE:
7198 	  convert_move (target, op1, code == ZERO_EXTEND);
7199 	  return target;
7200 
7201 	case FIX:
7202 	case UNSIGNED_FIX:
7203 	  expand_fix (target, op1, code == UNSIGNED_FIX);
7204 	  return target;
7205 
7206 	case FLOAT:
7207 	case UNSIGNED_FLOAT:
7208 	  expand_float (target, op1, code == UNSIGNED_FLOAT);
7209 	  return target;
7210 
7211 	default:
7212 	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7213 	}
7214     }
7215 
7216 #ifdef INSN_SCHEDULING
7217   /* On machines that have insn scheduling, we want all memory references to be
7218      explicit, so we need to deal with such paradoxical SUBREGs.  */
7219   if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7220     value
7221       = simplify_gen_subreg (GET_MODE (value),
7222 			     force_reg (GET_MODE (SUBREG_REG (value)),
7223 					force_operand (SUBREG_REG (value),
7224 						       NULL_RTX)),
7225 			     GET_MODE (SUBREG_REG (value)),
7226 			     SUBREG_BYTE (value));
7227 #endif
7228 
7229   return value;
7230 }
7231 
7232 /* Subroutine of expand_expr: return nonzero iff there is no way that
7233    EXP can reference X, which is being modified.  TOP_P is nonzero if this
7234    call is going to be used to determine whether we need a temporary
7235    for EXP, as opposed to a recursive call to this function.
7236 
7237    It is always safe for this routine to return zero since it merely
7238    searches for optimization opportunities.  */
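/* For illustration: if X is a MEM and EXP contains a CALL_EXPR, the
   call is assumed to clobber all of memory, so 0 is returned and the
   caller evaluates EXP into a temporary instead of X; for a plain
   integer constant EXP, 1 is returned immediately.  */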
7239 
7240 int
7241 safe_from_p (const_rtx x, tree exp, int top_p)
7242 {
7243   rtx exp_rtl = 0;
7244   int i, nops;
7245 
7246   if (x == 0
7247       /* If EXP has varying size, we MUST use a target since we currently
7248 	 have no way of allocating temporaries of variable size
7249 	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7250 	 So we assume here that something at a higher level has prevented a
7251 	 clash.  This is somewhat bogus, but the best we can do.  Only
7252 	 do this when X is BLKmode and when we are at the top level.  */
7253       || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7254 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7255 	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7256 	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7257 	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7258 	      != INTEGER_CST)
7259 	  && GET_MODE (x) == BLKmode)
7260       /* If X is in the outgoing argument area, it is always safe.  */
7261       || (MEM_P (x)
7262 	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
7263 	      || (GET_CODE (XEXP (x, 0)) == PLUS
7264 		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7265     return 1;
7266 
7267   /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7268      find the underlying pseudo.  */
7269   if (GET_CODE (x) == SUBREG)
7270     {
7271       x = SUBREG_REG (x);
7272       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7273 	return 0;
7274     }
7275 
7276   /* Now look at our tree code and possibly recurse.  */
7277   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7278     {
7279     case tcc_declaration:
7280       exp_rtl = DECL_RTL_IF_SET (exp);
7281       break;
7282 
7283     case tcc_constant:
7284       return 1;
7285 
7286     case tcc_exceptional:
7287       if (TREE_CODE (exp) == TREE_LIST)
7288 	{
7289 	  while (1)
7290 	    {
7291 	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7292 		return 0;
7293 	      exp = TREE_CHAIN (exp);
7294 	      if (!exp)
7295 		return 1;
7296 	      if (TREE_CODE (exp) != TREE_LIST)
7297 		return safe_from_p (x, exp, 0);
7298 	    }
7299 	}
7300       else if (TREE_CODE (exp) == CONSTRUCTOR)
7301 	{
7302 	  constructor_elt *ce;
7303 	  unsigned HOST_WIDE_INT idx;
7304 
7305 	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7306 	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7307 		|| !safe_from_p (x, ce->value, 0))
7308 	      return 0;
7309 	  return 1;
7310 	}
7311       else if (TREE_CODE (exp) == ERROR_MARK)
7312 	return 1;	/* An already-visited SAVE_EXPR? */
7313       else
7314 	return 0;
7315 
7316     case tcc_statement:
7317       /* The only case we look at here is the DECL_INITIAL inside a
7318 	 DECL_EXPR.  */
7319       return (TREE_CODE (exp) != DECL_EXPR
7320 	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7321 	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7322 	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7323 
7324     case tcc_binary:
7325     case tcc_comparison:
7326       if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7327 	return 0;
7328       /* Fall through.  */
7329 
7330     case tcc_unary:
7331       return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7332 
7333     case tcc_expression:
7334     case tcc_reference:
7335     case tcc_vl_exp:
7336       /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
7337 	 the expression.  If it is set, we conflict iff we are that rtx or
7338 	 both are in memory.  Otherwise, we check all operands of the
7339 	 expression recursively.  */
7340 
7341       switch (TREE_CODE (exp))
7342 	{
7343 	case ADDR_EXPR:
7344 	  /* If the operand is static or we are static, we can't conflict.
7345 	     Likewise if we don't conflict with the operand at all.  */
7346 	  if (staticp (TREE_OPERAND (exp, 0))
7347 	      || TREE_STATIC (exp)
7348 	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7349 	    return 1;
7350 
7351 	  /* Otherwise, the only way this can conflict is if we are taking
7352 	     the address of a DECL whose address is part of X, which is
7353 	     very rare.  */
7354 	  exp = TREE_OPERAND (exp, 0);
7355 	  if (DECL_P (exp))
7356 	    {
7357 	      if (!DECL_RTL_SET_P (exp)
7358 		  || !MEM_P (DECL_RTL (exp)))
7359 		return 0;
7360 	      else
7361 		exp_rtl = XEXP (DECL_RTL (exp), 0);
7362 	    }
7363 	  break;
7364 
7365 	case MEM_REF:
7366 	  if (MEM_P (x)
7367 	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7368 					get_alias_set (exp)))
7369 	    return 0;
7370 	  break;
7371 
7372 	case CALL_EXPR:
7373 	  /* Assume that the call will clobber all hard registers and
7374 	     all of memory.  */
7375 	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7376 	      || MEM_P (x))
7377 	    return 0;
7378 	  break;
7379 
7380 	case WITH_CLEANUP_EXPR:
7381 	case CLEANUP_POINT_EXPR:
7382 	  /* Lowered by gimplify.c.  */
7383 	  gcc_unreachable ();
7384 
7385 	case SAVE_EXPR:
7386 	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7387 
7388 	default:
7389 	  break;
7390 	}
7391 
7392       /* If we have an rtx, we do not need to scan our operands.  */
7393       if (exp_rtl)
7394 	break;
7395 
7396       nops = TREE_OPERAND_LENGTH (exp);
7397       for (i = 0; i < nops; i++)
7398 	if (TREE_OPERAND (exp, i) != 0
7399 	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7400 	  return 0;
7401 
7402       break;
7403 
7404     case tcc_type:
7405       /* Should never get a type here.  */
7406       gcc_unreachable ();
7407     }
7408 
7409   /* If we have an rtl, find any enclosed object.  Then see if we conflict
7410      with it.  */
7411   if (exp_rtl)
7412     {
7413       if (GET_CODE (exp_rtl) == SUBREG)
7414 	{
7415 	  exp_rtl = SUBREG_REG (exp_rtl);
7416 	  if (REG_P (exp_rtl)
7417 	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7418 	    return 0;
7419 	}
7420 
7421       /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
7422 	 are memory and they conflict.  */
7423       return ! (rtx_equal_p (x, exp_rtl)
7424 		|| (MEM_P (x) && MEM_P (exp_rtl)
7425 		    && true_dependence (exp_rtl, VOIDmode, x)));
7426     }
7427 
7428   /* If we reach here, it is safe.  */
7429   return 1;
7430 }
7431 
7432 
7433 /* Return the highest power of two that EXP is known to be a multiple of.
7434    This is used in updating alignment of MEMs in array references.  */
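/* For example, for the expression I * 12 with I unknown, tree_ctz
   reports at least two trailing zero bits (12 == 4 * 3), so 4 is
   returned; a constant such as 48 gives 16, unless that exceeds
   BIGGEST_ALIGNMENT.  */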
7435 
7436 unsigned HOST_WIDE_INT
7437 highest_pow2_factor (const_tree exp)
7438 {
7439   unsigned HOST_WIDE_INT ret;
7440   int trailing_zeros = tree_ctz (exp);
7441   if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7442     return BIGGEST_ALIGNMENT;
7443   ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7444   if (ret > BIGGEST_ALIGNMENT)
7445     return BIGGEST_ALIGNMENT;
7446   return ret;
7447 }
7448 
7449 /* Similar, except that the alignment requirements of TARGET are
7450    taken into account.  Assume it is at least as aligned as its
7451    type, unless it is a COMPONENT_REF in which case the layout of
7452    the structure gives the alignment.  */
7453 
7454 static unsigned HOST_WIDE_INT
7455 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7456 {
7457   unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7458   unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7459 
7460   return MAX (factor, talign);
7461 }
7462 
7463 #ifdef HAVE_conditional_move
7464 /* Convert the tree comparison code TCODE to the rtl one where the
7465    signedness is UNSIGNEDP.  */
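/* For instance, LT_EXPR maps to LTU when UNSIGNEDP is nonzero and to
   LT otherwise, while codes such as UNLE_EXPR, which already encode
   their ordered/unordered semantics, map unconditionally (to UNLE).  */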
7466 
7467 static enum rtx_code
7468 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7469 {
7470   enum rtx_code code;
7471   switch (tcode)
7472     {
7473     case EQ_EXPR:
7474       code = EQ;
7475       break;
7476     case NE_EXPR:
7477       code = NE;
7478       break;
7479     case LT_EXPR:
7480       code = unsignedp ? LTU : LT;
7481       break;
7482     case LE_EXPR:
7483       code = unsignedp ? LEU : LE;
7484       break;
7485     case GT_EXPR:
7486       code = unsignedp ? GTU : GT;
7487       break;
7488     case GE_EXPR:
7489       code = unsignedp ? GEU : GE;
7490       break;
7491     case UNORDERED_EXPR:
7492       code = UNORDERED;
7493       break;
7494     case ORDERED_EXPR:
7495       code = ORDERED;
7496       break;
7497     case UNLT_EXPR:
7498       code = UNLT;
7499       break;
7500     case UNLE_EXPR:
7501       code = UNLE;
7502       break;
7503     case UNGT_EXPR:
7504       code = UNGT;
7505       break;
7506     case UNGE_EXPR:
7507       code = UNGE;
7508       break;
7509     case UNEQ_EXPR:
7510       code = UNEQ;
7511       break;
7512     case LTGT_EXPR:
7513       code = LTGT;
7514       break;
7515 
7516     default:
7517       gcc_unreachable ();
7518     }
7519   return code;
7520 }
7521 #endif
7522 
7523 /* Subroutine of expand_expr.  Expand the two operands of a binary
7524    expression EXP0 and EXP1 placing the results in OP0 and OP1.
7525    The value may be stored in TARGET if TARGET is nonzero.  The
7526    MODIFIER argument is as documented by expand_expr.  */
7527 
7528 static void
7529 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7530 		 enum expand_modifier modifier)
7531 {
7532   if (! safe_from_p (target, exp1, 1))
7533     target = 0;
7534   if (operand_equal_p (exp0, exp1, 0))
7535     {
7536       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7537       *op1 = copy_rtx (*op0);
7538     }
7539   else
7540     {
7541       /* If we need to preserve evaluation order, copy exp0 into its own
7542 	 temporary variable so that it can't be clobbered by exp1.  */
7543       if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7544 	exp0 = save_expr (exp0);
7545       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7546       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7547     }
7548 }
7549 
7550 
7551 /* Return a MEM that contains constant EXP.  DEFER is as for
7552    output_constant_def and MODIFIER is as for expand_expr.  */
7553 
7554 static rtx
7555 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7556 {
7557   rtx mem;
7558 
7559   mem = output_constant_def (exp, defer);
7560   if (modifier != EXPAND_INITIALIZER)
7561     mem = use_anchored_address (mem);
7562   return mem;
7563 }
7564 
7565 /* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
7566    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7567 
7568 static rtx
7569 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7570 		         enum expand_modifier modifier, addr_space_t as)
7571 {
7572   rtx result, subtarget;
7573   tree inner, offset;
7574   HOST_WIDE_INT bitsize, bitpos;
7575   int volatilep, unsignedp;
7576   enum machine_mode mode1;
7577 
7578   /* If we are taking the address of a constant and are at the top level,
7579      we have to use output_constant_def since we can't call force_const_mem
7580      at top level.  */
7581   /* ??? This should be considered a front-end bug.  We should not be
7582      generating ADDR_EXPR of something that isn't an LVALUE.  The only
7583      exception here is STRING_CST.  */
7584   if (CONSTANT_CLASS_P (exp))
7585     {
7586       result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7587       if (modifier < EXPAND_SUM)
7588 	result = force_operand (result, target);
7589       return result;
7590     }
7591 
7592   /* Everything must be something allowed by is_gimple_addressable.  */
7593   switch (TREE_CODE (exp))
7594     {
7595     case INDIRECT_REF:
7596       /* This case will happen via recursion for &a->b.  */
7597       return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7598 
7599     case MEM_REF:
7600       {
7601 	tree tem = TREE_OPERAND (exp, 0);
7602 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
7603 	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7604 	return expand_expr (tem, target, tmode, modifier);
7605       }
7606 
7607     case CONST_DECL:
7608       /* Expand the initializer like constants above.  */
7609       result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7610 					   0, modifier), 0);
7611       if (modifier < EXPAND_SUM)
7612 	result = force_operand (result, target);
7613       return result;
7614 
7615     case REALPART_EXPR:
7616       /* The real part of the complex number is always first, therefore
7617 	 the address is the same as the address of the parent object.  */
7618       offset = 0;
7619       bitpos = 0;
7620       inner = TREE_OPERAND (exp, 0);
7621       break;
7622 
7623     case IMAGPART_EXPR:
7624       /* The imaginary part of the complex number is always second.
7625 	 The expression is therefore always offset by the size of the
7626 	 scalar type.  */
7627       offset = 0;
7628       bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7629       inner = TREE_OPERAND (exp, 0);
7630       break;
7631 
7632     case COMPOUND_LITERAL_EXPR:
7633       /* Allow COMPOUND_LITERAL_EXPR in initializers: if e.g.
7634 	 rtl_for_decl_init is called on DECL_INITIAL with
7635 	 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified.  */
7636       if (modifier == EXPAND_INITIALIZER
7637 	  && COMPOUND_LITERAL_EXPR_DECL (exp))
7638 	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7639 					target, tmode, modifier, as);
7640       /* FALLTHRU */
7641     default:
7642       /* If the object is a DECL, then expand it for its rtl.  Don't bypass
7643 	 expand_expr, as that can have various side effects; LABEL_DECLs for
7644 	 example, may not have their DECL_RTL set yet.  Expand the rtl of
7645 	 CONSTRUCTORs too, which should yield a memory reference for the
7646 	 constructor's contents.  Assume language specific tree nodes can
7647 	 be expanded in some interesting way.  */
7648       gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7649       if (DECL_P (exp)
7650 	  || TREE_CODE (exp) == CONSTRUCTOR
7651 	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7652 	{
7653 	  result = expand_expr (exp, target, tmode,
7654 				modifier == EXPAND_INITIALIZER
7655 				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7656 
7657 	  /* If the DECL isn't in memory, then the DECL wasn't properly
7658 	     marked TREE_ADDRESSABLE, which will be either a front-end
7659 	     or a tree optimizer bug.  */
7660 
7661 	  if (TREE_ADDRESSABLE (exp)
7662 	      && ! MEM_P (result)
7663 	      && ! targetm.calls.allocate_stack_slots_for_args ())
7664 	    {
7665 	      error ("local frame unavailable (naked function?)");
7666 	      return result;
7667 	    }
7668 	  else
7669 	    gcc_assert (MEM_P (result));
7670 	  result = XEXP (result, 0);
7671 
7672 	  /* ??? Is this needed anymore?  */
7673 	  if (DECL_P (exp))
7674 	    TREE_USED (exp) = 1;
7675 
7676 	  if (modifier != EXPAND_INITIALIZER
7677 	      && modifier != EXPAND_CONST_ADDRESS
7678 	      && modifier != EXPAND_SUM)
7679 	    result = force_operand (result, target);
7680 	  return result;
7681 	}
7682 
7683       /* Pass FALSE as the last argument to get_inner_reference although
7684 	 we are expanding to RTL.  The rationale is that we know how to
7685 	 handle "aligning nodes" here: we can just bypass them because
7686 	 they won't change the final object whose address will be returned
7687 	 (they actually exist only for that purpose).  */
7688       inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7689 				   &mode1, &unsignedp, &volatilep, false);
7690       break;
7691     }
7692 
7693   /* We must have made progress.  */
7694   gcc_assert (inner != exp);
7695 
7696   subtarget = offset || bitpos ? NULL_RTX : target;
7697   /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7698      inner alignment, force the inner to be sufficiently aligned.  */
7699   if (CONSTANT_CLASS_P (inner)
7700       && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7701     {
7702       inner = copy_node (inner);
7703       TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7704       TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7705       TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7706     }
7707   result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7708 
7709   if (offset)
7710     {
7711       rtx tmp;
7712 
7713       if (modifier != EXPAND_NORMAL)
7714 	result = force_operand (result, NULL);
7715       tmp = expand_expr (offset, NULL_RTX, tmode,
7716 			 modifier == EXPAND_INITIALIZER
7717 			  ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7718 
7719       /* expand_expr is allowed to return an object in a mode other
7720 	 than TMODE.  If it did, we need to convert.  */
7721       if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7722 	tmp = convert_modes (tmode, GET_MODE (tmp),
7723 			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7724       result = convert_memory_address_addr_space (tmode, result, as);
7725       tmp = convert_memory_address_addr_space (tmode, tmp, as);
7726 
7727       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7728 	result = simplify_gen_binary (PLUS, tmode, result, tmp);
7729       else
7730 	{
7731 	  subtarget = bitpos ? NULL_RTX : target;
7732 	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7733 					1, OPTAB_LIB_WIDEN);
7734 	}
7735     }
7736 
7737   if (bitpos)
7738     {
7739       /* Someone beforehand should have rejected taking the address
7740 	 of such an object.  */
7741       gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7742 
7743       result = convert_memory_address_addr_space (tmode, result, as);
7744       result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7745       if (modifier < EXPAND_SUM)
7746 	result = force_operand (result, target);
7747     }
7748 
7749   return result;
7750 }
7751 
7752 /* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
7753    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7754 
7755 static rtx
7756 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7757 		       enum expand_modifier modifier)
7758 {
7759   addr_space_t as = ADDR_SPACE_GENERIC;
7760   enum machine_mode address_mode = Pmode;
7761   enum machine_mode pointer_mode = ptr_mode;
7762   enum machine_mode rmode;
7763   rtx result;
7764 
7765   /* Target mode of VOIDmode says "whatever's natural".  */
7766   if (tmode == VOIDmode)
7767     tmode = TYPE_MODE (TREE_TYPE (exp));
7768 
7769   if (POINTER_TYPE_P (TREE_TYPE (exp)))
7770     {
7771       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7772       address_mode = targetm.addr_space.address_mode (as);
7773       pointer_mode = targetm.addr_space.pointer_mode (as);
7774     }
7775 
7776   /* We can get called with some Weird Things if the user does silliness
7777      like "(short) &a".  In that case, convert_memory_address won't do
7778      the right thing, so ignore the given target mode.  */
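  /* For instance, "(short) &a" on a typical 64-bit target arrives here
     with TMODE == HImode, which is neither the address mode nor the
     pointer mode of AS; the fallback below recomputes the address in
     address_mode so the later conversions stay well-defined.  */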
7779   if (tmode != address_mode && tmode != pointer_mode)
7780     tmode = address_mode;
7781 
7782   result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7783 				    tmode, modifier, as);
7784 
7785   /* Despite what expand_expr claims about ignoring TMODE when not
7786      strictly convenient, things break if we don't honor it.  Note
7787      that combined with the above, we only do this for pointer modes.  */
7788   rmode = GET_MODE (result);
7789   if (rmode == VOIDmode)
7790     rmode = tmode;
7791   if (rmode != tmode)
7792     result = convert_memory_address_addr_space (tmode, result, as);
7793 
7794   return result;
7795 }
7796 
7797 /* Generate code for computing CONSTRUCTOR EXP.
7798    An rtx for the computed value is returned.  If AVOID_TEMP_MEM
7799    is TRUE, instead of creating a temporary variable in memory,
7800    NULL is returned and the caller needs to handle it differently.  */
7801 
7802 static rtx
7803 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7804 		    bool avoid_temp_mem)
7805 {
7806   tree type = TREE_TYPE (exp);
7807   enum machine_mode mode = TYPE_MODE (type);
7808 
7809   /* Try to avoid creating a temporary at all.  This is possible
7810      if all of the initializer is zero.
7811      FIXME: try to handle all [0..255] initializers we can handle
7812      with memset.  */
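  /* For example, a store of an all-zero constructor such as
     "struct S s = { 0 };", when S is a BLKmode aggregate and a target
     is available, satisfies the test below and is emitted as a single
     clear_storage call, never materializing the constructor in a
     temporary.  */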
7813   if (TREE_STATIC (exp)
7814       && !TREE_ADDRESSABLE (exp)
7815       && target != 0 && mode == BLKmode
7816       && all_zeros_p (exp))
7817     {
7818       clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7819       return target;
7820     }
7821 
7822   /* All elts simple constants => refer to a constant in memory.  But
7823      if this is a non-BLKmode mode, let it store a field at a time
7824      since that should make a CONST_INT or CONST_DOUBLE when we
7825      fold.  Likewise, if we have a target we can use, it is best to
7826      store directly into the target unless the type is large enough
7827      that memcpy will be used.  If we are making an initializer and
7828      all operands are constant, put it in memory as well.
7829 
7830      FIXME: Avoid trying to fill vector constructors piece-meal.
7831      Output them with output_constant_def below unless we're sure
7832      they're zeros.  This should go away when vector initializers
7833      are treated like VECTOR_CST instead of arrays.  */
7834   if ((TREE_STATIC (exp)
7835        && ((mode == BLKmode
7836 	    && ! (target != 0 && safe_from_p (target, exp, 1)))
7837 		  || TREE_ADDRESSABLE (exp)
7838 		  || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7839 		      && (! MOVE_BY_PIECES_P
7840 				     (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7841 				      TYPE_ALIGN (type)))
7842 		      && ! mostly_zeros_p (exp))))
7843       || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7844 	  && TREE_CONSTANT (exp)))
7845     {
7846       rtx constructor;
7847 
7848       if (avoid_temp_mem)
7849 	return NULL_RTX;
7850 
7851       constructor = expand_expr_constant (exp, 1, modifier);
7852 
7853       if (modifier != EXPAND_CONST_ADDRESS
7854 	  && modifier != EXPAND_INITIALIZER
7855 	  && modifier != EXPAND_SUM)
7856 	constructor = validize_mem (constructor);
7857 
7858       return constructor;
7859     }
7860 
7861   /* Handle calls that pass values in multiple non-contiguous
7862      locations.  The Irix 6 ABI has examples of this.  */
7863   if (target == 0 || ! safe_from_p (target, exp, 1)
7864       || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7865     {
7866       if (avoid_temp_mem)
7867 	return NULL_RTX;
7868 
7869       target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7870     }
7871 
7872   store_constructor (exp, target, 0, int_expr_size (exp));
7873   return target;
7874 }
7875 
7876 
7877 /* expand_expr: generate code for computing expression EXP.
7878    An rtx for the computed value is returned.  The value is never null.
7879    In the case of a void EXP, const0_rtx is returned.
7880 
7881    The value may be stored in TARGET if TARGET is nonzero.
7882    TARGET is just a suggestion; callers must assume that
7883    the rtx returned may not be the same as TARGET.
7884 
7885    If TARGET is CONST0_RTX, it means that the value will be ignored.
7886 
7887    If TMODE is not VOIDmode, it suggests generating the
7888    result in mode TMODE.  But this is done only when convenient.
7889    Otherwise, TMODE is ignored and the value is generated in its natural mode.
7890    TMODE is just a suggestion; callers must assume that
7891    the rtx returned may not have mode TMODE.
7892 
7893    Note that TARGET may have neither TMODE nor MODE.  In that case, it
7894    probably will not be used.
7895 
7896    If MODIFIER is EXPAND_SUM then when EXP is an addition
7897    we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7898    or a nest of (PLUS ...) and (MINUS ...) where the terms are
7899    products as above, or REG or MEM, or constant.
7900    Ordinarily in such cases we would output mul or add instructions
7901    and then return a pseudo reg containing the sum.
7902 
7903    EXPAND_INITIALIZER is much like EXPAND_SUM except that
7904    it also marks a label as absolutely required (it can't be dead).
7905    It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7906    This is used for outputting expressions used in initializers.
7907 
7908    EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7909    with a constant address even if that address is not normally legitimate.
7910    EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7911 
7912    EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7913    a call parameter.  Such targets require special care as we haven't yet
7914    marked TARGET so that it's safe from being trashed by libcalls.  We
7915    don't want to use TARGET for anything but the final result;
7916    intermediate values must go elsewhere.  Additionally, calls to
7917    emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7918 
7919    If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7920    address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7921    DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
7922    COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7923    recursively.
7924 
7925    If INNER_REFERENCE_P is true, we are expanding an inner reference.
7926    In this case, we don't adjust a returned MEM rtx that wouldn't be
7927    sufficiently aligned for its mode; instead, it's up to the caller
7928    to deal with it afterwards.  This is used to make sure that unaligned
7929    base objects for which out-of-bounds accesses are supported, for
7930    example record types with trailing arrays, aren't realigned behind
7931    the back of the caller.
7932    The normal operating mode is to pass FALSE for this parameter.  */
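/* As a rough illustration of EXPAND_SUM (the exact RTL is
   target-dependent): expanding "p + i * 4" under that modifier may
   simply return

     (plus (mult (reg i) (const_int 4)) (reg p))

   without emitting any add or mult insns, leaving it to the caller to
   fold the whole sum into a memory address.  */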
7933 
7934 rtx
7935 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7936 		  enum expand_modifier modifier, rtx *alt_rtl,
7937 		  bool inner_reference_p)
7938 {
7939   rtx ret;
7940 
7941   /* Handle ERROR_MARK before anybody tries to access its type.  */
7942   if (TREE_CODE (exp) == ERROR_MARK
7943       || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7944     {
7945       ret = CONST0_RTX (tmode);
7946       return ret ? ret : const0_rtx;
7947     }
7948 
7949   ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7950 			    inner_reference_p);
7951   return ret;
7952 }
7953 
7954 /* Try to expand the conditional expression which is represented by
7955    TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
7956    return the rtl reg which represents the result.  Otherwise return
7957    NULL_RTX.  */
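/* Roughly, for "x = a < b ? c : d" this tries to produce a single
   conditional-move set of the shape

     (set (reg x) (if_then_else (lt (reg a) (reg b)) (reg c) (reg d)))

   and, when emit_conditional_move cannot do it, discards the generated
   sequence so the caller falls back to branches.  */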
7958 
7959 static rtx
7960 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7961 			      tree treeop1 ATTRIBUTE_UNUSED,
7962 			      tree treeop2 ATTRIBUTE_UNUSED)
7963 {
7964 #ifdef HAVE_conditional_move
7965   rtx insn;
7966   rtx op00, op01, op1, op2;
7967   enum rtx_code comparison_code;
7968   enum machine_mode comparison_mode;
7969   gimple srcstmt;
7970   rtx temp;
7971   tree type = TREE_TYPE (treeop1);
7972   int unsignedp = TYPE_UNSIGNED (type);
7973   enum machine_mode mode = TYPE_MODE (type);
7974   enum machine_mode orig_mode = mode;
7975 
7976   /* If we cannot do a conditional move on the mode, try doing it
7977      with the promoted mode. */
7978   if (!can_conditionally_move_p (mode))
7979     {
7980       mode = promote_mode (type, mode, &unsignedp);
7981       if (!can_conditionally_move_p (mode))
7982 	return NULL_RTX;
7983       temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
7984     }
7985   else
7986     temp = assign_temp (type, 0, 1);
7987 
7988   start_sequence ();
7989   expand_operands (treeop1, treeop2,
7990 		   temp, &op1, &op2, EXPAND_NORMAL);
7991 
7992   if (TREE_CODE (treeop0) == SSA_NAME
7993       && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7994     {
7995       tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7996       enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7997       op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7998       op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7999       comparison_mode = TYPE_MODE (type);
8000       unsignedp = TYPE_UNSIGNED (type);
8001       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8002     }
8003   else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
8004     {
8005       tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8006       enum tree_code cmpcode = TREE_CODE (treeop0);
8007       op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8008       op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8009       unsignedp = TYPE_UNSIGNED (type);
8010       comparison_mode = TYPE_MODE (type);
8011       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8012     }
8013   else
8014     {
8015       op00 = expand_normal (treeop0);
8016       op01 = const0_rtx;
8017       comparison_code = NE;
8018       comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8019     }
8020 
8021   if (GET_MODE (op1) != mode)
8022     op1 = gen_lowpart (mode, op1);
8023 
8024   if (GET_MODE (op2) != mode)
8025     op2 = gen_lowpart (mode, op2);
8026 
8027   /* Try to emit the conditional move.  */
8028   insn = emit_conditional_move (temp, comparison_code,
8029 				op00, op01, comparison_mode,
8030 				op1, op2, mode,
8031 				unsignedp);
8032 
8033   /* If we could do the conditional move, emit the sequence,
8034      and return.  */
8035   if (insn)
8036     {
8037       rtx seq = get_insns ();
8038       end_sequence ();
8039       emit_insn (seq);
8040       return convert_modes (orig_mode, mode, temp, 0);
8041     }
8042 
8043   /* Otherwise discard the sequence and fall back to code with
8044      branches.  */
8045   end_sequence ();
8046 #endif
8047   return NULL_RTX;
8048 }
8049 
8050 rtx
8051 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
8052 		    enum expand_modifier modifier)
8053 {
8054   rtx op0, op1, op2, temp;
8055   tree type;
8056   int unsignedp;
8057   enum machine_mode mode;
8058   enum tree_code code = ops->code;
8059   optab this_optab;
8060   rtx subtarget, original_target;
8061   int ignore;
8062   bool reduce_bit_field;
8063   location_t loc = ops->location;
8064   tree treeop0, treeop1, treeop2;
8065 #define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
8066 				 ? reduce_to_bit_field_precision ((expr), \
8067 								  target, \
8068 								  type)	  \
8069 				 : (expr))
8070 
8071   type = ops->type;
8072   mode = TYPE_MODE (type);
8073   unsignedp = TYPE_UNSIGNED (type);
8074 
8075   treeop0 = ops->op0;
8076   treeop1 = ops->op1;
8077   treeop2 = ops->op2;
8078 
8079   /* We should be called only on simple (binary or unary) expressions,
8080      exactly those that are valid in gimple expressions that aren't
8081      GIMPLE_SINGLE_RHS (or invalid).  */
8082   gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8083 	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8084 	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8085 
8086   ignore = (target == const0_rtx
8087 	    || ((CONVERT_EXPR_CODE_P (code)
8088 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8089 		&& TREE_CODE (type) == VOID_TYPE));
8090 
8091   /* We should be called only if we need the result.  */
8092   gcc_assert (!ignore);
8093 
8094   /* An operation in what may be a bit-field type needs the
8095      result to be reduced to the precision of the bit-field type,
8096      which is narrower than that of the type's mode.  */
8097   reduce_bit_field = (INTEGRAL_TYPE_P (type)
8098 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
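  /* E.g. an "unsigned int : 3" bit-field type keeps an ordinary integer
     mode whose precision exceeds 3, so every arithmetic result must be
     masked (or sign-extended for signed fields) back to 3 bits by
     REDUCE_BIT_FIELD before it is used.  */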
8099 
8100   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8101     target = 0;
8102 
8103   /* Use subtarget as the target for operand 0 of a binary operation.  */
8104   subtarget = get_subtarget (target);
8105   original_target = target;
8106 
8107   switch (code)
8108     {
8109     case NON_LVALUE_EXPR:
8110     case PAREN_EXPR:
8111     CASE_CONVERT:
8112       if (treeop0 == error_mark_node)
8113 	return const0_rtx;
8114 
8115       if (TREE_CODE (type) == UNION_TYPE)
8116 	{
8117 	  tree valtype = TREE_TYPE (treeop0);
8118 
8119 	  /* If both input and output are BLKmode, this conversion isn't doing
8120 	     anything except possibly changing the memory attributes.  */
8121 	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8122 	    {
8123 	      rtx result = expand_expr (treeop0, target, tmode,
8124 					modifier);
8125 
8126 	      result = copy_rtx (result);
8127 	      set_mem_attributes (result, type, 0);
8128 	      return result;
8129 	    }
8130 
8131 	  if (target == 0)
8132 	    {
8133 	      if (TYPE_MODE (type) != BLKmode)
8134 		target = gen_reg_rtx (TYPE_MODE (type));
8135 	      else
8136 		target = assign_temp (type, 1, 1);
8137 	    }
8138 
8139 	  if (MEM_P (target))
8140 	    /* Store data into beginning of memory target.  */
8141 	    store_expr (treeop0,
8142 			adjust_address (target, TYPE_MODE (valtype), 0),
8143 			modifier == EXPAND_STACK_PARM,
8144 			false);
8145 
8146 	  else
8147 	    {
8148 	      gcc_assert (REG_P (target));
8149 
8150 	      /* Store this field into a union of the proper type.  */
8151 	      store_field (target,
8152 			   MIN ((int_size_in_bytes (TREE_TYPE
8153 						    (treeop0))
8154 				 * BITS_PER_UNIT),
8155 				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8156 			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8157 	    }
8158 
8159 	  /* Return the entire union.  */
8160 	  return target;
8161 	}
8162 
8163       if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8164 	{
8165 	  op0 = expand_expr (treeop0, target, VOIDmode,
8166 			     modifier);
8167 
8168 	  /* If the signedness of the conversion differs and OP0 is
8169 	     a promoted SUBREG, clear that indication since we now
8170 	     have to do the proper extension.  */
8171 	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8172 	      && GET_CODE (op0) == SUBREG)
8173 	    SUBREG_PROMOTED_VAR_P (op0) = 0;
8174 
8175 	  return REDUCE_BIT_FIELD (op0);
8176 	}
8177 
8178       op0 = expand_expr (treeop0, NULL_RTX, mode,
8179 			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8180       if (GET_MODE (op0) == mode)
8181 	;
8182 
8183       /* If OP0 is a constant, just convert it into the proper mode.  */
8184       else if (CONSTANT_P (op0))
8185 	{
8186 	  tree inner_type = TREE_TYPE (treeop0);
8187 	  enum machine_mode inner_mode = GET_MODE (op0);
8188 
8189 	  if (inner_mode == VOIDmode)
8190 	    inner_mode = TYPE_MODE (inner_type);
8191 
8192 	  if (modifier == EXPAND_INITIALIZER)
8193 	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
8194 				       subreg_lowpart_offset (mode,
8195 							      inner_mode));
8196 	  else
8197 	    op0 = convert_modes (mode, inner_mode, op0,
8198 				 TYPE_UNSIGNED (inner_type));
8199 	}
8200 
8201       else if (modifier == EXPAND_INITIALIZER)
8202 	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8203 
8204       else if (target == 0)
8205 	op0 = convert_to_mode (mode, op0,
8206 			       TYPE_UNSIGNED (TREE_TYPE
8207 					      (treeop0)));
8208       else
8209 	{
8210 	  convert_move (target, op0,
8211 			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8212 	  op0 = target;
8213 	}
8214 
8215       return REDUCE_BIT_FIELD (op0);
8216 
8217     case ADDR_SPACE_CONVERT_EXPR:
8218       {
8219 	tree treeop0_type = TREE_TYPE (treeop0);
8220 	addr_space_t as_to;
8221 	addr_space_t as_from;
8222 
8223 	gcc_assert (POINTER_TYPE_P (type));
8224 	gcc_assert (POINTER_TYPE_P (treeop0_type));
8225 
8226 	as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8227 	as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8228 
8229         /* Conversions between pointers to the same address space should
8230 	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
8231 	gcc_assert (as_to != as_from);
8232 
8233         /* Ask target code to handle conversion between pointers
8234 	   to overlapping address spaces.  */
8235 	if (targetm.addr_space.subset_p (as_to, as_from)
8236 	    || targetm.addr_space.subset_p (as_from, as_to))
8237 	  {
8238 	    op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8239 	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8240 	    gcc_assert (op0);
8241 	    return op0;
8242 	  }
8243 
8244 	/* For disjoint address spaces, converting anything but
8245 	   a null pointer invokes undefined behaviour.  We simply
8246 	   always return a null pointer here.  */
8247 	return CONST0_RTX (mode);
8248       }
8249 
8250     case POINTER_PLUS_EXPR:
8251       /* Even though the sizetype mode and the pointer's mode can be
8252          different, expand is able to handle this correctly and get the
8253          correct result out of the PLUS_EXPR code.  */
8254       /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8255          if sizetype precision is smaller than pointer precision.  */
8256       if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8257 	treeop1 = fold_convert_loc (loc, type,
8258 				    fold_convert_loc (loc, ssizetype,
8259 						      treeop1));
8260       /* If sizetype precision is larger than pointer precision, truncate the
8261 	 offset to have matching modes.  */
8262       else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8263 	treeop1 = fold_convert_loc (loc, type, treeop1);
8264 
8265     case PLUS_EXPR:
8266       /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8267 	 something else, make sure we add the register to the constant and
8268 	 then to the other thing.  This case can occur during strength
8269 	 reduction and doing it this way will produce better code if the
8270 	 frame pointer or argument pointer is eliminated.
8271 
8272 	 fold-const.c will ensure that the constant is always in the inner
8273 	 PLUS_EXPR, so the only case we need to do anything about is if
8274 	 sp, ap, or fp is our second argument, in which case we must swap
8275 	 the innermost first argument and our second argument.  */
8276 
8277       if (TREE_CODE (treeop0) == PLUS_EXPR
8278 	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8279 	  && TREE_CODE (treeop1) == VAR_DECL
8280 	  && (DECL_RTL (treeop1) == frame_pointer_rtx
8281 	      || DECL_RTL (treeop1) == stack_pointer_rtx
8282 	      || DECL_RTL (treeop1) == arg_pointer_rtx))
8283 	{
8284 	  gcc_unreachable ();
8285 	}
8286 
8287       /* If the result is to be ptr_mode and we are adding an integer to
8288 	 something, we might be forming a constant.  So try to use
8289 	 plus_constant.  If it produces a sum and we can't accept it,
8290 	 use force_operand.  This allows P = &ARR[const] to generate
8291 	 efficient code on machines where a SYMBOL_REF is not a valid
8292 	 address.
8293 
8294 	 If this is an EXPAND_SUM call, always return the sum.  */
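      /* E.g. for "P = &ARR[3]" with 4-byte elements, plus_constant can
	 fold the address into

	   (const (plus (symbol_ref ARR) (const_int 12)))

	 and force_operand is applied only when the modifier demands a
	 directly usable operand.  */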
8295       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8296 	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8297 	{
8298 	  if (modifier == EXPAND_STACK_PARM)
8299 	    target = 0;
8300 	  if (TREE_CODE (treeop0) == INTEGER_CST
8301 	      && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8302 	      && TREE_CONSTANT (treeop1))
8303 	    {
8304 	      rtx constant_part;
8305 
8306 	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
8307 				 EXPAND_SUM);
8308 	      /* Use immed_double_const to ensure that the constant is
8309 		 truncated according to the mode of OP1, then sign extended
8310 		 to a HOST_WIDE_INT.  Using the constant directly can result
8311 		 in non-canonical RTL in a 64x32 cross compile.  */
8312 	      constant_part
8313 		= immed_double_const (TREE_INT_CST_LOW (treeop0),
8314 				      (HOST_WIDE_INT) 0,
8315 				      TYPE_MODE (TREE_TYPE (treeop1)));
8316 	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
8317 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8318 		op1 = force_operand (op1, target);
8319 	      return REDUCE_BIT_FIELD (op1);
8320 	    }
8321 
8322 	  else if (TREE_CODE (treeop1) == INTEGER_CST
8323 		   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8324 		   && TREE_CONSTANT (treeop0))
8325 	    {
8326 	      rtx constant_part;
8327 
8328 	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
8329 				 (modifier == EXPAND_INITIALIZER
8330 				 ? EXPAND_INITIALIZER : EXPAND_SUM));
8331 	      if (! CONSTANT_P (op0))
8332 		{
8333 		  op1 = expand_expr (treeop1, NULL_RTX,
8334 				     VOIDmode, modifier);
8335 		  /* Return a PLUS if modifier says it's OK.  */
8336 		  if (modifier == EXPAND_SUM
8337 		      || modifier == EXPAND_INITIALIZER)
8338 		    return simplify_gen_binary (PLUS, mode, op0, op1);
8339 		  goto binop2;
8340 		}
8341 	      /* Use immed_double_const to ensure that the constant is
8342 		 truncated according to the mode of OP1, then sign extended
8343 		 to a HOST_WIDE_INT.  Using the constant directly can result
8344 		 in non-canonical RTL in a 64x32 cross compile.  */
8345 	      constant_part
8346 		= immed_double_const (TREE_INT_CST_LOW (treeop1),
8347 				      (HOST_WIDE_INT) 0,
8348 				      TYPE_MODE (TREE_TYPE (treeop0)));
8349 	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
8350 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8351 		op0 = force_operand (op0, target);
8352 	      return REDUCE_BIT_FIELD (op0);
8353 	    }
8354 	}
8355 
8356       /* Use TER to expand pointer addition of a negated value
8357 	 as pointer subtraction.  */
8358       if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8359 	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8360 	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8361 	  && TREE_CODE (treeop1) == SSA_NAME
8362 	  && TYPE_MODE (TREE_TYPE (treeop0))
8363 	     == TYPE_MODE (TREE_TYPE (treeop1)))
8364 	{
8365 	  gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8366 	  if (def)
8367 	    {
8368 	      treeop1 = gimple_assign_rhs1 (def);
8369 	      code = MINUS_EXPR;
8370 	      goto do_minus;
8371 	    }
8372 	}
8373 
8374       /* No sense saving up arithmetic to be done
8375 	 if it's all in the wrong mode to form part of an address.
8376 	 And force_operand won't know whether to sign-extend or
8377 	 zero-extend.  */
8378       if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8379 	  || mode != ptr_mode)
8380 	{
8381 	  expand_operands (treeop0, treeop1,
8382 			   subtarget, &op0, &op1, EXPAND_NORMAL);
8383 	  if (op0 == const0_rtx)
8384 	    return op1;
8385 	  if (op1 == const0_rtx)
8386 	    return op0;
8387 	  goto binop2;
8388 	}
8389 
8390       expand_operands (treeop0, treeop1,
8391 		       subtarget, &op0, &op1, modifier);
8392       return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8393 
8394     case MINUS_EXPR:
8395     do_minus:
8396       /* For initializers, we are allowed to return a MINUS of two
8397 	 symbolic constants.  Here we handle all cases when both operands
8398 	 are constant.  */
8399       /* Handle difference of two symbolic constants,
8400 	 for the sake of an initializer.  */
8401       if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8402 	  && really_constant_p (treeop0)
8403 	  && really_constant_p (treeop1))
8404 	{
8405 	  expand_operands (treeop0, treeop1,
8406 			   NULL_RTX, &op0, &op1, modifier);
8407 
8408 	  /* If the last operand is a CONST_INT, use plus_constant of
8409 	     the negated constant.  Else make the MINUS.  */
8410 	  if (CONST_INT_P (op1))
8411 	    return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8412 						    -INTVAL (op1)));
8413 	  else
8414 	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8415 	}
8416 
8417       /* No sense saving up arithmetic to be done
8418 	 if it's all in the wrong mode to form part of an address.
8419 	 And force_operand won't know whether to sign-extend or
8420 	 zero-extend.  */
8421       if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8422 	  || mode != ptr_mode)
8423 	goto binop;
8424 
8425       expand_operands (treeop0, treeop1,
8426 		       subtarget, &op0, &op1, modifier);
8427 
8428       /* Convert A - const to A + (-const).  */
8429       if (CONST_INT_P (op1))
8430 	{
8431 	  op1 = negate_rtx (mode, op1);
8432 	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8433 	}
8434 
8435       goto binop2;
8436 
8437     case WIDEN_MULT_PLUS_EXPR:
8438     case WIDEN_MULT_MINUS_EXPR:
8439       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8440       op2 = expand_normal (treeop2);
8441       target = expand_widen_pattern_expr (ops, op0, op1, op2,
8442 					  target, unsignedp);
8443       return target;
8444 
8445     case WIDEN_MULT_EXPR:
8446       /* If first operand is constant, swap them.
8447 	 Thus the following special case checks need only
8448 	 check the second operand.  */
8449       if (TREE_CODE (treeop0) == INTEGER_CST)
8450 	{
8451 	  tree t1 = treeop0;
8452 	  treeop0 = treeop1;
8453 	  treeop1 = t1;
8454 	}
8455 
8456       /* First, check if we have a multiplication of one signed and one
8457 	 unsigned operand.  */
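      /* E.g. "(long long) si * (long long) ui" with one signed and one
	 unsigned 32-bit source can use a signed-by-unsigned widening
	 multiply (a usmulsidi3-style pattern) if the target provides
	 one.  */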
8458       if (TREE_CODE (treeop1) != INTEGER_CST
8459 	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8460 	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8461 	{
8462 	  enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8463 	  this_optab = usmul_widen_optab;
8464 	  if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8465 		!= CODE_FOR_nothing)
8466 	    {
8467 	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8468 		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8469 				 EXPAND_NORMAL);
8470 	      else
8471 		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8472 				 EXPAND_NORMAL);
8473 	      /* op0 and op1 might still be constant, despite the above
8474 		 != INTEGER_CST check.  Handle it.  */
8475 	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8476 		{
8477 		  op0 = convert_modes (innermode, mode, op0, true);
8478 		  op1 = convert_modes (innermode, mode, op1, false);
8479 		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8480 							target, unsignedp));
8481 		}
8482 	      goto binop3;
8483 	    }
8484 	}
8485       /* Check for a multiplication with matching signedness.  */
8486       else if ((TREE_CODE (treeop1) == INTEGER_CST
8487 		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8488 	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8489 		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8490 	{
8491 	  tree op0type = TREE_TYPE (treeop0);
8492 	  enum machine_mode innermode = TYPE_MODE (op0type);
8493 	  bool zextend_p = TYPE_UNSIGNED (op0type);
8494 	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8495 	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8496 
8497 	  if (TREE_CODE (treeop0) != INTEGER_CST)
8498 	    {
8499 	      if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8500 		    != CODE_FOR_nothing)
8501 		{
8502 		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8503 				   EXPAND_NORMAL);
8504 		  /* op0 and op1 might still be constant, despite the above
8505 		     != INTEGER_CST check.  Handle it.  */
8506 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8507 		    {
8508 		     widen_mult_const:
8509 		      op0 = convert_modes (innermode, mode, op0, zextend_p);
8510 		      op1
8511 			= convert_modes (innermode, mode, op1,
8512 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8513 		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8514 							    target,
8515 							    unsignedp));
8516 		    }
8517 		  temp = expand_widening_mult (mode, op0, op1, target,
8518 					       unsignedp, this_optab);
8519 		  return REDUCE_BIT_FIELD (temp);
8520 		}
8521 	      if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8522 		    != CODE_FOR_nothing
8523 		  && innermode == word_mode)
8524 		{
8525 		  rtx htem, hipart;
8526 		  op0 = expand_normal (treeop0);
8527 		  if (TREE_CODE (treeop1) == INTEGER_CST)
8528 		    op1 = convert_modes (innermode, mode,
8529 					 expand_normal (treeop1),
8530 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8531 		  else
8532 		    op1 = expand_normal (treeop1);
8533 		  /* op0 and op1 might still be constant, despite the above
8534 		     != INTEGER_CST check.  Handle it.  */
8535 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8536 		    goto widen_mult_const;
8537 		  temp = expand_binop (mode, other_optab, op0, op1, target,
8538 				       unsignedp, OPTAB_LIB_WIDEN);
8539 		  hipart = gen_highpart (innermode, temp);
8540 		  htem = expand_mult_highpart_adjust (innermode, hipart,
8541 						      op0, op1, hipart,
8542 						      zextend_p);
8543 		  if (htem != hipart)
8544 		    emit_move_insn (hipart, htem);
8545 		  return REDUCE_BIT_FIELD (temp);
8546 		}
8547 	    }
8548 	}
8549       treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8550       treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8551       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8552       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8553 
8554     case FMA_EXPR:
8555       {
8556 	optab opt = fma_optab;
8557 	gimple def0, def2;
8558 
8559 	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8560 	   call.  */
8561 	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8562 	  {
8563 	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8564 	    tree call_expr;
8565 
8566 	    gcc_assert (fn != NULL_TREE);
8567 	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8568 	    return expand_builtin (call_expr, target, subtarget, mode, false);
8569 	  }
8570 
8571 	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8572 	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8573 
8574 	op0 = op2 = NULL;
8575 
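	/* A negated operand found above selects a fused variant instead:
	   roughly, fms computes a*b - c, fnma computes -(a*b) + c, and
	   fnms computes -(a*b) - c, so the NEGATE_EXPR itself never has
	   to be expanded separately.  */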
8576 	if (def0 && def2
8577 	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8578 	  {
8579 	    opt = fnms_optab;
8580 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8581 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8582 	  }
8583 	else if (def0
8584 		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8585 	  {
8586 	    opt = fnma_optab;
8587 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8588 	  }
8589 	else if (def2
8590 		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8591 	  {
8592 	    opt = fms_optab;
8593 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8594 	  }
8595 
8596 	if (op0 == NULL)
8597 	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8598 	if (op2 == NULL)
8599 	  op2 = expand_normal (treeop2);
8600 	op1 = expand_normal (treeop1);
8601 
8602 	return expand_ternary_op (TYPE_MODE (type), opt,
8603 				  op0, op1, op2, target, 0);
8604       }
8605 
8606     case MULT_EXPR:
8607       /* If this is a fixed-point operation, then we cannot use the code
8608 	 below because "expand_mult" doesn't support sat/no-sat fixed-point
8609          multiplications.   */
8610       if (ALL_FIXED_POINT_MODE_P (mode))
8611 	goto binop;
8612 
8613       /* If first operand is constant, swap them.
8614 	 Thus the following special case checks need only
8615 	 check the second operand.  */
8616       if (TREE_CODE (treeop0) == INTEGER_CST)
8617 	{
8618 	  tree t1 = treeop0;
8619 	  treeop0 = treeop1;
8620 	  treeop1 = t1;
8621 	}
8622 
8623       /* Attempt to return something suitable for generating an
8624 	 indexed address, for machines that support that.  */
8625 
8626       if (modifier == EXPAND_SUM && mode == ptr_mode
8627 	  && tree_fits_shwi_p (treeop1))
8628 	{
8629 	  tree exp1 = treeop1;
8630 
8631 	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
8632 			     EXPAND_SUM);
8633 
8634 	  if (!REG_P (op0))
8635 	    op0 = force_operand (op0, NULL_RTX);
8636 	  if (!REG_P (op0))
8637 	    op0 = copy_to_mode_reg (mode, op0);
8638 
8639 	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8640 			       gen_int_mode (tree_to_shwi (exp1),
8641 					     TYPE_MODE (TREE_TYPE (exp1)))));
8642 	}
8643 
8644       if (modifier == EXPAND_STACK_PARM)
8645 	target = 0;
8646 
8647       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8648       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8649 
8650     case TRUNC_DIV_EXPR:
8651     case FLOOR_DIV_EXPR:
8652     case CEIL_DIV_EXPR:
8653     case ROUND_DIV_EXPR:
8654     case EXACT_DIV_EXPR:
8655       /* If this is a fixed-point operation, then we cannot use the code
8656 	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8657          divisions.   */
8658       if (ALL_FIXED_POINT_MODE_P (mode))
8659 	goto binop;
8660 
8661       if (modifier == EXPAND_STACK_PARM)
8662 	target = 0;
8663       /* Possible optimization: compute the dividend with EXPAND_SUM
8664 	 then if the divisor is constant can optimize the case
8665 	 where some terms of the dividend have coeffs divisible by it.  */
8666       expand_operands (treeop0, treeop1,
8667 		       subtarget, &op0, &op1, EXPAND_NORMAL);
8668       return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8669 
8670     case RDIV_EXPR:
8671       goto binop;
8672 
8673     case MULT_HIGHPART_EXPR:
8674       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8675       temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8676       gcc_assert (temp);
8677       return temp;
8678 
8679     case TRUNC_MOD_EXPR:
8680     case FLOOR_MOD_EXPR:
8681     case CEIL_MOD_EXPR:
8682     case ROUND_MOD_EXPR:
8683       if (modifier == EXPAND_STACK_PARM)
8684 	target = 0;
8685       expand_operands (treeop0, treeop1,
8686 		       subtarget, &op0, &op1, EXPAND_NORMAL);
8687       return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8688 
8689     case FIXED_CONVERT_EXPR:
8690       op0 = expand_normal (treeop0);
8691       if (target == 0 || modifier == EXPAND_STACK_PARM)
8692 	target = gen_reg_rtx (mode);
8693 
8694       if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8695 	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8696           || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8697 	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8698       else
8699 	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8700       return target;
8701 
8702     case FIX_TRUNC_EXPR:
8703       op0 = expand_normal (treeop0);
8704       if (target == 0 || modifier == EXPAND_STACK_PARM)
8705 	target = gen_reg_rtx (mode);
8706       expand_fix (target, op0, unsignedp);
8707       return target;
8708 
8709     case FLOAT_EXPR:
8710       op0 = expand_normal (treeop0);
8711       if (target == 0 || modifier == EXPAND_STACK_PARM)
8712 	target = gen_reg_rtx (mode);
8713       /* expand_float can't figure out what to do if FROM has VOIDmode.
8714 	 So give it the correct mode.  With -O, cse will optimize this.  */
8715       if (GET_MODE (op0) == VOIDmode)
8716 	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8717 				op0);
8718       expand_float (target, op0,
8719 		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8720       return target;
8721 
8722     case NEGATE_EXPR:
8723       op0 = expand_expr (treeop0, subtarget,
8724 			 VOIDmode, EXPAND_NORMAL);
8725       if (modifier == EXPAND_STACK_PARM)
8726 	target = 0;
8727       temp = expand_unop (mode,
8728       			  optab_for_tree_code (NEGATE_EXPR, type,
8729 					       optab_default),
8730 			  op0, target, 0);
8731       gcc_assert (temp);
8732       return REDUCE_BIT_FIELD (temp);
8733 
8734     case ABS_EXPR:
8735       op0 = expand_expr (treeop0, subtarget,
8736 			 VOIDmode, EXPAND_NORMAL);
8737       if (modifier == EXPAND_STACK_PARM)
8738 	target = 0;
8739 
8740       /* ABS_EXPR is not valid for complex arguments.  */
8741       gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8742 		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8743 
8744       /* Unsigned abs is simply the operand.  Testing here means we don't
8745 	 risk generating incorrect code below.  */
8746       if (TYPE_UNSIGNED (type))
8747 	return op0;
8748 
8749       return expand_abs (mode, op0, target, unsignedp,
8750 			 safe_from_p (target, treeop0, 1));
8751 
8752     case MAX_EXPR:
8753     case MIN_EXPR:
8754       target = original_target;
8755       if (target == 0
8756 	  || modifier == EXPAND_STACK_PARM
8757 	  || (MEM_P (target) && MEM_VOLATILE_P (target))
8758 	  || GET_MODE (target) != mode
8759 	  || (REG_P (target)
8760 	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8761 	target = gen_reg_rtx (mode);
8762       expand_operands (treeop0, treeop1,
8763 		       target, &op0, &op1, EXPAND_NORMAL);
8764 
8765       /* First try to do it with a special MIN or MAX instruction.
8766 	 If that does not win, use a conditional jump to select the proper
8767 	 value.  */
8768       this_optab = optab_for_tree_code (code, type, optab_default);
8769       temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8770 			   OPTAB_WIDEN);
8771       if (temp != 0)
8772 	return temp;
8773 
8774       /* At this point, a MEM target is no longer useful; we will get better
8775 	 code without it.  */
8776 
8777       if (! REG_P (target))
8778 	target = gen_reg_rtx (mode);
8779 
8780       /* If op1 was placed in target, swap op0 and op1.  */
8781       if (target != op0 && target == op1)
8782 	{
8783 	  temp = op0;
8784 	  op0 = op1;
8785 	  op1 = temp;
8786 	}
8787 
8788       /* We generate better code and avoid problems with op1 mentioning
8789 	 target by forcing op1 into a pseudo if it isn't a constant.  */
8790       if (! CONSTANT_P (op1))
8791 	op1 = force_reg (mode, op1);
8792 
8793       {
8794 	enum rtx_code comparison_code;
8795 	rtx cmpop1 = op1;
8796 
8797 	if (code == MAX_EXPR)
8798 	  comparison_code = unsignedp ? GEU : GE;
8799 	else
8800 	  comparison_code = unsignedp ? LEU : LE;
8801 
8802 	/* Canonicalize to comparisons against 0.  */
8803 	if (op1 == const1_rtx)
8804 	  {
8805 	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8806 	       or (a != 0 ? a : 1) for unsigned.
8807 	       For MIN we are safe converting (a <= 1 ? a : 1)
8808 	       into (a <= 0 ? a : 1)  */
8809 	    cmpop1 = const0_rtx;
8810 	    if (code == MAX_EXPR)
8811 	      comparison_code = unsignedp ? NE : GT;
8812 	  }
8813 	if (op1 == constm1_rtx && !unsignedp)
8814 	  {
8815 	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8816 	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8817 	    cmpop1 = const0_rtx;
8818 	    if (code == MIN_EXPR)
8819 	      comparison_code = LT;
8820 	  }
8821 #ifdef HAVE_conditional_move
8822 	/* Use a conditional move if possible.  */
8823 	if (can_conditionally_move_p (mode))
8824 	  {
8825 	    rtx insn;
8826 
8827 	    start_sequence ();
8828 
8829 	    /* Try to emit the conditional move.  */
8830 	    insn = emit_conditional_move (target, comparison_code,
8831 					  op0, cmpop1, mode,
8832 					  op0, op1, mode,
8833 					  unsignedp);
8834 
8835 	    /* If we could do the conditional move, emit the sequence,
8836 	       and return.  */
8837 	    if (insn)
8838 	      {
8839 		rtx seq = get_insns ();
8840 		end_sequence ();
8841 		emit_insn (seq);
8842 		return target;
8843 	      }
8844 
8845 	    /* Otherwise discard the sequence and fall back to code with
8846 	       branches.  */
8847 	    end_sequence ();
8848 	  }
8849 #endif
8850 	if (target != op0)
8851 	  emit_move_insn (target, op0);
8852 
8853 	temp = gen_label_rtx ();
8854 	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8855 				 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8856 				 -1);
8857       }
8858       emit_move_insn (target, op1);
8859       emit_label (temp);
8860       return target;
8861 
8862     case BIT_NOT_EXPR:
8863       op0 = expand_expr (treeop0, subtarget,
8864 			 VOIDmode, EXPAND_NORMAL);
8865       if (modifier == EXPAND_STACK_PARM)
8866 	target = 0;
8867       /* In case we have to reduce the result to bitfield precision
8868 	 for unsigned bitfield expand this as XOR with a proper constant
8869 	 instead.  */
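      /* E.g. for a 3-bit unsigned bit-field this computes x ^ 7, i.e. an
	 XOR with double_int::mask (3), which keeps the result within the
	 field's precision without a separate truncation.  */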
8870       if (reduce_bit_field && TYPE_UNSIGNED (type))
8871 	temp = expand_binop (mode, xor_optab, op0,
8872 			     immed_double_int_const
8873 			       (double_int::mask (TYPE_PRECISION (type)), mode),
8874 			     target, 1, OPTAB_LIB_WIDEN);
8875       else
8876 	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8877       gcc_assert (temp);
8878       return temp;
8879 
8880       /* ??? Can optimize bitwise operations with one arg constant.
8881 	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8882 	 and (a bitwise1 b) bitwise2 b (etc)
8883 	 but that is probably not worthwhile.  */
8884 
8885     case BIT_AND_EXPR:
8886     case BIT_IOR_EXPR:
8887     case BIT_XOR_EXPR:
8888       goto binop;
8889 
8890     case LROTATE_EXPR:
8891     case RROTATE_EXPR:
8892       gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8893 		  || (GET_MODE_PRECISION (TYPE_MODE (type))
8894 		      == TYPE_PRECISION (type)));
8895       /* fall through */
8896 
8897     case LSHIFT_EXPR:
8898     case RSHIFT_EXPR:
8899       /* If this is a fixed-point operation, then we cannot use the code
8900 	 below because "expand_shift" doesn't support sat/no-sat fixed-point
8901          shifts.   */
8902       if (ALL_FIXED_POINT_MODE_P (mode))
8903 	goto binop;
8904 
8905       if (! safe_from_p (subtarget, treeop1, 1))
8906 	subtarget = 0;
8907       if (modifier == EXPAND_STACK_PARM)
8908 	target = 0;
8909       op0 = expand_expr (treeop0, subtarget,
8910 			 VOIDmode, EXPAND_NORMAL);
8911       temp = expand_variable_shift (code, mode, op0, treeop1, target,
8912 				    unsignedp);
8913       if (code == LSHIFT_EXPR)
8914 	temp = REDUCE_BIT_FIELD (temp);
8915       return temp;
8916 
8917       /* Could determine the answer when only additive constants differ.  Also,
8918 	 the addition of one can be handled by changing the condition.  */
8919     case LT_EXPR:
8920     case LE_EXPR:
8921     case GT_EXPR:
8922     case GE_EXPR:
8923     case EQ_EXPR:
8924     case NE_EXPR:
8925     case UNORDERED_EXPR:
8926     case ORDERED_EXPR:
8927     case UNLT_EXPR:
8928     case UNLE_EXPR:
8929     case UNGT_EXPR:
8930     case UNGE_EXPR:
8931     case UNEQ_EXPR:
8932     case LTGT_EXPR:
8933       temp = do_store_flag (ops,
8934 			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8935 			    tmode != VOIDmode ? tmode : mode);
8936       if (temp)
8937 	return temp;
8938 
8939       /* Use a compare and a jump for BLKmode comparisons, or for function
8940 	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */
8941 
8942       if ((target == 0
8943 	   || modifier == EXPAND_STACK_PARM
8944 	   || ! safe_from_p (target, treeop0, 1)
8945 	   || ! safe_from_p (target, treeop1, 1)
8946 	   /* Make sure we don't have a hard reg (such as function's return
8947 	      value) live across basic blocks, if not optimizing.  */
8948 	   || (!optimize && REG_P (target)
8949 	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8950 	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8951 
8952       emit_move_insn (target, const0_rtx);
8953 
8954       op1 = gen_label_rtx ();
8955       jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8956 
8957       if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8958 	emit_move_insn (target, constm1_rtx);
8959       else
8960 	emit_move_insn (target, const1_rtx);
8961 
8962       emit_label (op1);
8963       return target;
8964 
8965     case COMPLEX_EXPR:
8966       /* Get the rtx code of the operands.  */
8967       op0 = expand_normal (treeop0);
8968       op1 = expand_normal (treeop1);
8969 
8970       if (!target)
8971 	target = gen_reg_rtx (TYPE_MODE (type));
8972       else
8973 	/* If target overlaps with op1, then either we need to force
8974 	   op1 into a pseudo (if target also overlaps with op0),
8975 	   or write the complex parts in reverse order.  */
8976 	switch (GET_CODE (target))
8977 	  {
8978 	  case CONCAT:
8979 	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8980 	      {
8981 		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8982 		  {
8983 		  complex_expr_force_op1:
8984 		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8985 		    emit_move_insn (temp, op1);
8986 		    op1 = temp;
8987 		    break;
8988 		  }
8989 	      complex_expr_swap_order:
8990 		/* Move the imaginary (op1) and real (op0) parts to their
8991 		   location.  */
8992 		write_complex_part (target, op1, true);
8993 		write_complex_part (target, op0, false);
8994 
8995 		return target;
8996 	      }
8997 	    break;
8998 	  case MEM:
8999 	    temp = adjust_address_nv (target,
9000 				      GET_MODE_INNER (GET_MODE (target)), 0);
9001 	    if (reg_overlap_mentioned_p (temp, op1))
9002 	      {
9003 		enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9004 		temp = adjust_address_nv (target, imode,
9005 					  GET_MODE_SIZE (imode));
9006 		if (reg_overlap_mentioned_p (temp, op0))
9007 		  goto complex_expr_force_op1;
9008 		goto complex_expr_swap_order;
9009 	      }
9010 	    break;
9011 	  default:
9012 	    if (reg_overlap_mentioned_p (target, op1))
9013 	      {
9014 		if (reg_overlap_mentioned_p (target, op0))
9015 		  goto complex_expr_force_op1;
9016 		goto complex_expr_swap_order;
9017 	      }
9018 	    break;
9019 	  }
9020 
9021       /* Move the real (op0) and imaginary (op1) parts to their location.  */
9022       write_complex_part (target, op0, false);
9023       write_complex_part (target, op1, true);
9024 
9025       return target;
9026 
9027     case WIDEN_SUM_EXPR:
9028       {
9029         tree oprnd0 = treeop0;
9030         tree oprnd1 = treeop1;
9031 
9032         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9033         target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9034                                             target, unsignedp);
9035         return target;
9036       }
9037 
9038     case REDUC_MAX_EXPR:
9039     case REDUC_MIN_EXPR:
9040     case REDUC_PLUS_EXPR:
9041       {
9042         op0 = expand_normal (treeop0);
9043         this_optab = optab_for_tree_code (code, type, optab_default);
9044         temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9045         gcc_assert (temp);
9046         return temp;
9047       }
9048 
9049     case VEC_LSHIFT_EXPR:
9050     case VEC_RSHIFT_EXPR:
9051       {
9052 	target = expand_vec_shift_expr (ops, target);
9053 	return target;
9054       }
9055 
9056     case VEC_UNPACK_HI_EXPR:
9057     case VEC_UNPACK_LO_EXPR:
9058       {
9059 	op0 = expand_normal (treeop0);
9060 	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9061 					  target, unsignedp);
9062 	gcc_assert (temp);
9063 	return temp;
9064       }
9065 
9066     case VEC_UNPACK_FLOAT_HI_EXPR:
9067     case VEC_UNPACK_FLOAT_LO_EXPR:
9068       {
9069 	op0 = expand_normal (treeop0);
9070 	/* The signedness is determined from input operand.  */
9071 	temp = expand_widen_pattern_expr
9072 	  (ops, op0, NULL_RTX, NULL_RTX,
9073 	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9074 
9075 	gcc_assert (temp);
9076 	return temp;
9077       }
9078 
9079     case VEC_WIDEN_MULT_HI_EXPR:
9080     case VEC_WIDEN_MULT_LO_EXPR:
9081     case VEC_WIDEN_MULT_EVEN_EXPR:
9082     case VEC_WIDEN_MULT_ODD_EXPR:
9083     case VEC_WIDEN_LSHIFT_HI_EXPR:
9084     case VEC_WIDEN_LSHIFT_LO_EXPR:
9085       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9086       target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9087 					  target, unsignedp);
9088       gcc_assert (target);
9089       return target;
9090 
9091     case VEC_PACK_TRUNC_EXPR:
9092     case VEC_PACK_SAT_EXPR:
9093     case VEC_PACK_FIX_TRUNC_EXPR:
9094       mode = TYPE_MODE (TREE_TYPE (treeop0));
9095       goto binop;
9096 
9097     case VEC_PERM_EXPR:
9098       expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9099       op2 = expand_normal (treeop2);
9100 
9101       /* Careful here: if the target doesn't support integral vector modes,
9102 	 a constant selection vector could wind up smooshed into a normal
9103 	 integral constant.  */
9104       if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9105 	{
9106 	  tree sel_type = TREE_TYPE (treeop2);
9107 	  enum machine_mode vmode
9108 	    = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9109 			       TYPE_VECTOR_SUBPARTS (sel_type));
9110 	  gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9111 	  op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9112 	  gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9113 	}
9114       else
9115         gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9116 
9117       temp = expand_vec_perm (mode, op0, op1, op2, target);
9118       gcc_assert (temp);
9119       return temp;
9120 
9121     case DOT_PROD_EXPR:
9122       {
9123 	tree oprnd0 = treeop0;
9124 	tree oprnd1 = treeop1;
9125 	tree oprnd2 = treeop2;
9126 	rtx op2;
9127 
9128 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9129 	op2 = expand_normal (oprnd2);
9130 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9131 					    target, unsignedp);
9132 	return target;
9133       }
9134 
9135     case REALIGN_LOAD_EXPR:
9136       {
9137         tree oprnd0 = treeop0;
9138         tree oprnd1 = treeop1;
9139         tree oprnd2 = treeop2;
9140         rtx op2;
9141 
9142         this_optab = optab_for_tree_code (code, type, optab_default);
9143         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9144         op2 = expand_normal (oprnd2);
9145         temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9146 				  target, unsignedp);
9147         gcc_assert (temp);
9148         return temp;
9149       }
9150 
9151     case COND_EXPR:
9152       /* A COND_EXPR with its type being VOID_TYPE represents a
9153 	 conditional jump and is handled in
9154 	 expand_gimple_cond_expr.  */
9155       gcc_assert (!VOID_TYPE_P (type));
9156 
9157       /* Note that COND_EXPRs whose type is a structure or union
9158 	 are required to be constructed to contain assignments of
9159 	 a temporary variable, so that we can evaluate them here
9160 	 for side effect only.  If type is void, we must do likewise.  */
9161 
9162       gcc_assert (!TREE_ADDRESSABLE (type)
9163 		  && !ignore
9164 		  && TREE_TYPE (treeop1) != void_type_node
9165 		  && TREE_TYPE (treeop2) != void_type_node);
9166 
9167       temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9168       if (temp)
9169 	return temp;
9170 
9171       /* If we are not to produce a result, we have no target.  Otherwise,
9172 	 if a target was specified use it; it will not be used as an
9173 	 intermediate target unless it is safe.  If no target, use a
9174 	 temporary.  */
9175 
9176       if (modifier != EXPAND_STACK_PARM
9177 	  && original_target
9178 	  && safe_from_p (original_target, treeop0, 1)
9179 	  && GET_MODE (original_target) == mode
9180 	  && !MEM_P (original_target))
9181 	temp = original_target;
9182       else
9183 	temp = assign_temp (type, 0, 1);
9184 
9185       do_pending_stack_adjust ();
9186       NO_DEFER_POP;
9187       op0 = gen_label_rtx ();
9188       op1 = gen_label_rtx ();
9189       jumpifnot (treeop0, op0, -1);
9190       store_expr (treeop1, temp,
9191 		  modifier == EXPAND_STACK_PARM,
9192 		  false);
9193 
9194       emit_jump_insn (gen_jump (op1));
9195       emit_barrier ();
9196       emit_label (op0);
9197       store_expr (treeop2, temp,
9198 		  modifier == EXPAND_STACK_PARM,
9199 		  false);
9200 
9201       emit_label (op1);
9202       OK_DEFER_POP;
9203       return temp;
9204 
9205     case VEC_COND_EXPR:
9206       target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9207       return target;
9208 
9209     default:
9210       gcc_unreachable ();
9211     }
9212 
9213   /* Here to do an ordinary binary operator.  */
9214  binop:
9215   expand_operands (treeop0, treeop1,
9216 		   subtarget, &op0, &op1, EXPAND_NORMAL);
9217  binop2:
9218   this_optab = optab_for_tree_code (code, type, optab_default);
9219  binop3:
9220   if (modifier == EXPAND_STACK_PARM)
9221     target = 0;
9222   temp = expand_binop (mode, this_optab, op0, op1, target,
9223 		       unsignedp, OPTAB_LIB_WIDEN);
9224   gcc_assert (temp);
9225   /* Bitwise operations do not need bitfield reduction as we expect their
9226      operands being properly truncated.  */
9227   if (code == BIT_XOR_EXPR
9228       || code == BIT_AND_EXPR
9229       || code == BIT_IOR_EXPR)
9230     return temp;
9231   return REDUCE_BIT_FIELD (temp);
9232 }
9233 #undef REDUCE_BIT_FIELD
9234 
9235 
9236 /* Return TRUE if expression STMT is suitable for replacement.
9237    Never consider memory loads as replaceable, because those don't ever lead
9238    into constant expressions.  */
9239 
9240 static bool
9241 stmt_is_replaceable_p (gimple stmt)
9242 {
9243   if (ssa_is_replaceable_p (stmt))
9244     {
9245       /* Don't move around loads.  */
9246       if (!gimple_assign_single_p (stmt)
9247 	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
9248 	return true;
9249     }
9250   return false;
9251 }
9252 
9253 rtx
9254 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9255 		    enum expand_modifier modifier, rtx *alt_rtl,
9256 		    bool inner_reference_p)
9257 {
9258   rtx op0, op1, temp, decl_rtl;
9259   tree type;
9260   int unsignedp;
9261   enum machine_mode mode;
9262   enum tree_code code = TREE_CODE (exp);
9263   rtx subtarget, original_target;
9264   int ignore;
9265   tree context;
9266   bool reduce_bit_field;
9267   location_t loc = EXPR_LOCATION (exp);
9268   struct separate_ops ops;
9269   tree treeop0, treeop1, treeop2;
9270   tree ssa_name = NULL_TREE;
9271   gimple g;
9272 
9273   type = TREE_TYPE (exp);
9274   mode = TYPE_MODE (type);
9275   unsignedp = TYPE_UNSIGNED (type);
9276 
9277   treeop0 = treeop1 = treeop2 = NULL_TREE;
9278   if (!VL_EXP_CLASS_P (exp))
9279     switch (TREE_CODE_LENGTH (code))
9280       {
9281 	default:
9282 	case 3: treeop2 = TREE_OPERAND (exp, 2);
9283 	case 2: treeop1 = TREE_OPERAND (exp, 1);
9284 	case 1: treeop0 = TREE_OPERAND (exp, 0);
9285 	case 0: break;
9286       }
9287   ops.code = code;
9288   ops.type = type;
9289   ops.op0 = treeop0;
9290   ops.op1 = treeop1;
9291   ops.op2 = treeop2;
9292   ops.location = loc;
9293 
9294   ignore = (target == const0_rtx
9295 	    || ((CONVERT_EXPR_CODE_P (code)
9296 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9297 		&& TREE_CODE (type) == VOID_TYPE));
9298 
9299   /* An operation in what may be a bit-field type needs the
9300      result to be reduced to the precision of the bit-field type,
9301      which is narrower than that of the type's mode.  */
9302   reduce_bit_field = (!ignore
9303 		      && INTEGRAL_TYPE_P (type)
9304 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
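  /* Illustrative example (not from the original sources): a C bit-field such
     as "int f : 3" has TYPE_PRECISION 3 but is carried in a wider integer
     mode, so results computed in that mode must be truncated back to 3 bits
     before they can be used as values of the bit-field type.  */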
9305 
9306   /* If we are going to ignore this result, we need only do something
9307      if there is a side-effect somewhere in the expression.  If there
9308      is, short-circuit the most common cases here.  Note that we must
9309      not call expand_expr with anything but const0_rtx in case this
9310      is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
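  /* For instance (illustrative assumption), a call statement whose return
     value is unused is expanded with TARGET == const0_rtx, and only the side
     effects of the call and its arguments need to be produced here.  */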
9311 
9312   if (ignore)
9313     {
9314       if (! TREE_SIDE_EFFECTS (exp))
9315 	return const0_rtx;
9316 
9317       /* Ensure we reference a volatile object even if value is ignored, but
9318 	 don't do this if all we are doing is taking its address.  */
9319       if (TREE_THIS_VOLATILE (exp)
9320 	  && TREE_CODE (exp) != FUNCTION_DECL
9321 	  && mode != VOIDmode && mode != BLKmode
9322 	  && modifier != EXPAND_CONST_ADDRESS)
9323 	{
9324 	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9325 	  if (MEM_P (temp))
9326 	    copy_to_reg (temp);
9327 	  return const0_rtx;
9328 	}
9329 
9330       if (TREE_CODE_CLASS (code) == tcc_unary
9331 	  || code == BIT_FIELD_REF
9332 	  || code == COMPONENT_REF
9333 	  || code == INDIRECT_REF)
9334 	return expand_expr (treeop0, const0_rtx, VOIDmode,
9335 			    modifier);
9336 
9337       else if (TREE_CODE_CLASS (code) == tcc_binary
9338 	       || TREE_CODE_CLASS (code) == tcc_comparison
9339 	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9340 	{
9341 	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9342 	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9343 	  return const0_rtx;
9344 	}
9345 
9346       target = 0;
9347     }
9348 
9349   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9350     target = 0;
9351 
9352   /* Use subtarget as the target for operand 0 of a binary operation.  */
9353   subtarget = get_subtarget (target);
9354   original_target = target;
9355 
9356   switch (code)
9357     {
9358     case LABEL_DECL:
9359       {
9360 	tree function = decl_function_context (exp);
9361 
9362 	temp = label_rtx (exp);
9363 	temp = gen_rtx_LABEL_REF (Pmode, temp);
9364 
9365 	if (function != current_function_decl
9366 	    && function != 0)
9367 	  LABEL_REF_NONLOCAL_P (temp) = 1;
9368 
9369 	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9370 	return temp;
9371       }
9372 
9373     case SSA_NAME:
9374       /* ??? ivopts calls the expander without any preparation from
9375          out-of-ssa.  So fake instructions as if this were an access to the
9376 	 base variable.  This unnecessarily allocates a pseudo; see whether we
9377 	 can reuse it if partition base vars have it set already.  */
9378       if (!currently_expanding_to_rtl)
9379 	{
9380 	  tree var = SSA_NAME_VAR (exp);
9381 	  if (var && DECL_RTL_SET_P (var))
9382 	    return DECL_RTL (var);
9383 	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9384 			      LAST_VIRTUAL_REGISTER + 1);
9385 	}
9386 
9387       g = get_gimple_for_ssa_name (exp);
9388       /* For EXPAND_INITIALIZER try harder to get something simpler.  */
9389       if (g == NULL
9390 	  && modifier == EXPAND_INITIALIZER
9391 	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
9392 	  && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9393 	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9394 	g = SSA_NAME_DEF_STMT (exp);
9395       if (g)
9396 	{
9397 	  rtx r;
9398 	  location_t saved_loc = curr_insn_location ();
9399 
9400 	  set_curr_insn_location (gimple_location (g));
9401 	  r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9402 				tmode, modifier, NULL, inner_reference_p);
9403 	  set_curr_insn_location (saved_loc);
9404 	  if (REG_P (r) && !REG_EXPR (r))
9405 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9406 	  return r;
9407 	}
9408 
9409       ssa_name = exp;
9410       decl_rtl = get_rtx_for_ssa_name (ssa_name);
9411       exp = SSA_NAME_VAR (ssa_name);
9412       goto expand_decl_rtl;
9413 
9414     case PARM_DECL:
9415     case VAR_DECL:
9416       /* If a static var's type was incomplete when the decl was written,
9417 	 but the type is complete now, lay out the decl now.  */
9418       if (DECL_SIZE (exp) == 0
9419 	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9420 	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9421 	layout_decl (exp, 0);
9422 
9423       /* ... fall through ...  */
9424 
9425     case FUNCTION_DECL:
9426     case RESULT_DECL:
9427       decl_rtl = DECL_RTL (exp);
9428     expand_decl_rtl:
9429       gcc_assert (decl_rtl);
9430       decl_rtl = copy_rtx (decl_rtl);
9431       /* Record writes to register variables.  */
9432       if (modifier == EXPAND_WRITE
9433 	  && REG_P (decl_rtl)
9434 	  && HARD_REGISTER_P (decl_rtl))
9435         add_to_hard_reg_set (&crtl->asm_clobbers,
9436 			     GET_MODE (decl_rtl), REGNO (decl_rtl));
9437 
9438       /* Ensure the variable is marked as used even if it doesn't go through
9439 	 a parser.  If it hasn't been used yet, write out an external
9440 	 definition.  */
9441       TREE_USED (exp) = 1;
9442 
9443       /* Show we haven't gotten RTL for this yet.  */
9444       temp = 0;
9445 
9446       /* Variables inherited from containing functions should have
9447 	 been lowered by this point.  */
9448       context = decl_function_context (exp);
9449       gcc_assert (SCOPE_FILE_SCOPE_P (context)
9450 		  || context == current_function_decl
9451 		  || TREE_STATIC (exp)
9452 		  || DECL_EXTERNAL (exp)
9453 		  /* ??? C++ creates functions that are not TREE_STATIC.  */
9454 		  || TREE_CODE (exp) == FUNCTION_DECL);
9455 
9456       /* This is the case of an array whose size is to be determined
9457 	 from its initializer, while the initializer is still being parsed.
9458 	 ??? We aren't parsing while expanding anymore.  */
9459 
9460       if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9461 	temp = validize_mem (decl_rtl);
9462 
9463       /* If DECL_RTL is memory, we are in the normal case and the
9464 	 address is not valid, get the address into a register.  */
9465 
9466       else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9467 	{
9468 	  if (alt_rtl)
9469 	    *alt_rtl = decl_rtl;
9470 	  decl_rtl = use_anchored_address (decl_rtl);
9471 	  if (modifier != EXPAND_CONST_ADDRESS
9472 	      && modifier != EXPAND_SUM
9473 	      && !memory_address_addr_space_p (DECL_MODE (exp),
9474 					       XEXP (decl_rtl, 0),
9475 					       MEM_ADDR_SPACE (decl_rtl)))
9476 	    temp = replace_equiv_address (decl_rtl,
9477 					  copy_rtx (XEXP (decl_rtl, 0)));
9478 	}
9479 
9480       /* If we got something, return it.  But first, set the alignment
9481 	 if the address is a register.  */
9482       if (temp != 0)
9483 	{
9484 	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9485 	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9486 
9487 	  return temp;
9488 	}
9489 
9490       /* If the mode of DECL_RTL does not match that of the decl,
9491 	 there are two cases: we are dealing with a BLKmode value
9492 	 that is returned in a register, or we are dealing with
9493 	 a promoted value.  In the latter case, return a SUBREG
9494 	 of the wanted mode, but mark it so that we know that it
9495 	 was already extended.  */
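      /* (Illustrative) on targets that promote sub-word values, a QImode
	 variable can live in an SImode register; the SUBREG created below
	 records that the register already holds a correctly sign- or
	 zero-extended copy of the value.  */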
9496       if (REG_P (decl_rtl)
9497 	  && DECL_MODE (exp) != BLKmode
9498 	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
9499 	{
9500 	  enum machine_mode pmode;
9501 
9502 	  /* Get the signedness to be used for this variable.  Ensure we get
9503 	     the same mode we got when the variable was declared.  */
9504 	  if (code == SSA_NAME
9505 	      && (g = SSA_NAME_DEF_STMT (ssa_name))
9506 	      && gimple_code (g) == GIMPLE_CALL
9507 	      && !gimple_call_internal_p (g))
9508 	    pmode = promote_function_mode (type, mode, &unsignedp,
9509 					   gimple_call_fntype (g),
9510 					   2);
9511 	  else
9512 	    pmode = promote_decl_mode (exp, &unsignedp);
9513 	  gcc_assert (GET_MODE (decl_rtl) == pmode);
9514 
9515 	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
9516 	  SUBREG_PROMOTED_VAR_P (temp) = 1;
9517 	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9518 	  return temp;
9519 	}
9520 
9521       return decl_rtl;
9522 
9523     case INTEGER_CST:
9524       temp = immed_double_const (TREE_INT_CST_LOW (exp),
9525 				 TREE_INT_CST_HIGH (exp), mode);
9526 
9527       return temp;
9528 
9529     case VECTOR_CST:
9530       {
9531 	tree tmp = NULL_TREE;
9532 	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9533 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9534 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9535 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9536 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9537 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9538 	  return const_vector_from_tree (exp);
9539 	if (GET_MODE_CLASS (mode) == MODE_INT)
9540 	  {
9541 	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9542 	    if (type_for_mode)
9543 	      tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9544 	  }
9545 	if (!tmp)
9546 	  {
9547 	    vec<constructor_elt, va_gc> *v;
9548 	    unsigned i;
9549 	    vec_alloc (v, VECTOR_CST_NELTS (exp));
9550 	    for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9551 	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9552 	    tmp = build_constructor (type, v);
9553 	  }
9554 	return expand_expr (tmp, ignore ? const0_rtx : target,
9555 			    tmode, modifier);
9556       }
9557 
9558     case CONST_DECL:
9559       return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9560 
9561     case REAL_CST:
9562       /* If optimized, generate immediate CONST_DOUBLE
9563 	 which will be turned into memory by reload if necessary.
9564 
9565 	 We used to force a register so that loop.c could see it.  But
9566 	 this does not allow gen_* patterns to perform optimizations with
9567 	 the constants.  It also produces two insns in cases like "x = 1.0;".
9568 	 On most machines, floating-point constants are not permitted in
9569 	 many insns, so we'd end up copying it to a register in any case.
9570 
9571 	 Now, we do the copying in expand_binop, if appropriate.  */
9572       return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9573 					   TYPE_MODE (TREE_TYPE (exp)));
9574 
9575     case FIXED_CST:
9576       return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9577 					   TYPE_MODE (TREE_TYPE (exp)));
9578 
9579     case COMPLEX_CST:
9580       /* Handle evaluating a complex constant in a CONCAT target.  */
9581       if (original_target && GET_CODE (original_target) == CONCAT)
9582 	{
9583 	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9584 	  rtx rtarg, itarg;
9585 
9586 	  rtarg = XEXP (original_target, 0);
9587 	  itarg = XEXP (original_target, 1);
9588 
9589 	  /* Move the real and imaginary parts separately.  */
9590 	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9591 	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9592 
9593 	  if (op0 != rtarg)
9594 	    emit_move_insn (rtarg, op0);
9595 	  if (op1 != itarg)
9596 	    emit_move_insn (itarg, op1);
9597 
9598 	  return original_target;
9599 	}
9600 
9601       /* ... fall through ...  */
9602 
9603     case STRING_CST:
9604       temp = expand_expr_constant (exp, 1, modifier);
9605 
9606       /* temp contains a constant address.
9607 	 On RISC machines where a constant address isn't valid,
9608 	 make some insns to get that address into a register.  */
9609       if (modifier != EXPAND_CONST_ADDRESS
9610 	  && modifier != EXPAND_INITIALIZER
9611 	  && modifier != EXPAND_SUM
9612 	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9613 					    MEM_ADDR_SPACE (temp)))
9614 	return replace_equiv_address (temp,
9615 				      copy_rtx (XEXP (temp, 0)));
9616       return temp;
9617 
9618     case SAVE_EXPR:
9619       {
9620 	tree val = treeop0;
9621 	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9622 				      inner_reference_p);
9623 
9624 	if (!SAVE_EXPR_RESOLVED_P (exp))
9625 	  {
9626 	    /* We can indeed still hit this case, typically via builtin
9627 	       expanders calling save_expr immediately before expanding
9628 	       something.  Assume this means that we only have to deal
9629 	       with non-BLKmode values.  */
9630 	    gcc_assert (GET_MODE (ret) != BLKmode);
9631 
9632 	    val = build_decl (curr_insn_location (),
9633 			      VAR_DECL, NULL, TREE_TYPE (exp));
9634 	    DECL_ARTIFICIAL (val) = 1;
9635 	    DECL_IGNORED_P (val) = 1;
9636 	    treeop0 = val;
9637 	    TREE_OPERAND (exp, 0) = treeop0;
9638 	    SAVE_EXPR_RESOLVED_P (exp) = 1;
9639 
9640 	    if (!CONSTANT_P (ret))
9641 	      ret = copy_to_reg (ret);
9642 	    SET_DECL_RTL (val, ret);
9643 	  }
9644 
9645         return ret;
9646       }
9647 
9648 
9649     case CONSTRUCTOR:
9650       /* If we don't need the result, just ensure we evaluate any
9651 	 subexpressions.  */
9652       if (ignore)
9653 	{
9654 	  unsigned HOST_WIDE_INT idx;
9655 	  tree value;
9656 
9657 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9658 	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9659 
9660 	  return const0_rtx;
9661 	}
9662 
9663       return expand_constructor (exp, target, modifier, false);
9664 
9665     case TARGET_MEM_REF:
9666       {
9667 	addr_space_t as
9668 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9669 	enum insn_code icode;
9670 	unsigned int align;
9671 
9672 	op0 = addr_for_mem_ref (exp, as, true);
9673 	op0 = memory_address_addr_space (mode, op0, as);
9674 	temp = gen_rtx_MEM (mode, op0);
9675 	set_mem_attributes (temp, exp, 0);
9676 	set_mem_addr_space (temp, as);
9677 	align = get_object_alignment (exp);
9678 	if (modifier != EXPAND_WRITE
9679 	    && modifier != EXPAND_MEMORY
9680 	    && mode != BLKmode
9681 	    && align < GET_MODE_ALIGNMENT (mode)
9682 	    /* If the target does not have special handling for unaligned
9683 	       loads of this mode, then it can use regular moves for them.  */
9684 	    && ((icode = optab_handler (movmisalign_optab, mode))
9685 		!= CODE_FOR_nothing))
9686 	  {
9687 	    struct expand_operand ops[2];
9688 
9689 	    /* We've already validated the memory, and we're creating a
9690 	       new pseudo destination.  The predicates really can't fail,
9691 	       nor can the generator.  */
9692 	    create_output_operand (&ops[0], NULL_RTX, mode);
9693 	    create_fixed_operand (&ops[1], temp);
9694 	    expand_insn (icode, 2, ops);
9695 	    temp = ops[0].value;
9696 	  }
9697 	return temp;
9698       }
9699 
9700     case MEM_REF:
9701       {
9702 	addr_space_t as
9703 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9704 	enum machine_mode address_mode;
9705 	tree base = TREE_OPERAND (exp, 0);
9706 	gimple def_stmt;
9707 	enum insn_code icode;
9708 	unsigned align;
9709 	/* Handle expansion of non-aliased memory with non-BLKmode.  That
9710 	   might end up in a register.  */
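	/* (Illustrative) this covers accesses such as MEM[&d + 0] where the
	   local variable D was never marked addressable and therefore has a
	   register, not a stack slot, as its DECL_RTL.  */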
9711 	if (mem_ref_refers_to_non_mem_p (exp))
9712 	  {
9713 	    HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9714 	    base = TREE_OPERAND (base, 0);
9715 	    if (offset == 0
9716 		&& tree_fits_uhwi_p (TYPE_SIZE (type))
9717 		&& (GET_MODE_BITSIZE (DECL_MODE (base))
9718 		    == tree_to_uhwi (TYPE_SIZE (type))))
9719 	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9720 				  target, tmode, modifier);
9721 	    if (TYPE_MODE (type) == BLKmode)
9722 	      {
9723 		temp = assign_stack_temp (DECL_MODE (base),
9724 					  GET_MODE_SIZE (DECL_MODE (base)));
9725 		store_expr (base, temp, 0, false);
9726 		temp = adjust_address (temp, BLKmode, offset);
9727 		set_mem_size (temp, int_size_in_bytes (type));
9728 		return temp;
9729 	      }
9730 	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9731 			  bitsize_int (offset * BITS_PER_UNIT));
9732 	    return expand_expr (exp, target, tmode, modifier);
9733 	  }
9734 	address_mode = targetm.addr_space.address_mode (as);
9735 	base = TREE_OPERAND (exp, 0);
9736 	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9737 	  {
9738 	    tree mask = gimple_assign_rhs2 (def_stmt);
9739 	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9740 			   gimple_assign_rhs1 (def_stmt), mask);
9741 	    TREE_OPERAND (exp, 0) = base;
9742 	  }
9743 	align = get_object_alignment (exp);
9744 	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9745 	op0 = memory_address_addr_space (mode, op0, as);
9746 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
9747 	  {
9748 	    rtx off
9749 	      = immed_double_int_const (mem_ref_offset (exp), address_mode);
9750 	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9751 	    op0 = memory_address_addr_space (mode, op0, as);
9752 	  }
9753 	temp = gen_rtx_MEM (mode, op0);
9754 	set_mem_attributes (temp, exp, 0);
9755 	set_mem_addr_space (temp, as);
9756 	if (TREE_THIS_VOLATILE (exp))
9757 	  MEM_VOLATILE_P (temp) = 1;
9758 	if (modifier != EXPAND_WRITE
9759 	    && modifier != EXPAND_MEMORY
9760 	    && !inner_reference_p
9761 	    && mode != BLKmode
9762 	    && align < GET_MODE_ALIGNMENT (mode))
9763 	  {
9764 	    if ((icode = optab_handler (movmisalign_optab, mode))
9765 		!= CODE_FOR_nothing)
9766 	      {
9767 		struct expand_operand ops[2];
9768 
9769 		/* We've already validated the memory, and we're creating a
9770 		   new pseudo destination.  The predicates really can't fail,
9771 		   nor can the generator.  */
9772 		create_output_operand (&ops[0], NULL_RTX, mode);
9773 		create_fixed_operand (&ops[1], temp);
9774 		expand_insn (icode, 2, ops);
9775 		temp = ops[0].value;
9776 	      }
9777 	    else if (SLOW_UNALIGNED_ACCESS (mode, align))
9778 	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9779 					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9780 					(modifier == EXPAND_STACK_PARM
9781 					 ? NULL_RTX : target),
9782 					mode, mode);
9783 	  }
9784 	return temp;
9785       }
9786 
9787     case ARRAY_REF:
9788 
9789       {
9790 	tree array = treeop0;
9791 	tree index = treeop1;
9792 	tree init;
9793 
9794 	/* Fold an expression like: "foo"[2].
9795 	   This is not done in fold so it won't happen inside &.
9796 	   Don't fold if this is for wide characters since it's too
9797 	   difficult to do correctly and this is a very rare case.  */
9798 
9799 	if (modifier != EXPAND_CONST_ADDRESS
9800 	    && modifier != EXPAND_INITIALIZER
9801 	    && modifier != EXPAND_MEMORY)
9802 	  {
9803 	    tree t = fold_read_from_constant_string (exp);
9804 
9805 	    if (t)
9806 	      return expand_expr (t, target, tmode, modifier);
9807 	  }
9808 
9809 	/* If this is a constant index into a constant array,
9810 	   just get the value from the array.  Handle both the cases when
9811 	   we have an explicit constructor and when our operand is a variable
9812 	   that was declared const.  */
9813 
9814 	if (modifier != EXPAND_CONST_ADDRESS
9815 	    && modifier != EXPAND_INITIALIZER
9816 	    && modifier != EXPAND_MEMORY
9817 	    && TREE_CODE (array) == CONSTRUCTOR
9818 	    && ! TREE_SIDE_EFFECTS (array)
9819 	    && TREE_CODE (index) == INTEGER_CST)
9820 	  {
9821 	    unsigned HOST_WIDE_INT ix;
9822 	    tree field, value;
9823 
9824 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9825 				      field, value)
9826 	      if (tree_int_cst_equal (field, index))
9827 		{
9828 		  if (!TREE_SIDE_EFFECTS (value))
9829 		    return expand_expr (fold (value), target, tmode, modifier);
9830 		  break;
9831 		}
9832 	  }
9833 
9834 	else if (optimize >= 1
9835 		 && modifier != EXPAND_CONST_ADDRESS
9836 		 && modifier != EXPAND_INITIALIZER
9837 		 && modifier != EXPAND_MEMORY
9838 		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9839 		 && TREE_CODE (index) == INTEGER_CST
9840 		 && (TREE_CODE (array) == VAR_DECL
9841 		     || TREE_CODE (array) == CONST_DECL)
9842 		 && (init = ctor_for_folding (array)) != error_mark_node)
9843 	  {
9844 	    if (init == NULL_TREE)
9845 	      {
9846 		tree value = build_zero_cst (type);
9847 		if (TREE_CODE (value) == CONSTRUCTOR)
9848 		  {
9849 		    /* If VALUE is a CONSTRUCTOR, this optimization is only
9850 		       useful if this doesn't store the CONSTRUCTOR into
9851 		       memory.  If it does, it is more efficient to just
9852 		       load the data from the array directly.  */
9853 		    rtx ret = expand_constructor (value, target,
9854 						  modifier, true);
9855 		    if (ret == NULL_RTX)
9856 		      value = NULL_TREE;
9857 		  }
9858 
9859 		if (value)
9860 		  return expand_expr (value, target, tmode, modifier);
9861 	      }
9862 	    else if (TREE_CODE (init) == CONSTRUCTOR)
9863 	      {
9864 		unsigned HOST_WIDE_INT ix;
9865 		tree field, value;
9866 
9867 		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9868 					  field, value)
9869 		  if (tree_int_cst_equal (field, index))
9870 		    {
9871 		      if (TREE_SIDE_EFFECTS (value))
9872 			break;
9873 
9874 		      if (TREE_CODE (value) == CONSTRUCTOR)
9875 			{
9876 			  /* If VALUE is a CONSTRUCTOR, this
9877 			     optimization is only useful if
9878 			     this doesn't store the CONSTRUCTOR
9879 			     into memory.  If it does, it is more
9880 			     efficient to just load the data from
9881 			     the array directly.  */
9882 			  rtx ret = expand_constructor (value, target,
9883 							modifier, true);
9884 			  if (ret == NULL_RTX)
9885 			    break;
9886 			}
9887 
9888 		      return
9889 		        expand_expr (fold (value), target, tmode, modifier);
9890 		    }
9891 	      }
9892 	    else if (TREE_CODE (init) == STRING_CST)
9893 	      {
9894 		tree low_bound = array_ref_low_bound (exp);
9895 		tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9896 
9897 		/* Optimize the special case of a zero lower bound.
9898 
9899 		   We convert the lower bound to sizetype to avoid problems
9900 		   with constant folding.  E.g. suppose the lower bound is
9901 		   1 and its mode is QI.  Without the conversion
9902 		      (ARRAY + (INDEX - (unsigned char)1))
9903 		   becomes
9904 		      (ARRAY + (-(unsigned char)1) + INDEX)
9905 		   which becomes
9906 		      (ARRAY + 255 + INDEX).  Oops!  */
9907 		if (!integer_zerop (low_bound))
9908 		  index1 = size_diffop_loc (loc, index1,
9909 					    fold_convert_loc (loc, sizetype,
9910 							      low_bound));
9911 
9912 		if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9913 		  {
9914 		    tree type = TREE_TYPE (TREE_TYPE (init));
9915 		    enum machine_mode mode = TYPE_MODE (type);
9916 
9917 		    if (GET_MODE_CLASS (mode) == MODE_INT
9918 			&& GET_MODE_SIZE (mode) == 1)
9919 		      return gen_int_mode (TREE_STRING_POINTER (init)
9920 					   [TREE_INT_CST_LOW (index1)],
9921 					   mode);
9922 		  }
9923 	      }
9924 	  }
9925       }
9926       goto normal_inner_ref;
9927 
9928     case COMPONENT_REF:
9929       /* If the operand is a CONSTRUCTOR, we can just extract the
9930 	 appropriate field if it is present.  */
9931       if (TREE_CODE (treeop0) == CONSTRUCTOR)
9932 	{
9933 	  unsigned HOST_WIDE_INT idx;
9934 	  tree field, value;
9935 
9936 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9937 				    idx, field, value)
9938 	    if (field == treeop1
9939 		/* We can normally use the value of the field in the
9940 		   CONSTRUCTOR.  However, if this is a bitfield in
9941 		   an integral mode that we can fit in a HOST_WIDE_INT,
9942 		   we must mask only the number of bits in the bitfield,
9943 		   since this is done implicitly by the constructor.  If
9944 		   the bitfield does not meet either of those conditions,
9945 		   we can't do this optimization.  */
9946 		&& (! DECL_BIT_FIELD (field)
9947 		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9948 			&& (GET_MODE_PRECISION (DECL_MODE (field))
9949 			    <= HOST_BITS_PER_WIDE_INT))))
9950 	      {
9951 		if (DECL_BIT_FIELD (field)
9952 		    && modifier == EXPAND_STACK_PARM)
9953 		  target = 0;
9954 		op0 = expand_expr (value, target, tmode, modifier);
9955 		if (DECL_BIT_FIELD (field))
9956 		  {
9957 		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9958 		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9959 
9960 		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
9961 		      {
9962 			op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9963 					    imode);
9964 			op0 = expand_and (imode, op0, op1, target);
9965 		      }
9966 		    else
9967 		      {
9968 			int count = GET_MODE_PRECISION (imode) - bitsize;
9969 
9970 			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9971 					    target, 0);
9972 			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9973 					    target, 0);
9974 		      }
9975 		  }
9976 
9977 		return op0;
9978 	      }
9979 	}
9980       goto normal_inner_ref;
9981 
9982     case BIT_FIELD_REF:
9983     case ARRAY_RANGE_REF:
9984     normal_inner_ref:
9985       {
9986 	enum machine_mode mode1, mode2;
9987 	HOST_WIDE_INT bitsize, bitpos;
9988 	tree offset;
9989 	int volatilep = 0, must_force_mem;
9990 	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9991 					&mode1, &unsignedp, &volatilep, true);
9992 	rtx orig_op0, memloc;
9993 	bool mem_attrs_from_type = false;
9994 
9995 	/* If we got back the original object, something is wrong.  Perhaps
9996 	   we are evaluating an expression too early.  In any event, don't
9997 	   infinitely recurse.  */
9998 	gcc_assert (tem != exp);
9999 
10000 	/* If TEM's type is a union of variable size, pass TARGET to the inner
10001 	   computation, since it will need a temporary and TARGET is known
10002 	   to suffice for that.  This occurs in unchecked conversion in Ada.  */
10003 	orig_op0 = op0
10004 	  = expand_expr_real (tem,
10005 			      (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10006 			       && COMPLETE_TYPE_P (TREE_TYPE (tem))
10007 			       && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10008 				   != INTEGER_CST)
10009 			       && modifier != EXPAND_STACK_PARM
10010 			       ? target : NULL_RTX),
10011 			      VOIDmode,
10012 			      modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10013 			      NULL, true);
10014 
10015 	/* If the field has a mode, we want to access it in the
10016 	   field's mode, not the computed mode.
10017 	   If a MEM has VOIDmode (external with incomplete type),
10018 	   use BLKmode for it instead.  */
10019 	if (MEM_P (op0))
10020 	  {
10021 	    if (mode1 != VOIDmode)
10022 	      op0 = adjust_address (op0, mode1, 0);
10023 	    else if (GET_MODE (op0) == VOIDmode)
10024 	      op0 = adjust_address (op0, BLKmode, 0);
10025 	  }
10026 
10027 	mode2
10028 	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10029 
10030 	/* If we have either an offset, a BLKmode result, or a reference
10031 	   outside the underlying object, we must force it to memory.
10032 	   Such a case can occur in Ada if we have unchecked conversion
10033 	   of an expression from a scalar type to an aggregate type or
10034 	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10035 	   passed a partially uninitialized object or a view-conversion
10036 	   to a larger size.  */
10037 	must_force_mem = (offset
10038 			  || mode1 == BLKmode
10039 			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10040 
10041 	/* Handle CONCAT first.  */
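	/* (Illustrative) a CONCAT typically holds a complex value as a pair
	   of registers; a reference that covers exactly the whole value or
	   exactly one of its halves can then be satisfied without spilling
	   the value to memory.  */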
10042 	if (GET_CODE (op0) == CONCAT && !must_force_mem)
10043 	  {
10044 	    if (bitpos == 0
10045 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10046 	      return op0;
10047 	    if (bitpos == 0
10048 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10049 		&& bitsize)
10050 	      {
10051 		op0 = XEXP (op0, 0);
10052 		mode2 = GET_MODE (op0);
10053 	      }
10054 	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10055 		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10056 		     && bitpos
10057 		     && bitsize)
10058 	      {
10059 		op0 = XEXP (op0, 1);
10060 		bitpos = 0;
10061 		mode2 = GET_MODE (op0);
10062 	      }
10063 	    else
10064 	      /* Otherwise force into memory.  */
10065 	      must_force_mem = 1;
10066 	  }
10067 
10068 	/* If this is a constant, put it in a register if it is a legitimate
10069 	   constant and we don't need a memory reference.  */
10070 	if (CONSTANT_P (op0)
10071 	    && mode2 != BLKmode
10072 	    && targetm.legitimate_constant_p (mode2, op0)
10073 	    && !must_force_mem)
10074 	  op0 = force_reg (mode2, op0);
10075 
10076 	/* Otherwise, if this is a constant, try to force it to the constant
10077 	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
10078 	   is a legitimate constant.  */
10079 	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10080 	  op0 = validize_mem (memloc);
10081 
10082 	/* Otherwise, if this is a constant or the object is not in memory
10083 	   and need be, put it there.  */
10084 	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10085 	  {
10086 	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10087 	    emit_move_insn (memloc, op0);
10088 	    op0 = memloc;
10089 	    mem_attrs_from_type = true;
10090 	  }
10091 
10092 	if (offset)
10093 	  {
10094 	    enum machine_mode address_mode;
10095 	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10096 					  EXPAND_SUM);
10097 
10098 	    gcc_assert (MEM_P (op0));
10099 
10100 	    address_mode = get_address_mode (op0);
10101 	    if (GET_MODE (offset_rtx) != address_mode)
10102 	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10103 
10104 	    if (GET_MODE (op0) == BLKmode
10105 		/* The check for a constant address in OP0 not having VOIDmode
10106 		   is probably no longer necessary.  */
10107 		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
10108 		&& bitsize != 0
10109 		&& (bitpos % bitsize) == 0
10110 		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10111 		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
10112 	      {
10113 		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10114 		bitpos = 0;
10115 	      }
10116 
10117 	    op0 = offset_address (op0, offset_rtx,
10118 				  highest_pow2_factor (offset));
10119 	  }
10120 
10121 	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10122 	   record its alignment as BIGGEST_ALIGNMENT.  */
10123 	if (MEM_P (op0) && bitpos == 0 && offset != 0
10124 	    && is_aligning_offset (offset, tem))
10125 	  set_mem_align (op0, BIGGEST_ALIGNMENT);
10126 
10127 	/* Don't forget about volatility even if this is a bitfield.  */
10128 	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10129 	  {
10130 	    if (op0 == orig_op0)
10131 	      op0 = copy_rtx (op0);
10132 
10133 	    MEM_VOLATILE_P (op0) = 1;
10134 	  }
10135 
10136 	/* In cases where an aligned union has an unaligned object
10137 	   as a field, we might be extracting a BLKmode value from
10138 	   an integer-mode (e.g., SImode) object.  Handle this case
10139 	   by doing the extract into an object as wide as the field
10140 	   (which we know to be the width of a basic mode), then
10141 	   storing into memory, and changing the mode to BLKmode.  */
10142 	if (mode1 == VOIDmode
10143 	    || REG_P (op0) || GET_CODE (op0) == SUBREG
10144 	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
10145 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10146 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10147 		&& modifier != EXPAND_CONST_ADDRESS
10148 		&& modifier != EXPAND_INITIALIZER
10149 		&& modifier != EXPAND_MEMORY)
10150 	    /* If the bitfield is volatile and the bitsize
10151 	       is narrower than the access size of the bitfield,
10152 	       we need to extract bitfields from the access.  */
10153 	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10154 		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10155 		&& mode1 != BLKmode
10156 		&& bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10157 	    /* If the field isn't aligned enough to fetch as a memref,
10158 	       fetch it as a bit field.  */
10159 	    || (mode1 != BLKmode
10160 		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10161 		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10162 		      || (MEM_P (op0)
10163 			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10164 			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10165 		     && modifier != EXPAND_MEMORY
10166 		     && ((modifier == EXPAND_CONST_ADDRESS
10167 			  || modifier == EXPAND_INITIALIZER)
10168 			 ? STRICT_ALIGNMENT
10169 			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10170 		    || (bitpos % BITS_PER_UNIT != 0)))
10171 	    /* If the type and the field are a constant size and the
10172 	       size of the type isn't the same size as the bitfield,
10173 	       we must use bitfield operations.  */
10174 	    || (bitsize >= 0
10175 		&& TYPE_SIZE (TREE_TYPE (exp))
10176 		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10177 		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10178 					  bitsize)))
10179 	  {
10180 	    enum machine_mode ext_mode = mode;
10181 
10182 	    if (ext_mode == BLKmode
10183 		&& ! (target != 0 && MEM_P (op0)
10184 		      && MEM_P (target)
10185 		      && bitpos % BITS_PER_UNIT == 0))
10186 	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10187 
10188 	    if (ext_mode == BLKmode)
10189 	      {
10190 		if (target == 0)
10191 		  target = assign_temp (type, 1, 1);
10192 
10193 		/* ??? Unlike the similar test a few lines below, this one is
10194 		   very likely obsolete.  */
10195 		if (bitsize == 0)
10196 		  return target;
10197 
10198 		/* In this case, BITPOS must start at a byte boundary and
10199 		   TARGET, if specified, must be a MEM.  */
10200 		gcc_assert (MEM_P (op0)
10201 			    && (!target || MEM_P (target))
10202 			    && !(bitpos % BITS_PER_UNIT));
10203 
10204 		emit_block_move (target,
10205 				 adjust_address (op0, VOIDmode,
10206 						 bitpos / BITS_PER_UNIT),
10207 				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10208 					  / BITS_PER_UNIT),
10209 				 (modifier == EXPAND_STACK_PARM
10210 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10211 
10212 		return target;
10213 	      }
10214 
10215 	    /* If we have nothing to extract, the result will be 0 for targets
10216 	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
10217 	       return 0 for the sake of consistency, as reading a zero-sized
10218 	       bitfield is valid in Ada and the value is fully specified.  */
10219 	    if (bitsize == 0)
10220 	      return const0_rtx;
10221 
10222 	    op0 = validize_mem (op0);
10223 
10224 	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10225 	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10226 
10227 	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10228 				     (modifier == EXPAND_STACK_PARM
10229 				      ? NULL_RTX : target),
10230 				     ext_mode, ext_mode);
10231 
10232 	    /* If the result is a record type and BITSIZE is narrower than
10233 	       the mode of OP0, an integral mode, and this is a big endian
10234 	       machine, we must put the field into the high-order bits.  */
10235 	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10236 		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10237 		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10238 	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10239 				  GET_MODE_BITSIZE (GET_MODE (op0))
10240 				  - bitsize, op0, 1);
10241 
10242 	    /* If the result type is BLKmode, store the data into a temporary
10243 	       of the appropriate type, but with the mode corresponding to the
10244 	       mode for the data we have (op0's mode).  */
10245 	    if (mode == BLKmode)
10246 	      {
10247 		rtx new_rtx
10248 		  = assign_stack_temp_for_type (ext_mode,
10249 						GET_MODE_BITSIZE (ext_mode),
10250 						type);
10251 		emit_move_insn (new_rtx, op0);
10252 		op0 = copy_rtx (new_rtx);
10253 		PUT_MODE (op0, BLKmode);
10254 	      }
10255 
10256 	    return op0;
10257 	  }
10258 
10259 	/* If the result is BLKmode, use that to access the object
10260 	   now as well.  */
10261 	if (mode == BLKmode)
10262 	  mode1 = BLKmode;
10263 
10264 	/* Get a reference to just this component.  */
10265 	if (modifier == EXPAND_CONST_ADDRESS
10266 	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10267 	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10268 	else
10269 	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10270 
10271 	if (op0 == orig_op0)
10272 	  op0 = copy_rtx (op0);
10273 
10274 	/* If op0 is a temporary because of forcing to memory, pass only the
10275 	   type to set_mem_attributes so that the original expression is never
10276 	   marked as ADDRESSABLE through MEM_EXPR of the temporary.  */
10277 	if (mem_attrs_from_type)
10278 	  set_mem_attributes (op0, type, 0);
10279 	else
10280 	  set_mem_attributes (op0, exp, 0);
10281 
10282 	if (REG_P (XEXP (op0, 0)))
10283 	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10284 
10285 	MEM_VOLATILE_P (op0) |= volatilep;
10286 	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10287 	    || modifier == EXPAND_CONST_ADDRESS
10288 	    || modifier == EXPAND_INITIALIZER)
10289 	  return op0;
10290 
10291 	if (target == 0)
10292 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10293 
10294 	convert_move (target, op0, unsignedp);
10295 	return target;
10296       }
10297 
10298     case OBJ_TYPE_REF:
10299       return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10300 
10301     case CALL_EXPR:
10302       /* All valid uses of __builtin_va_arg_pack () are removed during
10303 	 inlining.  */
10304       if (CALL_EXPR_VA_ARG_PACK (exp))
10305 	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10306       {
10307 	tree fndecl = get_callee_fndecl (exp), attr;
10308 
10309 	if (fndecl
10310 	    && (attr = lookup_attribute ("error",
10311 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10312 	  error ("%Kcall to %qs declared with attribute error: %s",
10313 		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10314 		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10315 	if (fndecl
10316 	    && (attr = lookup_attribute ("warning",
10317 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10318 	  warning_at (tree_nonartificial_location (exp),
10319 		      0, "%Kcall to %qs declared with attribute warning: %s",
10320 		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10321 		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10322 
10323 	/* Check for a built-in function.  */
10324 	if (fndecl && DECL_BUILT_IN (fndecl))
10325 	  {
10326 	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10327 	    return expand_builtin (exp, target, subtarget, tmode, ignore);
10328 	  }
10329       }
10330       return expand_call (exp, target, ignore);
10331 
10332     case VIEW_CONVERT_EXPR:
10333       op0 = NULL_RTX;
10334 
10335       /* If we are converting to BLKmode, try to avoid an intermediate
10336 	 temporary by fetching an inner memory reference.  */
10337       if (mode == BLKmode
10338 	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10339 	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10340 	  && handled_component_p (treeop0))
10341       {
10342 	enum machine_mode mode1;
10343 	HOST_WIDE_INT bitsize, bitpos;
10344 	tree offset;
10345 	int unsignedp;
10346 	int volatilep = 0;
10347 	tree tem
10348 	  = get_inner_reference (treeop0, &bitsize, &bitpos,
10349 				 &offset, &mode1, &unsignedp, &volatilep,
10350 				 true);
10351 	rtx orig_op0;
10352 
10353 	/* ??? We should work harder and deal with non-zero offsets.  */
10354 	if (!offset
10355 	    && (bitpos % BITS_PER_UNIT) == 0
10356 	    && bitsize >= 0
10357 	    && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10358 	  {
10359 	    /* See the normal_inner_ref case for the rationale.  */
10360 	    orig_op0
10361 	      = expand_expr_real (tem,
10362 				  (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10363 				   && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10364 				       != INTEGER_CST)
10365 				   && modifier != EXPAND_STACK_PARM
10366 				   ? target : NULL_RTX),
10367 				  VOIDmode,
10368 				  modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10369 				  NULL, true);
10370 
10371 	    if (MEM_P (orig_op0))
10372 	      {
10373 		op0 = orig_op0;
10374 
10375 		/* Get a reference to just this component.  */
10376 		if (modifier == EXPAND_CONST_ADDRESS
10377 		    || modifier == EXPAND_SUM
10378 		    || modifier == EXPAND_INITIALIZER)
10379 		  op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10380 		else
10381 		  op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10382 
10383 		if (op0 == orig_op0)
10384 		  op0 = copy_rtx (op0);
10385 
10386 		set_mem_attributes (op0, treeop0, 0);
10387 		if (REG_P (XEXP (op0, 0)))
10388 		  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10389 
10390 		MEM_VOLATILE_P (op0) |= volatilep;
10391 	      }
10392 	  }
10393       }
10394 
10395       if (!op0)
10396 	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10397 				NULL, inner_reference_p);
10398 
10399       /* If the input and output modes are both the same, we are done.  */
10400       if (mode == GET_MODE (op0))
10401 	;
10402       /* If neither mode is BLKmode, and both modes are the same size
10403 	 then we can use gen_lowpart.  */
10404       else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10405 	       && (GET_MODE_PRECISION (mode)
10406 		   == GET_MODE_PRECISION (GET_MODE (op0)))
10407 	       && !COMPLEX_MODE_P (GET_MODE (op0)))
10408 	{
10409 	  if (GET_CODE (op0) == SUBREG)
10410 	    op0 = force_reg (GET_MODE (op0), op0);
10411 	  temp = gen_lowpart_common (mode, op0);
10412 	  if (temp)
10413 	    op0 = temp;
10414 	  else
10415 	    {
10416 	      if (!REG_P (op0) && !MEM_P (op0))
10417 		op0 = force_reg (GET_MODE (op0), op0);
10418 	      op0 = gen_lowpart (mode, op0);
10419 	    }
10420 	}
10421       /* If both types are integral, convert from one mode to the other.  */
10422       else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10423 	op0 = convert_modes (mode, GET_MODE (op0), op0,
10424 			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10425       /* If the output type is a bit-field type, do an extraction.  */
10426       else if (reduce_bit_field)
10427 	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10428 				  TYPE_UNSIGNED (type), NULL_RTX,
10429 				  mode, mode);
10430       /* As a last resort, spill op0 to memory, and reload it in a
10431 	 different mode.  */
10432       else if (!MEM_P (op0))
10433 	{
10434 	  /* If the operand is not a MEM, force it into memory.  Since we
10435 	     are going to be changing the mode of the MEM, don't call
10436 	     force_const_mem for constants because we don't allow pool
10437 	     constants to change mode.  */
10438 	  tree inner_type = TREE_TYPE (treeop0);
10439 
10440 	  gcc_assert (!TREE_ADDRESSABLE (exp));
10441 
10442 	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10443 	    target
10444 	      = assign_stack_temp_for_type
10445 		(TYPE_MODE (inner_type),
10446 		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10447 
10448 	  emit_move_insn (target, op0);
10449 	  op0 = target;
10450 	}
10451 
10452       /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
10453 	 output type is such that the operand is known to be aligned, indicate
10454 	 that it is.  Otherwise, we need only be concerned about alignment for
10455 	 non-BLKmode results.  */
10456       if (MEM_P (op0))
10457 	{
10458 	  enum insn_code icode;
10459 
10460 	  if (TYPE_ALIGN_OK (type))
10461 	    {
10462 	      /* ??? Copying the MEM without substantially changing it might
10463 		 run afoul of the code handling volatile memory references in
10464 		 store_expr, which assumes that TARGET is returned unmodified
10465 		 if it has been used.  */
10466 	      op0 = copy_rtx (op0);
10467 	      set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10468 	    }
10469 	  else if (modifier != EXPAND_WRITE
10470 		   && modifier != EXPAND_MEMORY
10471 		   && !inner_reference_p
10472 		   && mode != BLKmode
10473 		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10474 	    {
10475 	      /* If the target does have special handling for unaligned
10476 		 loads of this mode, then use it.  */
10477 	      if ((icode = optab_handler (movmisalign_optab, mode))
10478 		  != CODE_FOR_nothing)
10479 		{
10480 		  rtx reg, insn;
10481 
10482 		  op0 = adjust_address (op0, mode, 0);
10483 		  /* We've already validated the memory, and we're creating a
10484 		     new pseudo destination.  The predicates really can't
10485 		     fail.  */
10486 		  reg = gen_reg_rtx (mode);
10487 
10488 		  /* Nor can the insn generator.  */
10489 		  insn = GEN_FCN (icode) (reg, op0);
10490 		  emit_insn (insn);
10491 		  return reg;
10492 		}
10493 	      else if (STRICT_ALIGNMENT)
10494 		{
10495 		  tree inner_type = TREE_TYPE (treeop0);
10496 		  HOST_WIDE_INT temp_size
10497 		    = MAX (int_size_in_bytes (inner_type),
10498 			   (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10499 		  rtx new_rtx
10500 		    = assign_stack_temp_for_type (mode, temp_size, type);
10501 		  rtx new_with_op0_mode
10502 		    = adjust_address (new_rtx, GET_MODE (op0), 0);
10503 
10504 		  gcc_assert (!TREE_ADDRESSABLE (exp));
10505 
10506 		  if (GET_MODE (op0) == BLKmode)
10507 		    emit_block_move (new_with_op0_mode, op0,
10508 				     GEN_INT (GET_MODE_SIZE (mode)),
10509 				     (modifier == EXPAND_STACK_PARM
10510 				      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10511 		  else
10512 		    emit_move_insn (new_with_op0_mode, op0);
10513 
10514 		  op0 = new_rtx;
10515 		}
10516 	    }
10517 
10518 	  op0 = adjust_address (op0, mode, 0);
10519 	}
10520 
10521       return op0;
10522 
10523     case MODIFY_EXPR:
10524       {
10525 	tree lhs = treeop0;
10526 	tree rhs = treeop1;
10527 	gcc_assert (ignore);
10528 
10529 	/* Check for |= or &= of a bitfield of size 1 into another bitfield
10530 	   of size 1.  In this case, (unless we need the result of the
10531 	   assignment) we can do this more efficiently with a
10532 	   test followed by an assignment, if necessary.
10533 
10534 	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
10535 	   things change so we do, this code should be enhanced to
10536 	   support it.  */
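	/* (Illustrative) for one-bit fields this turns "s.x |= s.y" into the
	   equivalent of "if (s.y) s.x = 1;", avoiding a read-modify-write of
	   the destination bit-field.  */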
10537 	if (TREE_CODE (lhs) == COMPONENT_REF
10538 	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
10539 		|| TREE_CODE (rhs) == BIT_AND_EXPR)
10540 	    && TREE_OPERAND (rhs, 0) == lhs
10541 	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10542 	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10543 	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10544 	  {
10545 	    rtx label = gen_label_rtx ();
10546 	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10547 	    do_jump (TREE_OPERAND (rhs, 1),
10548 		     value ? label : 0,
10549 		     value ? 0 : label, -1);
10550 	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10551 			       false);
10552 	    do_pending_stack_adjust ();
10553 	    emit_label (label);
10554 	    return const0_rtx;
10555 	  }
10556 
10557 	expand_assignment (lhs, rhs, false);
10558 	return const0_rtx;
10559       }
10560 
10561     case ADDR_EXPR:
10562       return expand_expr_addr_expr (exp, target, tmode, modifier);
10563 
10564     case REALPART_EXPR:
10565       op0 = expand_normal (treeop0);
10566       return read_complex_part (op0, false);
10567 
10568     case IMAGPART_EXPR:
10569       op0 = expand_normal (treeop0);
10570       return read_complex_part (op0, true);
10571 
10572     case RETURN_EXPR:
10573     case LABEL_EXPR:
10574     case GOTO_EXPR:
10575     case SWITCH_EXPR:
10576     case ASM_EXPR:
10577       /* Expanded in cfgexpand.c.  */
10578       gcc_unreachable ();
10579 
10580     case TRY_CATCH_EXPR:
10581     case CATCH_EXPR:
10582     case EH_FILTER_EXPR:
10583     case TRY_FINALLY_EXPR:
10584       /* Lowered by tree-eh.c.  */
10585       gcc_unreachable ();
10586 
10587     case WITH_CLEANUP_EXPR:
10588     case CLEANUP_POINT_EXPR:
10589     case TARGET_EXPR:
10590     case CASE_LABEL_EXPR:
10591     case VA_ARG_EXPR:
10592     case BIND_EXPR:
10593     case INIT_EXPR:
10594     case CONJ_EXPR:
10595     case COMPOUND_EXPR:
10596     case PREINCREMENT_EXPR:
10597     case PREDECREMENT_EXPR:
10598     case POSTINCREMENT_EXPR:
10599     case POSTDECREMENT_EXPR:
10600     case LOOP_EXPR:
10601     case EXIT_EXPR:
10602     case COMPOUND_LITERAL_EXPR:
10603       /* Lowered by gimplify.c.  */
10604       gcc_unreachable ();
10605 
10606     case FDESC_EXPR:
10607       /* Function descriptors are not valid except as
10608 	 initialization constants, and should not be expanded.  */
10609       gcc_unreachable ();
10610 
10611     case WITH_SIZE_EXPR:
10612       /* WITH_SIZE_EXPR expands to its first argument.  The caller should
10613 	 have pulled out the size to use in whatever context it needed.  */
10614       return expand_expr_real (treeop0, original_target, tmode,
10615 			       modifier, alt_rtl, inner_reference_p);
10616 
10617     default:
10618       return expand_expr_real_2 (&ops, target, tmode, modifier);
10619     }
10620 }
10621 
10622 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10623    signedness of TYPE), possibly returning the result in TARGET.  */
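/* For instance (illustrative only), reducing an SImode value to a signed
   3-bit precision amounts to "(x << 29) >> 29" with an arithmetic right
   shift, which is the shift pair emitted in the signed case below; the
   unsigned case masks with (1 << 3) - 1 instead.  */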
10624 static rtx
10625 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10626 {
10627   HOST_WIDE_INT prec = TYPE_PRECISION (type);
10628   if (target && GET_MODE (target) != GET_MODE (exp))
10629     target = 0;
10630   /* For constant values, reduce using build_int_cst_type. */
10631   if (CONST_INT_P (exp))
10632     {
10633       HOST_WIDE_INT value = INTVAL (exp);
10634       tree t = build_int_cst_type (type, value);
10635       return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10636     }
10637   else if (TYPE_UNSIGNED (type))
10638     {
10639       rtx mask = immed_double_int_const (double_int::mask (prec),
10640 					 GET_MODE (exp));
10641       return expand_and (GET_MODE (exp), exp, mask, target);
10642     }
10643   else
10644     {
10645       int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10646       exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10647 			  exp, count, target, 0);
10648       return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10649 			   exp, count, target, 0);
10650     }
10651 }
10652 
10653 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
10654    when applied to the address of EXP, produces an address known to be
10655    aligned to more than BIGGEST_ALIGNMENT.  */
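/* Illustrative shape (assumption, for exposition only): an offset written as
   (-(sizetype) &EXP) & (ALIGN - 1), with ALIGN a power of two whose byte
   value exceeds BIGGEST_ALIGNMENT / BITS_PER_UNIT, matches the pattern
   recognized below.  */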
10656 
10657 static int
10658 is_aligning_offset (const_tree offset, const_tree exp)
10659 {
10660   /* Strip off any conversions.  */
10661   while (CONVERT_EXPR_P (offset))
10662     offset = TREE_OPERAND (offset, 0);
10663 
10664   /* We must now have a BIT_AND_EXPR with a constant that is one less than
10665      a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
10666   if (TREE_CODE (offset) != BIT_AND_EXPR
10667       || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10668       || compare_tree_int (TREE_OPERAND (offset, 1),
10669 			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10670       || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10671     return 0;
10672 
10673   /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10674      It must be NEGATE_EXPR.  Then strip any more conversions.  */
10675   offset = TREE_OPERAND (offset, 0);
10676   while (CONVERT_EXPR_P (offset))
10677     offset = TREE_OPERAND (offset, 0);
10678 
10679   if (TREE_CODE (offset) != NEGATE_EXPR)
10680     return 0;
10681 
10682   offset = TREE_OPERAND (offset, 0);
10683   while (CONVERT_EXPR_P (offset))
10684     offset = TREE_OPERAND (offset, 0);
10685 
10686   /* This must now be the address of EXP.  */
10687   return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10688 }
10689 
10690 /* Return the tree node if ARG corresponds to a string constant, or zero
10691    if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
10692    in bytes within the string that ARG is accessing.  The type of the
10693    offset will be `sizetype'.  */
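/* For example (illustrative), both &"abc"[1] and "abc" + 1 are recognized
   here, as is the address of a const char array whose initializer is a
   string literal; *PTR_OFFSET is set to 1 in the first two cases.  */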
10694 
10695 tree
10696 string_constant (tree arg, tree *ptr_offset)
10697 {
10698   tree array, offset, lower_bound;
10699   STRIP_NOPS (arg);
10700 
10701   if (TREE_CODE (arg) == ADDR_EXPR)
10702     {
10703       if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10704 	{
10705 	  *ptr_offset = size_zero_node;
10706 	  return TREE_OPERAND (arg, 0);
10707 	}
10708       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10709 	{
10710 	  array = TREE_OPERAND (arg, 0);
10711 	  offset = size_zero_node;
10712 	}
10713       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10714 	{
10715 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10716 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10717 	  if (TREE_CODE (array) != STRING_CST
10718 	      && TREE_CODE (array) != VAR_DECL)
10719 	    return 0;
10720 
10721 	  /* Check if the array has a nonzero lower bound.  */
10722 	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10723 	  if (!integer_zerop (lower_bound))
10724 	    {
10725 	      /* If the offset and base aren't both constants, return 0.  */
10726 	      if (TREE_CODE (lower_bound) != INTEGER_CST)
10727 	        return 0;
10728 	      if (TREE_CODE (offset) != INTEGER_CST)
10729 		return 0;
10730 	      /* Adjust offset by the lower bound.  */
10731 	      offset = size_diffop (fold_convert (sizetype, offset),
10732 				    fold_convert (sizetype, lower_bound));
10733 	    }
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != ADDR_EXPR)
	    return 0;
	  array = TREE_OPERAND (array, 0);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL
	   || TREE_CODE (array) == CONST_DECL)
    {
      int length;
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
	  || !init
	  || TREE_CODE (init) != STRING_CST)
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (init)) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If the variable is bigger than the string literal, OFFSET must be
	 constant and within the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! tree_fits_uhwi_p (offset)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return init;
    }

  return 0;
}
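
/* Illustrative sketch only, not part of GCC: a typical caller (for
   instance a builtin expander) might use string_constant like this to
   read the byte that ARG addresses.  The surrounding variable names are
   hypothetical.  */
#if 0
{
  tree off;
  tree str = string_constant (arg, &off);
  if (str
      && tree_fits_uhwi_p (off)
      && compare_tree_int (off, TREE_STRING_LENGTH (str)) < 0)
    {
      /* P points at the character within the string constant that ARG
	 accesses.  */
      const char *p = TREE_STRING_POINTER (str) + tree_to_uhwi (off);
    }
}
#endif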

/* Generate code to calculate OPS, an exploded expression, using a
   store-flag instruction, and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
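  /* For a V4SImode comparison "a < b", for instance, this is equivalent
     to expanding VEC_COND_EXPR <a < b, {-1,-1,-1,-1}, {0,0,0,0}>, which
     produces the element-wise all-ones/all-zeros mask.  */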
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      tree if_true = constant_boolean_node (true, ops->type);
      tree if_false = constant_boolean_node (false, ops->type);
      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }
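  /* For instance, with a signed operand the cases above turn "x < 1" into
     an "x <= 0" test and "x > -1" into "x >= 0", while "x >= 1" becomes
     "x > 0" (GTU against zero for unsigned operands).  */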

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

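  /* For example, "(x & 8) != 0" folds to "(x >> 3) & 1", and "(x & 8) == 0"
     folds to "((x >> 3) & 1) ^ 1".  */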
  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
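
/* Illustrative sketch only, not part of GCC: when no cstore pattern can
   handle the comparison, emit_store_flag_force falls back to the
   set/jump/set sequence mentioned above, which for "target = (a < b)"
   looks roughly like the fragment below (names hypothetical).  */
#if 0
  target = 1;
  if (a < b)
    goto done;
  target = 0;
 done:;
#endif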


/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    int default_probability)
{
  struct expand_operand ops[5];
  enum machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
  return 1;
}
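
/* Illustrative sketch only, not part of GCC: at the source level, a dense
   switch such as the one below is the kind of statement the switch
   expansion code hands to try_casesi; here INDEX_EXPR is "i", MINVAL is 1
   and RANGE is 3.  The functions f0..f3 and d are hypothetical
   placeholders.  */
#if 0
  switch (i)
    {
    case 1: f0 (); break;
    case 2: f1 (); break;
    case 3: f2 (); break;
    case 4: f3 (); break;
    default: d (); break;
    }
#endif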

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
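  /* At the C level this is the familiar replacement of "lo <= i && i <= hi"
     by "(unsigned) (i - lo) <= (unsigned) (hi - lo)"; INDEX already holds
     i - lo and RANGE holds hi - lo.  */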

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);


  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
					     Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
			       gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
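
/* Illustrative sketch only, not part of GCC: using the GNU C computed-goto
   extension, the dispatch emitted above corresponds roughly to the
   hypothetical fragment below, where the label array plays the role of
   the jump table that follows TABLE_LABEL.  */
#if 0
  static void *table[] = { &&case0, &&case1, &&case2 };
  goto *table[index];
#endif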

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else if (TREE_CODE (elt) == FIXED_CST)
	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
						   inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
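
/* For example, the V4SImode VECTOR_CST { 1, 2, 3, 4 } becomes
   (const_vector:V4SI [(const_int 1) (const_int 2)
		       (const_int 3) (const_int 4)]).  */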

/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
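
/* For the C++ front end ("gxx"), for example, this yields
   "__gxx_personality_v0" with DWARF2/target unwinding and
   "__gxx_personality_sj0" with setjmp/longjmp exceptions.  */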

/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

#include "gt-expr.h"