/* Operations with affine combinations of trees.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "tree-affine.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"

/* Extends CST as appropriate for an affine combination of type TYPE.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}

/* Likewise for polynomial offsets.  */

static poly_widest_int
wide_int_ext_for_comb (const poly_widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}
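
/* For instance, with a 32-bit TYPE the constant 0xffffffff sign-extends
   to -1 and 2^32 + 3 reduces to 3, so all arithmetic on coefficients and
   offsets effectively happens modulo 2^32.  (Illustrative values, not
   taken from the surrounding code.)  */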

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;
  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to the constant CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const poly_widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb->type);
}

/* Sets COMB to the single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}
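
/* A sketch of the representation: after aff_combination_const (&c, type, 5)
   the combination is { offset = 5, n = 0, rest = NULL }, and after
   aff_combination_elt (&c, type, x) it is { offset = 0, n = 1,
   elts[0] = 1 * x, rest = NULL }, with x standing for an arbitrary tree.  */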

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
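
/* For instance, scaling { offset = 1, elts = { 2 * x } } by 3 yields
   { offset = 3, elts = { 6 * x } }, and scaling by 0 zeroes the whole
   combination.  In an 8-bit type, scaling { 128 * x } by 2 drops the
   element entirely, since 256 wraps to 0.  (Illustrative values.)  */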

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
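
/* Adding x with scale -2 to { 2 * x, 3 * y } cancels the first element,
   leaving { 3 * y }; once all MAX_AFF_ELTS slots are taken, further
   elements are folded into the REST tree as SCALE * ELT terms.
   (Illustrative combination.)  */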

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const poly_widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c->type);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}
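
/* E.g. adding { offset = 3, elts = { 2 * x } } to
   { offset = 1, elts = { 1 * x } } gives { offset = 4, elts = { 3 * x } },
   i.e. (x + 1) + (2x + 3) = 3x + 4.  (Illustrative values.)  */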

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
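
/* Widening goes through the tree form, so no information is lost;
   narrowing just re-extends the constants, e.g. converting an offset of
   256 to an 8-bit type yields 0.  (Illustrative values.)  */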

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  poly_int64 bitpos, bitsize, bytepos;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST], which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
	break;
      aff_combination_const (comb, type, bytepos);
      if (TREE_CODE (core) == MEM_REF)
	{
	  tree mem_offset = TREE_OPERAND (core, 1);
	  aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (expr);
	tree inner = TREE_OPERAND (expr, 0);
	tree itype = TREE_TYPE (inner);
	enum tree_code icode = TREE_CODE (inner);

	/* In principle this is a valid folding, but it isn't necessarily
	   an optimization, so do it here and not in fold_unary.  */
	if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
	    && TREE_CODE (itype) == INTEGER_TYPE
	    && TREE_CODE (otype) == INTEGER_TYPE
	    && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
	  {
	    tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

	    /* If the inner type has undefined overflow behavior, fold the
	       conversion for the two cases below:
		 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
		 (T1)(X + X)     -> (T1)X + (T1)X.  */
	    if (TYPE_OVERFLOW_UNDEFINED (itype)
		&& (TREE_CODE (op1) == INTEGER_CST
		    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
	      {
		op0 = fold_convert (otype, op0);
		op1 = fold_convert (otype, op1);
		expr = fold_build2 (icode, otype, op0, op1);
		tree_to_aff_combination (expr, type, comb);
		return;
	      }
	    wide_int minv, maxv;
	    /* If the inner type has wrapping overflow behavior, fold the
	       conversion for the case below:
		 (T1)(X - CST) -> (T1)X - (T1)CST
	       if X - CST does not overflow according to range information.
	       Also handle (T1)(X + CST) as (T1)(X - (-CST)).  */
	    if (TYPE_UNSIGNED (itype)
		&& TYPE_OVERFLOW_WRAPS (itype)
		&& TREE_CODE (op0) == SSA_NAME
		&& TREE_CODE (op1) == INTEGER_CST
		&& icode != MULT_EXPR
		&& get_range_info (op0, &minv, &maxv) == VR_RANGE)
	      {
		if (icode == PLUS_EXPR)
		  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
		if (wi::geu_p (minv, wi::to_wide (op1)))
		  {
		    op0 = fold_convert (otype, op0);
		    op1 = fold_convert (otype, op1);
		    expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
		    tree_to_aff_combination (expr, type, comb);
		    return;
		  }
	      }
	  }
      }
      break;

    default:
      {
	if (poly_int_tree_p (expr))
	  {
	    aff_combination_const (comb, type, wi::to_poly_widest (expr));
	    return;
	  }
	break;
      }
    }

  aff_combination_elt (comb, type, expr);
}
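
/* For example, the expression a + 4 * b - 7 decomposes into
   { offset = -7, elts = { 1 * a, 4 * b } }; anything the switch above
   cannot split, such as a call result, becomes a single element with
   coefficient 1.  (Illustrative expression.)  */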

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR may be NULL, in which case
   just ELT * SCALE is built.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
{
  enum tree_code code;

  widest_int scale = wide_int_ext_for_comb (scale_in, type);

  elt = fold_convert (type, elt);
  if (scale == 1)
    {
      if (!expr)
	return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}
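
/* Schematically: scale 1 yields EXPR + ELT, scale -1 yields EXPR - ELT
   (or -ELT when EXPR is NULL), and any other scale yields
   EXPR +- ELT * |SCALE|, the sign being folded into the PLUS/MINUS when
   EXPR is present.  */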

/* Makes a tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
  unsigned i;
  poly_widest_int off;
  int sgn;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  i = 0;
  if (POINTER_TYPE_P (type))
    {
      type = sizetype;
      if (comb->n > 0 && comb->elts[0].coef == 1
	  && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
	{
	  base = comb->elts[0].val;
	  ++i;
	}
    }

  for (; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (known_lt (comb->offset, 0))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);

  if (base)
    return fold_build_pointer_plus (base, expr);
  else
    return fold_convert (comb->type, expr);
}
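
/* E.g. { offset = -1, elts = { 1 * x } } becomes the tree x - 1, and a
   pointer-typed combination { offset = 8, elts = { 1 * p } } becomes a
   POINTER_PLUS_EXPR with base p and offset 8.  (Illustrative
   combinations.)  */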

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
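
/* Removal swaps in the last element rather than shifting, so removing
   index 1 from { x, y, z } leaves { x, z }; if a REST tree exists it is
   promoted into the freed slot.  (Illustrative elements.)  */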

/* Adds C * COEF * VAL to R.  VAL may be NULL, in which case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    {
      if (c->offset.is_constant ())
	/* Access coeffs[0] directly, for efficiency.  */
	aff_combination_add_elt (r, val, coef * c->offset.coeffs[0]);
      else
	{
	  /* c->offset is polynomial, so multiply VAL rather than COEF
	     by it.  */
	  tree offset = wide_int_to_tree (TREE_TYPE (val), c->offset);
	  val = fold_build2 (MULT_EXPR, TREE_TYPE (val), val, offset);
	  aff_combination_add_elt (r, val, coef);
	}
    }
  else
    aff_combination_add_cst (r, coef * c->offset);
}
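
/* This distributes one term of a product: with C = x + 1, COEF = 2 and
   VAL = y, the terms 2 * (x * y) and 2 * y are accumulated into R.
   (Illustrative values.)  */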

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  if (c2->offset.is_constant ())
    /* Access coeffs[0] directly, for efficiency.  */
    aff_combination_add_product (c1, c2->offset.coeffs[0], NULL, r);
  else
    {
      /* c2->offset is polynomial, so do the multiplication in tree form.  */
      tree offset = wide_int_to_tree (c2->type, c2->offset);
      aff_combination_add_product (c1, 1, offset, r);
    }
}
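
/* E.g. multiplying x + 1 by y + 2 accumulates x * y, 1 * y, 2 * x and
   the constant 2, i.e. (x + 1) * (y + 2) = x*y + y + 2x + 2.
   (Illustrative operands.)  */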

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple *def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      name_expansion **slot = NULL;
      if (*cache)
	slot = (*cache)->get (name);
      exp = slot ? *slot : NULL;
      if (!exp)
	{
	  /* Only bother to handle cases tree_to_aff_combination will.  */
	  switch (code)
	    {
	    case POINTER_PLUS_EXPR:
	    case PLUS_EXPR:
	    case MINUS_EXPR:
	    case MULT_EXPR:
	    case NEGATE_EXPR:
	    case BIT_NOT_EXPR:
	    CASE_CONVERT:
	      rhs = gimple_assign_rhs_to_tree (def);
	      break;
	    case ADDR_EXPR:
	    case INTEGER_CST:
	    case POLY_INT_CST:
	      rhs = gimple_assign_rhs1 (def);
	      break;
	    default:
	      continue;
	    }
	  tree_to_aff_combination (rhs, TREE_TYPE (name), &current);
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  if (!*cache)
	    *cache = new hash_map<tree, name_expansion *>;
	  (*cache)->put (name, exp);
	  aff_combination_expand (&current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}
      if (!useless_type_conversion_p (comb->type, current.type))
	aff_combination_convert (&current, comb->type);

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
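
/* For instance, if COMB is { 2 * a1 } and a1 is defined as a1 = a0 + 5,
   the expansion replaces the element with its definition, giving
   { 2 * a0, offset = 10 }.  (Illustrative SSA names.)  */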

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occurs, true is returned, and CST is stored to MULT and
   MULT_SET is set to true.  */

static bool
wide_int_constant_multiple_p (const poly_widest_int &val,
			      const poly_widest_int &div,
			      bool *mult_set, poly_widest_int *mult)
{
  poly_widest_int cst;

  if (known_eq (val, 0))
    {
      if (*mult_set && maybe_ne (*mult, 0))
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (maybe_eq (div, 0))
    return false;

  if (!multiple_p (val, div, &cst))
    return false;

  if (*mult_set && maybe_ne (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
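
/* E.g. VAL = 12, DIV = 4 sets *MULT to 3; a subsequent call with
   VAL = 8, DIV = 4 then fails, since 8 = 2 * 4 disagrees with the
   recorded multiplier 3.  (Illustrative values.)  */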

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     poly_widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && known_eq (val->offset, 0))
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
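
/* E.g. VAL = 6 * x + 12 and DIV = 2 * x + 4 yields *MULT = 3, because
   every coefficient and the offset share the same factor.
   (Illustrative combinations.)  */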

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
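
/* The output looks roughly like (for a combination 3 * x + 7):

   {
     type = int
     offset = 7
     elements = {
       [0] = x * 3
     }
   }

   modulo the exact indentation produced by the fprintf calls above.  */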

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes the address of the reference REF in ADDR.  The size of the
   accessed location is stored to SIZE.  Returns the ultimate containing
   object to which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, poly_widest_int *size)
{
  poly_int64 bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &rev, &vol);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bits_to_bytes_round_down (bitpos));
  aff_combination_add (addr, &tmp);

  *size = bits_to_bytes_round_up (bitsize);

  return base;
}
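
/* For a COMPONENT_REF like s.f, where f sits at byte offset 8, ADDR
   becomes &s + 8 and SIZE the byte size of f.  (Illustrative field
   layout.)  */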

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const poly_widest_int &size1,
			   const poly_widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (!ordered_p (diff->offset, 0))
    return false;

  if (maybe_lt (diff->offset, 0))
    {
      /* The second object is before the first one; we succeed if the last
	 element of the second object is before the start of the first one.  */
      return known_le (diff->offset + size2, 0);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return known_le (size1, diff->offset);
    }
}
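
/* E.g. DIFF = { offset = 16 } with SIZE1 = 16 cannot overlap (the second
   region starts exactly where the first ends), while DIFF = { offset = 8 }
   with SIZE1 = 16 may.  (Illustrative sizes.)  */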