/* Gimple decl, type, and expression support functions.

   Copyright (C) 2007-2016 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "stringpool.h"
#include "gimple-ssa.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "demangle.h"
#include "hash-set.h"
#include "rtl.h"

/* ----- Type related -----  */

/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
   useless type conversion, otherwise return false.

   This function implicitly defines the middle-end type system.  With
   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
   the following invariants shall be fulfilled:

     1) useless_type_conversion_p is transitive.
	If a < b and b < c then a < c.

     2) useless_type_conversion_p is not symmetric.
	From a < b does not follow a > b.

     3) Types define the available set of operations applicable to values.
	A type conversion is useless if the operations for the target type
	are a subset of the operations for the source type.  For example,
	casts to void* are useless, casts from void* are not (void* can't
	be dereferenced or offsetted, only copied, hence its set of operations
	is a strict subset of that of all other data pointer types).  Casts
	to const T* are useless (the result can't be written to), casts from
	const T* to T* are not.  */

bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts between pointers to different address spaces.  */
      if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	  != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
	return false;
      /* Do not lose casts to function pointer types.  */
      if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
	   || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
	  && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
	       || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
	return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* Changes in machine mode are never useless conversions because the RTL
     middle-end expects explicit conversions between modes.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
	  || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
	return false;

      /* Preserve conversions to/from BOOLEAN_TYPE if types are not
	 of precision one.  */
      if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
	   != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
	  && TYPE_PRECISION (outer_type) != 1)
	return false;

      /* We don't need to preserve changes in the types' minimum or
	 maximum values in general as these do not generate code
	 unless the types' precisions are different.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
	   && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
	   && FIXED_POINT_TYPE_P (outer_type))
    return TYPE_SATURATING (inner_type) == TYPE_SATURATING (outer_type);

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
	   && POINTER_TYPE_P (outer_type))
    {
      /* We do not care for const qualification of the pointed-to types
	 as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent.  */
      return true;
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
	   && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
	   && TREE_CODE (outer_type) == VECTOR_TYPE
	   && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
	   && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve various attributes.  */
      if (TYPE_REVERSE_STORAGE_ORDER (inner_type)
	  != TYPE_REVERSE_STORAGE_ORDER (outer_type))
	return false;
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
	return false;

      /* Conversions from array types with unknown extent to
	 array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type) && TYPE_DOMAIN (outer_type))
	return false;

      /* Nor are conversions from array types with non-constant size to
	 array types with constant size, or between array types with
	 different constant sizes.  */
      if (TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
	  && (!TYPE_SIZE (inner_type)
	      || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
	      || !tree_int_cst_equal (TYPE_SIZE (outer_type),
				      TYPE_SIZE (inner_type))))
	return false;

      /* Check conversions between arrays with partially known extents.
	 If the array min/max values are constant they have to match.
	 Otherwise allow conversions to unknown and variable extents.
	 In particular this declares conversions that may change the
	 mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type)
	  && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
	{
	  tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
	  tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

	  /* After gimplification a variable min/max value carries no
	     additional information compared to a NULL value.  All that
	     matters has been lowered to be part of the IL.  */
	  if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
	    inner_min = NULL_TREE;
	  if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
	    outer_min = NULL_TREE;
	  if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
	    inner_max = NULL_TREE;
	  if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
	    outer_max = NULL_TREE;

	  /* Conversions NULL / variable <- cst are useless, but not
	     the other way around.  */
	  if (outer_min
	      && (!inner_min
		  || !tree_int_cst_equal (inner_min, outer_min)))
	    return false;
	  if (outer_max
	      && (!inner_max
		  || !tree_int_cst_equal (inner_max, outer_max)))
	    return false;
	}

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
					TREE_TYPE (inner_type));
    }

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
	    || TREE_CODE (inner_type) == METHOD_TYPE)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type)))
	return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
	  && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
					 TYPE_METHOD_BASETYPE (inner_type)))
	return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!prototype_p (outer_type))
	return true;

      /* If the unqualified argument types are compatible the conversion
	 is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
	return true;

      for (outer_parm = TYPE_ARG_TYPES (outer_type),
	   inner_parm = TYPE_ARG_TYPES (inner_type);
	   outer_parm && inner_parm;
	   outer_parm = TREE_CHAIN (outer_parm),
	   inner_parm = TREE_CHAIN (inner_parm))
	if (!useless_type_conversion_p
	       (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
		TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
	  return false;

      /* If there is a mismatch in the number of arguments the functions
	 are not compatible.  */
      if (outer_parm || inner_parm)
	return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
	return comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions between types that would otherwise have to be
     compared structurally.  */
  else if (AGGREGATE_TYPE_P (inner_type)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return TYPE_CANONICAL (inner_type)
	   && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type);

  else if (TREE_CODE (inner_type) == OFFSET_TYPE
	   && TREE_CODE (outer_type) == OFFSET_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type))
	   && useless_type_conversion_p
	        (TYPE_OFFSET_BASETYPE (outer_type),
		 TYPE_OFFSET_BASETYPE (inner_type));

  return false;
}
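
/* Illustrative examples of the contract above (explanatory sketch only,
   not exercised by the compiler; assumes an LP64 target and the usual
   global type nodes):

     useless_type_conversion_p (long_integer_type_node, integer_type_node)
       => false, the machine modes differ
     useless_type_conversion_p (unsigned_type_node, integer_type_node)
       => false, the signedness differs
     useless_type_conversion_p (ptr_type_node,
				build_pointer_type (integer_type_node))
       => true, any data pointer converts uselessly to void *

   Qualifier-only changes on value types are stripped by TYPE_MAIN_VARIANT
   and are therefore useless as well.  */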


/* ----- Decl related -----  */

/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
	 with it, then it does not make sense for it to receive a
	 GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, this function returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
	{
	  dmgl_opts = DMGL_VERBOSE
		      | DMGL_ANSI
		      | DMGL_GNU_V3
		      | DMGL_RET_POSTFIX;
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    dmgl_opts |= DMGL_PARAMS;
	}

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}


/* Create a new VAR_DECL and copy information from VAR to it.  */

tree
copy_var_decl (tree var, tree name, tree type)
{
  tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);

  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  DECL_CONTEXT (copy) = DECL_CONTEXT (var);
  TREE_NO_WARNING (copy) = TREE_NO_WARNING (var);
  TREE_USED (copy) = 1;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
  DECL_ATTRIBUTES (copy) = DECL_ATTRIBUTES (var);
  if (DECL_USER_ALIGN (var))
    {
      DECL_ALIGN (copy) = DECL_ALIGN (var);
      DECL_USER_ALIGN (copy) = 1;
    }

  return copy;
}

/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending of a period followed by
   up to five characters.  (Java uses ".class".)  */

static inline void
remove_suffix (char *name, int len)
{
  int i;

  for (i = 2;  i < 8 && len > i;  i++)
    {
      if (name[len - i] == '.')
	{
	  name[len - i] = '\0';
	  break;
	}
    }
}

/* Create a new temporary name with PREFIX.  Return an identifier.  */

static GTY(()) unsigned int tmp_var_id_num;

tree
create_tmp_var_name (const char *prefix)
{
  char *tmp_name;

  if (prefix)
    {
      char *preftmp = ASTRDUP (prefix);

      remove_suffix (preftmp, strlen (preftmp));
      clean_symbol_name (preftmp);

      prefix = preftmp;
    }

  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
  return get_identifier (tmp_name);
}

/* Create a new temporary variable declaration of type TYPE.
   Do NOT push it into the current binding.  */

tree
create_tmp_var_raw (tree type, const char *prefix)
{
  tree tmp_var;

  tmp_var = build_decl (input_location,
			VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
			type);

  /* The variable was declared by the compiler.  */
  DECL_ARTIFICIAL (tmp_var) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (tmp_var) = 1;

  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  DECL_EXTERNAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  TREE_USED (tmp_var) = 1;

  return tmp_var;
}

/* Create a new temporary variable declaration of type TYPE.  DO push the
   variable into the current binding.  Further, assume that this is called
   only from gimplification or optimization, at which point the creation of
   certain types is a bug.  */

tree
create_tmp_var (tree type, const char *prefix)
{
  tree tmp_var;

  /* We don't allow types that are addressable (meaning we can't make copies),
     or incomplete.  We also used to reject all variable-size objects here,
     but now support those for which a constant upper bound can be obtained.
     The processing for variable sizes is performed in gimple_add_tmp_var,
     the point at which it really matters and which may be reached via paths
     not going through this function, e.g. after direct calls to
     create_tmp_var_raw.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  tmp_var = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var (tmp_var);
  return tmp_var;
}

/* Create a new temporary variable declaration of type TYPE by calling
   create_tmp_var, and if TYPE is a vector or a complex number, mark the
   new temporary as a gimple register.  */

tree
create_tmp_reg (tree type, const char *prefix)
{
  tree tmp;

  tmp = create_tmp_var (type, prefix);
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;

  return tmp;
}
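
/* Usage sketch (explanatory only, assuming we are inside gimplification
   or an optimization pass where cfun is set up):

     tree t = create_tmp_reg (complex_double_type_node, "ctmp");

   creates an artificial, debug-ignored temporary, pushes it into the
   current function via gimple_add_tmp_var, and marks it DECL_GIMPLE_REG_P
   so the whole complex value can be rewritten into SSA form instead of
   being decomposed through memory.  */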

/* Create a new temporary variable declaration of type TYPE in function FN.
   The temporary is added to FN with gimple_add_tmp_var_fn, and if TYPE is
   a vector or a complex number, the new temporary is marked as a gimple
   register.  */

tree
create_tmp_reg_fn (struct function *fn, tree type, const char *prefix)
{
  tree tmp;

  tmp = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var_fn (fn, tmp);
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;

  return tmp;
}


/* ----- Expression related -----  */

/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
		       tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
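
/* Worked example (explanatory sketch only; 'expr', 'a' and 'b' are
   hypothetical names): for a binary expression such as the PLUS_EXPR
   'a + b',

     extract_ops_from_tree (expr, &code, &op1, &op2, &op3);

   sets code to PLUS_EXPR, op1 to 'a', op2 to 'b' and op3 to NULL_TREE,
   while an SSA name or a constant is classified as GIMPLE_SINGLE_RHS and
   comes back unchanged in op1.  */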

/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (COMPARISON_CLASS_P (cond)
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}
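
/* Illustrative note (explanatory only): a condition written as '!x' comes
   in as a TRUTH_NOT_EXPR and is canonicalized to 'x == 0', while a bare
   value 'x' becomes 'x != 0', so a GIMPLE_COND built from the result
   always carries a comparison code with two operands.  */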

/*  Return true if T is a valid LHS for a GIMPLE assignment expression.  */

bool
is_gimple_lvalue (tree t)
{
  return (is_gimple_addressable (t)
	  || TREE_CODE (t) == WITH_SIZE_EXPR
	  /* These are complex lvalues, but don't have addresses, so they
	     go here.  */
	  || TREE_CODE (t) == BIT_FIELD_REF);
}

/*  Return true if T is a GIMPLE condition.  */

bool
is_gimple_condexpr (tree t)
{
  return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
				&& !tree_could_throw_p (t)
				&& is_gimple_val (TREE_OPERAND (t, 0))
				&& is_gimple_val (TREE_OPERAND (t, 1))));
}

/* Return true if T is a gimple address.  */

bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
	   || TREE_CODE (op) == ARRAY_RANGE_REF)
	  && !is_gimple_val (TREE_OPERAND (op, 1)))
	    return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}
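
/* Illustrative sketch (explanatory only; 'a', 's' and 'i' are hypothetical
   names): '&s.f[3]' and '&a[i]' with 'i' a gimple value are gimple
   addresses, whereas '&a[i + 1]' is not, because the index expression is
   not a gimple value and would have to be gimplified first.  */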

/* Return true if T is a gimple invariant address.  */

bool
is_gimple_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
	      && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
		  || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
}

/* Return true if T is a gimple invariant address at IPA level
   (so addresses of variables on stack are not allowed).  */

bool
is_gimple_ip_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
	      && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
		  || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
}
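
/* Illustrative examples (explanatory only, using hypothetical decls):
   '&global_array[3]' is invariant both within a function and at the IPA
   level.  The address of a local (automatic) variable is invariant within
   its function, so is_gimple_invariant_address accepts it, but
   is_gimple_ip_invariant_address rejects it because stack slots differ
   between invocations.  '&a[i]' with a variable index is invariant in
   neither sense.  */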

/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
   form of function invariant.  */

bool
is_gimple_min_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
   form of gimple minimal invariant.  */

bool
is_gimple_ip_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_ip_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a non-aggregate register variable.  */

bool
is_gimple_reg (tree t)
{
  if (virtual_operand_p (t))
    return false;

  if (TREE_CODE (t) == SSA_NAME)
    return true;

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes they might imply.  All around,
     it seems safest to not do too much optimization with these at the
     tree level at all.  We'll have to rely on the rtl optimizers to
     clean this up, as there we've got all the appropriate bits exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}
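
/* Illustrative note (explanatory only): a plain local 'int i' whose address
   is never taken satisfies is_gimple_reg and can be rewritten into SSA
   form, while a volatile variable, a variable of aggregate type, or one
   that needs to live in memory (e.g. because it is addressable) does
   not.  */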


/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */

bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}

/* Similarly, but accept hard registers as inputs to asm statements.  */

bool
is_gimple_asm_val (tree t)
{
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return true;

  return is_gimple_val (t);
}

/* Return true if T is a GIMPLE minimal lvalue.  */

bool
is_gimple_min_lval (tree t)
{
  if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
    return false;
  return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid function operand of a CALL_EXPR.  */

bool
is_gimple_call_addr (tree t)
{
  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
}

/* Return true if T is a valid address operand of a MEM_REF.  */

bool
is_gimple_mem_ref_addr (tree t)
{
  return (is_gimple_reg (t)
	  || TREE_CODE (t) == INTEGER_CST
	  || (TREE_CODE (t) == ADDR_EXPR
	      && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
		  || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
}

/* Hold trees marked addressable during expand.  */

static hash_set<tree> *mark_addressable_queue;

/* Mark X as addressable or queue it up if called during expand.  We
   don't want to apply it immediately during expand because decls are
   made addressable at that point due to RTL-only concerns, such as
   uses of memcpy for block moves, and TREE_ADDRESSABLE changes
   is_gimple_reg, which might make it seem like a variable that used
   to be a gimple_reg shouldn't have been an SSA name.  So we queue up
   this flag setting and only apply it when we're done with GIMPLE and
   only RTL issues matter.  */

static void
mark_addressable_1 (tree x)
{
  if (!currently_expanding_to_rtl)
    {
      TREE_ADDRESSABLE (x) = 1;
      return;
    }

  if (!mark_addressable_queue)
    mark_addressable_queue = new hash_set<tree>();
  mark_addressable_queue->add (x);
}

/* Adaptor for mark_addressable_1 for use in hash_set traversal.  */

bool
mark_addressable_2 (tree const &x, void * ATTRIBUTE_UNUSED = NULL)
{
  mark_addressable_1 (x);
  return false;
}

/* Mark all queued trees as addressable, and empty the queue.  To be
   called right after clearing CURRENTLY_EXPANDING_TO_RTL.  */

void
flush_mark_addressable_queue ()
{
  gcc_assert (!currently_expanding_to_rtl);
  if (mark_addressable_queue)
    {
      mark_addressable_queue->traverse<void*, mark_addressable_2> (NULL);
      delete mark_addressable_queue;
      mark_addressable_queue = NULL;
    }
}

/* Mark X addressable.  Unlike the langhook we expect X to be in gimple
   form and we don't do any syntax checking.  */

void
mark_addressable (tree x)
{
  while (handled_component_p (x))
    x = TREE_OPERAND (x, 0);
  if (TREE_CODE (x) == MEM_REF
      && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
    x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
  if (TREE_CODE (x) != VAR_DECL
      && TREE_CODE (x) != PARM_DECL
      && TREE_CODE (x) != RESULT_DECL)
    return;
  mark_addressable_1 (x);

  /* Also mark the artificial SSA_NAME that points to the partition of X.  */
  if (TREE_CODE (x) == VAR_DECL
      && !DECL_EXTERNAL (x)
      && !TREE_STATIC (x)
      && cfun->gimple_df != NULL
      && cfun->gimple_df->decls_to_pointers != NULL)
    {
      tree *namep = cfun->gimple_df->decls_to_pointers->get (x);
      if (namep)
	mark_addressable_1 (*namep);
    }
}

/* Returns true iff T is a valid RHS for an assignment to a renamed
   user -- or front-end generated artificial -- variable.  */

bool
is_gimple_reg_rhs (tree t)
{
  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
}

#include "gt-gimple-expr.h"