1 /* Analysis of polymorphic call context.
2    Copyright (C) 2013-2018 Free Software Foundation, Inc.
3    Contributed by Jan Hubicka
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "tree-pass.h"
29 #include "tree-ssa-operands.h"
30 #include "streamer-hooks.h"
31 #include "cgraph.h"
32 #include "data-streamer.h"
33 #include "diagnostic.h"
34 #include "alias.h"
35 #include "fold-const.h"
36 #include "calls.h"
37 #include "ipa-utils.h"
38 #include "tree-dfa.h"
39 #include "gimple-pretty-print.h"
40 #include "tree-into-ssa.h"
41 #include "params.h"
42 
/* Return true when TYPE contains a polymorphic type and thus is interesting
   to the devirtualization machinery.  */
45 
46 static bool contains_type_p (tree, HOST_WIDE_INT, tree,
47 			     bool consider_placement_new = true,
48 			     bool consider_bases = true);
49 
50 bool
51 contains_polymorphic_type_p (const_tree type)
52 {
53   type = TYPE_MAIN_VARIANT (type);
54 
55   if (RECORD_OR_UNION_TYPE_P (type))
56     {
57       if (TYPE_BINFO (type)
58           && polymorphic_type_binfo_p (TYPE_BINFO (type)))
59 	return true;
60       for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
61 	if (TREE_CODE (fld) == FIELD_DECL
62 	    && !DECL_ARTIFICIAL (fld)
63 	    && contains_polymorphic_type_p (TREE_TYPE (fld)))
64 	  return true;
65       return false;
66     }
67   if (TREE_CODE (type) == ARRAY_TYPE)
68     return contains_polymorphic_type_p (TREE_TYPE (type));
69   return false;
70 }
71 
/* Return true if it seems valid to use placement new to build EXPECTED_TYPE
   at position CUR_OFFSET within TYPE.

   A POD can be changed to an instance of a polymorphic type by
   placement new.  Here we play it safe and assume that any
   non-polymorphic type is POD.  */
78 bool
79 possible_placement_new (tree type, tree expected_type,
80 			HOST_WIDE_INT cur_offset)
81 {
82   if (cur_offset < 0)
83     return true;
84   return ((TREE_CODE (type) != RECORD_TYPE
85 	   || !TYPE_BINFO (type)
86 	   || cur_offset >= POINTER_SIZE
87 	   || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
88 	  && (!TYPE_SIZE (type)
89 	      || !tree_fits_shwi_p (TYPE_SIZE (type))
90 	      || (cur_offset
91 		  + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
92 		     : POINTER_SIZE)
93 		  <= tree_to_uhwi (TYPE_SIZE (type)))));
94 }
95 
/* THIS->OUTER_TYPE is the type of the memory object in which an object of
   OTR_TYPE is contained at THIS->OFFSET.  Walk the memory representation of
   THIS->OUTER_TYPE and find the outermost class type that matches
   OTR_TYPE or contains OTR_TYPE as a base.  Update THIS
   to represent it.

   If OTR_TYPE is NULL, just find the outermost polymorphic type with
   a virtual table present at position OFFSET.
104 
105    For example when THIS represents type
106    class A
107      {
108        int a;
109        class B b;
110      }
111    and we look for type at offset sizeof(int), we end up with B and offset 0.
112    If the same is produced by multiple inheritance, we end up with A and offset
113    sizeof(int).
114 
   If we cannot find the corresponding class, give up by setting
   THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to 0.
   Return true when the lookup was successful.

   When CONSIDER_PLACEMENT_NEW is false, reject contexts that may become
   valid only by construction of a new polymorphic object inside, by means
   of placement new.
122 
123    When CONSIDER_BASES is false, only look for actual fields, not base types
124    of TYPE.  */
125 
126 bool
127 ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
128 						       bool consider_placement_new,
129 						       bool consider_bases)
130 {
131   tree type = outer_type;
132   HOST_WIDE_INT cur_offset = offset;
133   bool speculative = false;
134   bool size_unknown = false;
135   unsigned HOST_WIDE_INT otr_type_size = POINTER_SIZE;
136 
  /* Update OUTER_TYPE to match OTR_TYPE if it is not set.  */
138   if (!outer_type)
139     {
140       clear_outer_type (otr_type);
141       type = otr_type;
142       cur_offset = 0;
143     }
 /* See if OFFSET points inside OUTER_TYPE.  If it does not, we know
    that the context is either invalid, or the instance type must be
    derived from OUTER_TYPE.

    Because the instance type may contain a field whose type is OUTER_TYPE,
    we cannot derive any effective information about it.

    TODO: In the case we know all derived types, we can definitely do better
    here.  */
153   else if (TYPE_SIZE (outer_type)
154 	   && tree_fits_shwi_p (TYPE_SIZE (outer_type))
155 	   && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
156 	   && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
157    {
158      bool der = maybe_derived_type; /* clear_outer_type will reset it.  */
159      bool dyn = dynamic;
160      clear_outer_type (otr_type);
161      type = otr_type;
162      cur_offset = 0;
163 
     /* If a derived type is not allowed, we know that the context is invalid.
	For dynamic types, we really do not have information about the
	size of the memory location.  It is possible that a completely
	different type is stored after outer_type.  */
168      if (!der && !dyn)
169        {
170 	 clear_speculation ();
171 	 invalid = true;
172 	 return false;
173        }
174    }
175 
176   if (otr_type && TYPE_SIZE (otr_type)
177       && tree_fits_shwi_p (TYPE_SIZE (otr_type)))
178     otr_type_size = tree_to_uhwi (TYPE_SIZE (otr_type));
179 
180   if (!type || offset < 0)
181     goto no_useful_type_info;
182 
  /* Find the sub-object that OFFSET actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).

     This loop is performed twice; the first time for outer_type and the second
     time for speculative_outer_type.  The second run has SPECULATIVE set.  */
188   while (true)
189     {
190       unsigned HOST_WIDE_INT pos, size;
191       tree fld;
192 
      /* If we do not know the size of TYPE, we need to be more conservative
         about accepting cases where we cannot find OTR_TYPE.
	 Generally the types that do matter here are of constant size.
	 The size_unknown case should be very rare.  */
197       if (TYPE_SIZE (type)
198 	  && tree_fits_shwi_p (TYPE_SIZE (type))
199 	  && tree_to_shwi (TYPE_SIZE (type)) >= 0)
200 	size_unknown = false;
201       else
202 	size_unknown = true;
203 
204       /* On a match, just return what we found.  */
205       if ((otr_type
206 	   && types_odr_comparable (type, otr_type)
207 	   && types_same_for_odr (type, otr_type))
208 	  || (!otr_type
209 	      && TREE_CODE (type) == RECORD_TYPE
210 	      && TYPE_BINFO (type)
211 	      && polymorphic_type_binfo_p (TYPE_BINFO (type))))
212 	{
213 	  if (speculative)
214 	    {
215 	      /* If we did not match the offset, just give up on speculation.  */
216 	      if (cur_offset != 0
217 		  /* Also check if speculation did not end up being same as
218 		     non-speculation.  */
219 		  || (types_must_be_same_for_odr (speculative_outer_type,
220 						  outer_type)
221 		      && (maybe_derived_type
222 			  == speculative_maybe_derived_type)))
223 		clear_speculation ();
224 	      return true;
225 	    }
226 	  else
227 	    {
228 	      /* If type is known to be final, do not worry about derived
229 		 types.  Testing it here may help us to avoid speculation.  */
230 	      if (otr_type && TREE_CODE (outer_type) == RECORD_TYPE
231 		  && (!in_lto_p || odr_type_p (outer_type))
232 		  && type_with_linkage_p (outer_type)
233 		  && type_known_to_have_no_derivations_p (outer_type))
234 		maybe_derived_type = false;
235 
	      /* A type cannot contain itself at a non-zero offset.  In that case
		 just give up.  Still accept the case where the size is not known:
		 either the second copy may appear past the end of the type or
		 within the non-POD buffer located inside the variably sized type
		 itself.  */
241 	      if (cur_offset != 0)
242 		goto no_useful_type_info;
	      /* If we determined the type precisely or we have no clue on
		 speculation, we are done.  */
245 	      if (!maybe_derived_type || !speculative_outer_type
246 		  || !speculation_consistent_p (speculative_outer_type,
247 					        speculative_offset,
248 					        speculative_maybe_derived_type,
249 						otr_type))
250 		{
251 		  clear_speculation ();
252 	          return true;
253 		}
254 	      /* Otherwise look into speculation now.  */
255 	      else
256 		{
257 		  speculative = true;
258 		  type = speculative_outer_type;
259 		  cur_offset = speculative_offset;
260 		  continue;
261 		}
262 	    }
263 	}
264 
      /* Walk the fields and find the one corresponding to OFFSET.  */
266       if (TREE_CODE (type) == RECORD_TYPE)
267 	{
268 	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
269 	    {
270 	      if (TREE_CODE (fld) != FIELD_DECL
271 		  || TREE_TYPE (fld) == error_mark_node)
272 		continue;
273 
274 	      pos = int_bit_position (fld);
275 	      if (pos > (unsigned HOST_WIDE_INT)cur_offset)
276 		continue;
277 
278 	      /* Do not consider vptr itself.  Not even for placement new.  */
279 	      if (!pos && DECL_ARTIFICIAL (fld)
280 		  && POINTER_TYPE_P (TREE_TYPE (fld))
281 		  && TYPE_BINFO (type)
282 		  && polymorphic_type_binfo_p (TYPE_BINFO (type)))
283 		continue;
284 
285 	      if (!DECL_SIZE (fld) || !tree_fits_uhwi_p (DECL_SIZE (fld)))
286 		goto no_useful_type_info;
287 	      size = tree_to_uhwi (DECL_SIZE (fld));
288 
	      /* We can always skip types smaller than pointer size:
		 those cannot contain a virtual table pointer.

		 Disqualifying fields that are too small to fit OTR_TYPE
		 saves work needed to walk them for no benefit.
		 Because of the way the bases are packed into a class, the
		 field's size may be smaller than the type size, so this needs
		 to be done with care.  */
297 
298 	      if (pos <= (unsigned HOST_WIDE_INT)cur_offset
299 		  && (pos + size) >= (unsigned HOST_WIDE_INT)cur_offset
300 				     + POINTER_SIZE
301 		  && (!otr_type
302 		      || !TYPE_SIZE (TREE_TYPE (fld))
303 		      || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld)))
304 		      || (pos + tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld))))
305 			  >= cur_offset + otr_type_size))
306 		break;
307 	    }
308 
309 	  if (!fld)
310 	    goto no_useful_type_info;
311 
312 	  type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
313 	  cur_offset -= pos;
314 	  /* DECL_ARTIFICIAL represents a basetype.  */
315 	  if (!DECL_ARTIFICIAL (fld))
316 	    {
317 	      if (!speculative)
318 		{
319 		  outer_type = type;
320 		  offset = cur_offset;
		  /* As soon as we see a field containing the type,
		     we know we are not looking for derivations.  */
323 		  maybe_derived_type = false;
324 		}
325 	      else
326 		{
327 		  speculative_outer_type = type;
328 		  speculative_offset = cur_offset;
329 		  speculative_maybe_derived_type = false;
330 		}
331 	    }
332 	  else if (!consider_bases)
333 	    goto no_useful_type_info;
334 	}
335       else if (TREE_CODE (type) == ARRAY_TYPE)
336 	{
337 	  tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));
338 
	  /* Give up if we do not know the array element size.
	     Also give up on non-polymorphic types as they are used
	     as buffers for placement new.  */
342 	  if (!TYPE_SIZE (subtype)
343 	      || !tree_fits_shwi_p (TYPE_SIZE (subtype))
344 	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
345 	      || !contains_polymorphic_type_p (subtype))
346 	    goto no_useful_type_info;
347 
348 	  HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));
349 
	  /* We may see a buffer for placement new.  In this case the expected
	     type can be bigger than the subtype.  */
352 	  if (TYPE_SIZE (subtype)
353 	      && (cur_offset + otr_type_size
354 		  > tree_to_uhwi (TYPE_SIZE (subtype))))
355 	    goto no_useful_type_info;
356 
357 	  cur_offset = new_offset;
358 	  type = TYPE_MAIN_VARIANT (subtype);
359 	  if (!speculative)
360 	    {
361 	      outer_type = type;
362 	      offset = cur_offset;
363 	      maybe_derived_type = false;
364 	    }
365 	  else
366 	    {
367 	      speculative_outer_type = type;
368 	      speculative_offset = cur_offset;
369 	      speculative_maybe_derived_type = false;
370 	    }
371 	}
372       /* Give up on anything else.  */
373       else
374 	{
375 no_useful_type_info:
376 	  if (maybe_derived_type && !speculative
377 	      && TREE_CODE (outer_type) == RECORD_TYPE
378 	      && TREE_CODE (otr_type) == RECORD_TYPE
379 	      && TYPE_BINFO (otr_type)
380 	      && !offset
381 	      && get_binfo_at_offset (TYPE_BINFO (otr_type), 0, outer_type))
382 	    {
383 	      clear_outer_type (otr_type);
384 	      if (!speculative_outer_type
385 		  || !speculation_consistent_p (speculative_outer_type,
386 						speculative_offset,
387 					        speculative_maybe_derived_type,
388 						otr_type))
389 		clear_speculation ();
390 	      if (speculative_outer_type)
391 		{
392 		  speculative = true;
393 		  type = speculative_outer_type;
394 		  cur_offset = speculative_offset;
395 		}
396 	      else
397 		return true;
398 	    }
	  /* We found no way to embed OTR_TYPE in TYPE.
	     We still permit two special cases - placement new and
	     the case of variably sized types containing themselves.  */
402 	  if (!speculative
403 	      && consider_placement_new
404 	      && (size_unknown || !type || maybe_derived_type
405 		  || possible_placement_new (type, otr_type, cur_offset)))
406 	    {
407 	      /* In these weird cases we want to accept the context.
408 		 In non-speculative run we have no useful outer_type info
409 		 (TODO: we may eventually want to record upper bound on the
410 		  type size that can be used to prune the walk),
411 		 but we still want to consider speculation that may
412 		 give useful info.  */
413 	      if (!speculative)
414 		{
415 		  clear_outer_type (otr_type);
416 		  if (!speculative_outer_type
417 		      || !speculation_consistent_p (speculative_outer_type,
418 						    speculative_offset,
419 						    speculative_maybe_derived_type,
420 						    otr_type))
421 		    clear_speculation ();
422 		  if (speculative_outer_type)
423 		    {
424 		      speculative = true;
425 		      type = speculative_outer_type;
426 		      cur_offset = speculative_offset;
427 		    }
428 		  else
429 		    return true;
430 		}
431 	      else
432 		{
433 		  clear_speculation ();
434 	          return true;
435 		}
436 	    }
437 	  else
438 	    {
439 	      clear_speculation ();
440 	      if (speculative)
441 		return true;
442 	      clear_outer_type (otr_type);
443 	      invalid = true;
444 	      return false;
445 	    }
446 	}
447     }
448 }
449 
450 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
   CONSIDER_PLACEMENT_NEW makes the function accept cases where OTR_TYPE can
   be built within OUTER_TYPE by means of placement new.  CONSIDER_BASES makes
   the function accept cases where OTR_TYPE appears as a base of OUTER_TYPE or
   as a base of one of the fields of OUTER_TYPE.  */
455 
456 static bool
457 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
458 		 tree otr_type,
459 		 bool consider_placement_new,
460 		 bool consider_bases)
461 {
462   ipa_polymorphic_call_context context;
463 
464   /* Check that type is within range.  */
465   if (offset < 0)
466     return false;
467 
468   /* PR ipa/71207
     As OUTER_TYPE can be a type with diamond virtual inheritance, it is not
     guaranteed that INNER_TYPE will fit within OUTER_TYPE at a given offset.
     It can happen that INNER_TYPE also contains a base object; however, it
     would point to the same instance in OUTER_TYPE.  */
473 
474   context.offset = offset;
475   context.outer_type = TYPE_MAIN_VARIANT (outer_type);
476   context.maybe_derived_type = false;
477   context.dynamic = false;
478   return context.restrict_to_inner_class (otr_type, consider_placement_new,
479 					  consider_bases);
480 }
481 
482 
/* Return a FUNCTION_DECL if FN represents a constructor or destructor.
484    If CHECK_CLONES is true, also check for clones of ctor/dtors.  */
485 
486 tree
487 polymorphic_ctor_dtor_p (tree fn, bool check_clones)
488 {
489   if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
490       || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
491     {
492       if (!check_clones)
493 	return NULL_TREE;
494 
495       /* Watch for clones where we constant propagated the first
496 	 argument (pointer to the instance).  */
497       fn = DECL_ABSTRACT_ORIGIN (fn);
498       if (!fn
499 	  || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
500 	  || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
501 	return NULL_TREE;
502     }
503 
504   if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
505     return NULL_TREE;
506 
507   return fn;
508 }
509 
510 /* Return a FUNCTION_DECL if BLOCK represents a constructor or destructor.
511    If CHECK_CLONES is true, also check for clones of ctor/dtors.  */
512 
513 tree
514 inlined_polymorphic_ctor_dtor_block_p (tree block, bool check_clones)
515 {
516   tree fn = block_ultimate_origin (block);
517   if (fn == NULL || TREE_CODE (fn) != FUNCTION_DECL)
518     return NULL_TREE;
519 
520   return polymorphic_ctor_dtor_p (fn, check_clones);
521 }
522 
523 
/* We know that the instance is stored in a variable or parameter
   (not dynamically allocated) and we want to disprove the fact
   that it may be in construction at the invocation of CALL.

   BASE represents the memory location where the instance is stored.
   If BASE is NULL, it is assumed to be in global memory.
   OUTER_TYPE is the known type of the instance or NULL if not
   known.

   For the variable to be in construction, we actually need to
   be in the constructor of the corresponding global variable, or
   the inline stack of CALL must contain the constructor.
   Check this condition.  This check works safely only before
   IPA passes, because inline stacks may become out of date
   later.  */
539 
540 bool
541 decl_maybe_in_construction_p (tree base, tree outer_type,
542 			      gimple *call, tree function)
543 {
544   if (outer_type)
545     outer_type = TYPE_MAIN_VARIANT (outer_type);
546   gcc_assert (!base || DECL_P (base));
547 
  /* After inlining, code unification optimizations may invalidate
     inline stacks.  Also, we need to give up on global variables after
550      IPA, because addresses of these may have been propagated to their
551      constructors.  */
552   if (DECL_STRUCT_FUNCTION (function)->after_inlining)
553     return true;
554 
  /* Pure functions cannot make any changes to the dynamic type;
     that requires writing to memory.  */
557   if ((!base || !auto_var_in_fn_p (base, function))
558       && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
559     return false;
560 
561   bool check_clones = !base || is_global_var (base);
562   for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
563        block = BLOCK_SUPERCONTEXT (block))
564     if (tree fn = inlined_polymorphic_ctor_dtor_block_p (block, check_clones))
565       {
566 	tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
567 
568 	if (!outer_type || !types_odr_comparable (type, outer_type))
569 	  {
570 	    if (TREE_CODE (type) == RECORD_TYPE
571 		&& TYPE_BINFO (type)
572 		&& polymorphic_type_binfo_p (TYPE_BINFO (type)))
573 	      return true;
574 	  }
575  	else if (types_same_for_odr (type, outer_type))
576 	  return true;
577       }
578 
579   if (!base || (VAR_P (base) && is_global_var (base)))
580     {
581       if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
582 	  || (!DECL_CXX_CONSTRUCTOR_P (function)
583 	      && !DECL_CXX_DESTRUCTOR_P (function)))
584 	{
585 	  if (!DECL_ABSTRACT_ORIGIN (function))
586 	    return false;
587 	  /* Watch for clones where we constant propagated the first
588 	     argument (pointer to the instance).  */
589 	  function = DECL_ABSTRACT_ORIGIN (function);
590 	  if (!function
591 	      || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
592 	      || (!DECL_CXX_CONSTRUCTOR_P (function)
593 		  && !DECL_CXX_DESTRUCTOR_P (function)))
594 	    return false;
595 	}
596       tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (function));
597       if (!outer_type || !types_odr_comparable (type, outer_type))
598 	{
599 	  if (TREE_CODE (type) == RECORD_TYPE
600 	      && TYPE_BINFO (type)
601 	      && polymorphic_type_binfo_p (TYPE_BINFO (type)))
602 	    return true;
603 	}
604       else if (types_same_for_odr (type, outer_type))
605 	return true;
606     }
607   return false;
608 }
609 
/* Dump a human-readable context to F.  If NEWLINE is true, it will be
   terminated by a newline.  */
612 
613 void
614 ipa_polymorphic_call_context::dump (FILE *f, bool newline) const
615 {
616   fprintf (f, "    ");
617   if (invalid)
618     fprintf (f, "Call is known to be undefined");
619   else
620     {
621       if (useless_p ())
622 	fprintf (f, "nothing known");
623       if (outer_type || offset)
624 	{
625 	  fprintf (f, "Outer type%s:", dynamic ? " (dynamic)":"");
626 	  print_generic_expr (f, outer_type, TDF_SLIM);
627 	  if (maybe_derived_type)
628 	    fprintf (f, " (or a derived type)");
629 	  if (maybe_in_construction)
630 	    fprintf (f, " (maybe in construction)");
631 	  fprintf (f, " offset " HOST_WIDE_INT_PRINT_DEC,
632 		   offset);
633 	}
634       if (speculative_outer_type)
635 	{
636 	  if (outer_type || offset)
637 	    fprintf (f, " ");
638 	  fprintf (f, "Speculative outer type:");
639 	  print_generic_expr (f, speculative_outer_type, TDF_SLIM);
640 	  if (speculative_maybe_derived_type)
641 	    fprintf (f, " (or a derived type)");
642 	  fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC,
643 		   speculative_offset);
644 	}
645     }
646   if (newline)
647     fprintf(f, "\n");
648 }
649 
650 /* Print context to stderr.  */
651 
652 void
653 ipa_polymorphic_call_context::debug () const
654 {
655   dump (stderr);
656 }
657 
658 /* Stream out the context to OB.  */
659 
660 void
661 ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
662 {
663   struct bitpack_d bp = bitpack_create (ob->main_stream);
664 
665   bp_pack_value (&bp, invalid, 1);
666   bp_pack_value (&bp, maybe_in_construction, 1);
667   bp_pack_value (&bp, maybe_derived_type, 1);
668   bp_pack_value (&bp, speculative_maybe_derived_type, 1);
669   bp_pack_value (&bp, dynamic, 1);
670   bp_pack_value (&bp, outer_type != NULL, 1);
671   bp_pack_value (&bp, offset != 0, 1);
672   bp_pack_value (&bp, speculative_outer_type != NULL, 1);
673   streamer_write_bitpack (&bp);
674 
675   if (outer_type != NULL)
676     stream_write_tree (ob, outer_type, true);
677   if (offset)
678     streamer_write_hwi (ob, offset);
679   if (speculative_outer_type != NULL)
680     {
681       stream_write_tree (ob, speculative_outer_type, true);
682       streamer_write_hwi (ob, speculative_offset);
683     }
684   else
685     gcc_assert (!speculative_offset);
686 }
687 
688 /* Stream in the context from IB and DATA_IN.  */
689 
690 void
691 ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
692 					 struct data_in *data_in)
693 {
694   struct bitpack_d bp = streamer_read_bitpack (ib);
695 
696   invalid = bp_unpack_value (&bp, 1);
697   maybe_in_construction = bp_unpack_value (&bp, 1);
698   maybe_derived_type = bp_unpack_value (&bp, 1);
699   speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
700   dynamic = bp_unpack_value (&bp, 1);
701   bool outer_type_p = bp_unpack_value (&bp, 1);
702   bool offset_p = bp_unpack_value (&bp, 1);
703   bool speculative_outer_type_p = bp_unpack_value (&bp, 1);
704 
705   if (outer_type_p)
706     outer_type = stream_read_tree (ib, data_in);
707   else
708     outer_type = NULL;
709   if (offset_p)
710     offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
711   else
712     offset = 0;
713   if (speculative_outer_type_p)
714     {
715       speculative_outer_type = stream_read_tree (ib, data_in);
716       speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
717     }
718   else
719     {
720       speculative_outer_type = NULL;
721       speculative_offset = 0;
722     }
723 }
724 
/* Produce a polymorphic call context for a call to a method of an instance
   that is located within BASE (which is assumed to be a decl) at offset OFF. */
727 
728 void
729 ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
730 {
731   gcc_assert (DECL_P (base));
732   clear_speculation ();
733 
734   if (!contains_polymorphic_type_p (TREE_TYPE (base)))
735     {
736       clear_outer_type ();
737       offset = off;
738       return;
739     }
740   outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
741   offset = off;
742   /* Make very conservative assumption that all objects
743      may be in construction.
744 
745      It is up to caller to revisit this via
746      get_dynamic_type or decl_maybe_in_construction_p.  */
747   maybe_in_construction = true;
748   maybe_derived_type = false;
749   dynamic = false;
750 }
751 
/* CST is an invariant (address of a decl); try to get a meaningful
   polymorphic call context for a polymorphic call to a method
   of an instance of OTR_TYPE that is located at offset OFF of this invariant.
   Return FALSE if nothing meaningful can be found.  */
756 
757 bool
758 ipa_polymorphic_call_context::set_by_invariant (tree cst,
759 						tree otr_type,
760 						HOST_WIDE_INT off)
761 {
762   poly_int64 offset2, size, max_size;
763   bool reverse;
764   tree base;
765 
766   invalid = false;
767   off = 0;
768   clear_outer_type (otr_type);
769 
770   if (TREE_CODE (cst) != ADDR_EXPR)
771     return false;
772 
773   cst = TREE_OPERAND (cst, 0);
774   base = get_ref_base_and_extent (cst, &offset2, &size, &max_size, &reverse);
775   if (!DECL_P (base) || !known_size_p (max_size) || maybe_ne (max_size, size))
776     return false;
777 
  /* Only type-inconsistent programs can have an otr_type that is
     not part of the outer type.  */
780   if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
781     return false;
782 
783   set_by_decl (base, off);
784   return true;
785 }
786 
/* See if OP is an SSA name initialized as a copy or by a single assignment.
   If so, walk the SSA graph up.  Because a simple conditional PHI is considered
   a copy, GLOBAL_VISITED may be used to avoid an infinite loop when walking the
   SSA graph.  */
791 
792 static tree
793 walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
794 {
795   hash_set <tree> *visited = NULL;
796   STRIP_NOPS (op);
797   while (TREE_CODE (op) == SSA_NAME
798 	 && !SSA_NAME_IS_DEFAULT_DEF (op)
799 	 /* We might be called via fold_stmt during cfgcleanup where
800 	    SSA form need not be up-to-date.  */
801 	 && !name_registered_for_update_p (op)
802 	 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op))
803 	     || gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI))
804     {
805       if (global_visited)
806 	{
807 	  if (!*global_visited)
808 	    *global_visited = new hash_set<tree>;
809 	  if ((*global_visited)->add (op))
810 	    goto done;
811 	}
812       else
813 	{
814 	  if (!visited)
815 	    visited = new hash_set<tree>;
816 	  if (visited->add (op))
817 	    goto done;
818 	}
819       /* Special case
820 	 if (ptr == 0)
821 	   ptr = 0;
822 	 else
823 	   ptr = ptr.foo;
824 	 This pattern is implicitly produced for casts to non-primary
	 bases.  When doing context analysis, we do not really care
	 about the case where the pointer is NULL, because the call will be
	 undefined anyway.  */
828       if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
829 	{
830 	  gimple *phi = SSA_NAME_DEF_STMT (op);
831 
832 	  if (gimple_phi_num_args (phi) > 2)
833 	    goto done;
834 	  if (gimple_phi_num_args (phi) == 1)
835 	    op = gimple_phi_arg_def (phi, 0);
836 	  else if (integer_zerop (gimple_phi_arg_def (phi, 0)))
837 	    op = gimple_phi_arg_def (phi, 1);
838 	  else if (integer_zerop (gimple_phi_arg_def (phi, 1)))
839 	    op = gimple_phi_arg_def (phi, 0);
840 	  else
841 	    goto done;
842 	}
843       else
844 	{
845 	  if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
846 	    goto done;
847 	  op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
848 	}
849       STRIP_NOPS (op);
850     }
851 done:
852   if (visited)
853     delete (visited);
854   return op;
855 }
856 
/* Create a polymorphic call context from IP invariant CST.
   This is typically &global_var.
   OTR_TYPE specifies the type of the polymorphic call or NULL if unknown; OFF
   is the offset of the call.  */
861 
862 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
863 							    tree otr_type,
864 							    HOST_WIDE_INT off)
865 {
866   clear_speculation ();
867   set_by_invariant (cst, otr_type, off);
868 }
869 
/* Build a context for pointer REF contained in FNDECL at statement STMT.
   If INSTANCE is non-NULL, return a pointer to the object described by
   the context, or the DECL in which the context is contained.  */
873 
874 ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
875 							    tree ref,
876 							    gimple *stmt,
877 							    tree *instance)
878 {
879   tree otr_type = NULL;
880   tree base_pointer;
881   hash_set <tree> *visited = NULL;
882 
883   if (TREE_CODE (ref) == OBJ_TYPE_REF)
884     {
885       otr_type = obj_type_ref_class (ref);
886       base_pointer = OBJ_TYPE_REF_OBJECT (ref);
887     }
888   else
889     base_pointer = ref;
890 
891   /* Set up basic info in case we find nothing interesting in the analysis.  */
892   clear_speculation ();
893   clear_outer_type (otr_type);
894   invalid = false;
895 
896   /* Walk SSA for outer object.  */
897   while (true)
898     {
899       base_pointer = walk_ssa_copies (base_pointer, &visited);
900       if (TREE_CODE (base_pointer) == ADDR_EXPR)
901 	{
902 	  HOST_WIDE_INT offset2, size;
903 	  bool reverse;
904 	  tree base
905 	    = get_ref_base_and_extent_hwi (TREE_OPERAND (base_pointer, 0),
906 					   &offset2, &size, &reverse);
907 	  if (!base)
908 	    break;
909 
910 	  combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
911 				    offset + offset2,
912 				    true,
913 				    NULL /* Do not change outer type.  */);
914 
915 	  /* If this is a varying address, punt.  */
916 	  if (TREE_CODE (base) == MEM_REF || DECL_P (base))
917 	    {
	      /* We found a dereference of a pointer.  The type of the pointer
		 and of the MEM_REF is meaningless, but we can look further.  */
920 	      offset_int mem_offset;
921 	      if (TREE_CODE (base) == MEM_REF
922 		  && mem_ref_offset (base).is_constant (&mem_offset))
923 		{
924 		  offset_int o = mem_offset * BITS_PER_UNIT;
925 		  o += offset;
926 		  o += offset2;
927 		  if (!wi::fits_shwi_p (o))
928 		    break;
929 		  base_pointer = TREE_OPERAND (base, 0);
930 		  offset = o.to_shwi ();
931 		  outer_type = NULL;
932 		}
933 	      /* We found base object.  In this case the outer_type
934 		 is known.  */
935 	      else if (DECL_P (base))
936 		{
937 		  if (visited)
938 		    delete (visited);
		  /* Only type-inconsistent programs can have an otr_type that is
		     not part of the outer type.  */
941 		  if (otr_type
942 		      && !contains_type_p (TREE_TYPE (base),
943 					   offset + offset2, otr_type))
944 		    {
945 		      invalid = true;
946 		      if (instance)
947 			*instance = base_pointer;
948 		      return;
949 		    }
950 		  set_by_decl (base, offset + offset2);
951 		  if (outer_type && maybe_in_construction && stmt)
952 		    maybe_in_construction
953 		     = decl_maybe_in_construction_p (base,
954 						     outer_type,
955 						     stmt,
956 						     fndecl);
957 		  if (instance)
958 		    *instance = base;
959 		  return;
960 		}
961 	      else
962 		break;
963 	    }
964 	  else
965 	    break;
966 	}
967       else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
968 	       && TREE_CODE (TREE_OPERAND (base_pointer, 1)) == INTEGER_CST)
969 	{
970 	  offset_int o
971 	    = offset_int::from (wi::to_wide (TREE_OPERAND (base_pointer, 1)),
972 				SIGNED);
973 	  o *= BITS_PER_UNIT;
974 	  o += offset;
975 	  if (!wi::fits_shwi_p (o))
976 	    break;
977 	  offset = o.to_shwi ();
978 	  base_pointer = TREE_OPERAND (base_pointer, 0);
979 	}
980       else
981 	break;
982     }
983 
984   if (visited)
985     delete (visited);
986 
987   /* Try to determine type of the outer object.  */
988   if (TREE_CODE (base_pointer) == SSA_NAME
989       && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
990       && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
991     {
      /* See if the parameter is the THIS pointer of a method.  */
993       if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
994 	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
995 	{
996 	  outer_type
997 	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
998 	  gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE
999 		      || TREE_CODE (outer_type) == UNION_TYPE);
1000 
	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case the outer type is less
	     informative than the inner type and we should forget
	     about it.  */
1005 	  if ((otr_type
1006 	       && !contains_type_p (outer_type, offset,
1007 				    otr_type))
1008 	      || !contains_polymorphic_type_p (outer_type))
1009 	    {
1010 	      outer_type = NULL;
1011 	      if (instance)
1012 		*instance = base_pointer;
1013 	      return;
1014 	    }
1015 
1016 	  dynamic = true;
1017 
1018 	  /* If the function is constructor or destructor, then
1019 	     the type is possibly in construction, but we know
1020 	     it is not derived type.  */
1021 	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1022 	      || DECL_CXX_DESTRUCTOR_P (fndecl))
1023 	    {
1024 	      maybe_in_construction = true;
1025 	      maybe_derived_type = false;
1026 	    }
1027 	  else
1028 	    {
1029 	      maybe_derived_type = true;
1030 	      maybe_in_construction = false;
1031 	    }
1032 	  if (instance)
1033 	    *instance = base_pointer;
1034 	  return;
1035 	}
1036       /* Non-PODs passed by value are really passed by invisible
1037 	 reference.  In this case we also know the type of the
1038 	 object.  */
1039       if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1040 	{
1041 	  outer_type
1042 	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  /* Only type-inconsistent programs can have an otr_type that is
	     not part of the outer type.  */
1045 	  if (otr_type && !contains_type_p (outer_type, offset,
1046 					    otr_type))
1047 	    {
1048 	      invalid = true;
1049 	      if (instance)
1050 		*instance = base_pointer;
1051 	      return;
1052 	    }
1053 	  /* Non-polymorphic types have no interest for us.  */
1054 	  else if (!otr_type && !contains_polymorphic_type_p (outer_type))
1055 	    {
1056 	      outer_type = NULL;
1057 	      if (instance)
1058 		*instance = base_pointer;
1059 	      return;
1060 	    }
1061 	  maybe_derived_type = false;
1062 	  maybe_in_construction = false;
1063 	  if (instance)
1064 	    *instance = base_pointer;
1065 	  return;
1066 	}
1067     }
1068 
1069   tree base_type = TREE_TYPE (base_pointer);
1070 
1071   if (TREE_CODE (base_pointer) == SSA_NAME
1072       && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1073       && !(TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL
1074 	   || TREE_CODE (SSA_NAME_VAR (base_pointer)) == RESULT_DECL))
1075     {
1076       invalid = true;
1077       if (instance)
1078 	*instance = base_pointer;
1079       return;
1080     }
1081   if (TREE_CODE (base_pointer) == SSA_NAME
1082       && SSA_NAME_DEF_STMT (base_pointer)
1083       && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1084     base_type = TREE_TYPE (gimple_assign_rhs1
1085 			    (SSA_NAME_DEF_STMT (base_pointer)));
1086 
1087   if (base_type && POINTER_TYPE_P (base_type))
1088     combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
1089 			      offset,
1090 			      true, NULL /* Do not change type here */);
  /* TODO: There are multiple ways to derive a type.  For instance,
     BASE_POINTER may be passed to a constructor call prior to our reference.
     We do not do this kind of flow-sensitive analysis yet.  */
1094   if (instance)
1095     *instance = base_pointer;
1096   return;
1097 }
1098 
1099 /* Structure to be passed in between detect_type_change and
1100    check_stmt_for_type_change.  */
1101 
1102 struct type_change_info
1103 {
1104   /* Offset into the object where there is the virtual method pointer we are
1105      looking for.  */
1106   HOST_WIDE_INT offset;
1107   /* The declaration or SSA_NAME pointer of the base that we are checking for
1108      type change.  */
1109   tree instance;
1110   /* The reference to virtual table pointer used.  */
1111   tree vtbl_ptr_ref;
1112   tree otr_type;
1113   /* If we actually can tell the type that the object has changed to, it is
1114      stored in this field.  Otherwise it remains NULL_TREE.  */
1115   tree known_current_type;
1116   HOST_WIDE_INT known_current_offset;
1117 
1118   /* Set to nonzero if we possibly missed some dynamic type changes and we
1119      should consider the set to be speculative.  */
1120   unsigned speculative;
1121 
1122   /* Set to true if dynamic type change has been detected.  */
1123   bool type_maybe_changed;
1124   /* Set to true if multiple types have been encountered.  known_current_type
1125      must be disregarded in that case.  */
1126   bool multiple_types_encountered;
1127   bool seen_unanalyzed_store;
1128 };
1129 
/* Return true if STMT is not a call and can modify a virtual method table
   pointer.  We take advantage of the fact that vtable stores must appear
   within constructor and destructor functions.  */
1133 
1134 static bool
1135 noncall_stmt_may_be_vtbl_ptr_store (gimple *stmt)
1136 {
1137   if (is_gimple_assign (stmt))
1138     {
1139       tree lhs = gimple_assign_lhs (stmt);
1140 
1141       if (gimple_clobber_p (stmt))
1142 	return false;
1143       if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
1144 	{
1145 	  if (flag_strict_aliasing
1146 	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
1147 	    return false;
1148 
1149 	  if (TREE_CODE (lhs) == COMPONENT_REF
1150 	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1151 	    return false;
1152 	  /* In the future we might want to use get_ref_base_and_extent to find
1153 	     if there is a field corresponding to the offset and if so, proceed
	     almost as if it were a component ref.  */
1155 	}
1156     }
1157 
1158   /* Code unification may mess with inline stacks.  */
1159   if (cfun->after_inlining)
1160     return true;
1161 
1162   /* Walk the inline stack and watch out for ctors/dtors.
1163      TODO: Maybe we can require the store to appear in toplevel
1164      block of CTOR/DTOR.  */
1165   for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
1166        block = BLOCK_SUPERCONTEXT (block))
1167     if (BLOCK_ABSTRACT_ORIGIN (block)
1168 	&& TREE_CODE (block_ultimate_origin (block)) == FUNCTION_DECL)
1169       return inlined_polymorphic_ctor_dtor_block_p (block, false);
1170   return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
1171 	  && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
1172 	      || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
1173 }
1174 
1175 /* If STMT can be proved to be an assignment to the virtual method table
   pointer of the instance described by TCI and the type associated with the
   new table can be identified, return the type.  Otherwise return NULL_TREE if
   the type changes in an unknown way, or ERROR_MARK_NODE if the type is
   unchanged.  */
1179 
1180 static tree
1181 extr_type_from_vtbl_ptr_store (gimple *stmt, struct type_change_info *tci,
1182 			       HOST_WIDE_INT *type_offset)
1183 {
1184   poly_int64 offset, size, max_size;
1185   tree lhs, rhs, base;
1186   bool reverse;
1187 
1188   if (!gimple_assign_single_p (stmt))
1189     return NULL_TREE;
1190 
1191   lhs = gimple_assign_lhs (stmt);
1192   rhs = gimple_assign_rhs1 (stmt);
1193   if (TREE_CODE (lhs) != COMPONENT_REF
1194       || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1195      {
1196 	if (dump_file)
1197 	  fprintf (dump_file, "  LHS is not virtual table.\n");
1198 	return NULL_TREE;
1199      }
1200 
1201   if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
1202     ;
1203   else
1204     {
1205       base = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
1206       if (DECL_P (tci->instance))
1207 	{
1208 	  if (base != tci->instance)
1209 	    {
1210 	      if (dump_file)
1211 		{
1212 		  fprintf (dump_file, "    base:");
1213 		  print_generic_expr (dump_file, base, TDF_SLIM);
1214 		  fprintf (dump_file, " does not match instance:");
1215 		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1216 		  fprintf (dump_file, "\n");
1217 		}
1218 	      return NULL_TREE;
1219 	    }
1220 	}
1221       else if (TREE_CODE (base) == MEM_REF)
1222 	{
1223 	  if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0))
1224 	    {
1225 	      if (dump_file)
1226 		{
1227 		  fprintf (dump_file, "    base mem ref:");
1228 		  print_generic_expr (dump_file, base, TDF_SLIM);
1229 		  fprintf (dump_file, " does not match instance:");
1230 		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1231 		  fprintf (dump_file, "\n");
1232 		}
1233 	      return NULL_TREE;
1234 	    }
1235 	  if (!integer_zerop (TREE_OPERAND (base, 1)))
1236 	    {
1237 	      if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
1238 		{
1239 		  if (dump_file)
1240 		    {
1241 		      fprintf (dump_file, "    base mem ref:");
1242 		      print_generic_expr (dump_file, base, TDF_SLIM);
1243 		      fprintf (dump_file, " has non-representable offset:");
1244 		      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1245 		      fprintf (dump_file, "\n");
1246 		    }
1247 		  return NULL_TREE;
1248 		}
1249 	      else
1250 	        offset += tree_to_shwi (TREE_OPERAND (base, 1)) * BITS_PER_UNIT;
1251 	    }
1252 	}
1253       else if (!operand_equal_p (tci->instance, base, 0)
1254 	       || tci->offset)
1255 	{
1256 	  if (dump_file)
1257 	    {
1258 	      fprintf (dump_file, "    base:");
1259 	      print_generic_expr (dump_file, base, TDF_SLIM);
1260 	      fprintf (dump_file, " does not match instance:");
1261 	      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1262 	      fprintf (dump_file, " with offset %i\n", (int)tci->offset);
1263 	    }
1264 	  return tci->offset > POINTER_SIZE ? error_mark_node : NULL_TREE;
1265 	}
1266       if (maybe_ne (offset, tci->offset)
1267 	  || maybe_ne (size, POINTER_SIZE)
1268 	  || maybe_ne (max_size, POINTER_SIZE))
1269 	{
1270 	  if (dump_file)
1271 	    {
1272 	      fprintf (dump_file, "    wrong offset ");
1273 	      print_dec (offset, dump_file);
1274 	      fprintf (dump_file, "!=%i or size ", (int) tci->offset);
1275 	      print_dec (size, dump_file);
1276 	      fprintf (dump_file, "\n");
1277 	    }
1278 	  return (known_le (offset + POINTER_SIZE, tci->offset)
1279 		  || (known_size_p (max_size)
1280 		      && known_gt (tci->offset + POINTER_SIZE,
1281 				   offset + max_size))
1282 		  ? error_mark_node : NULL);
1283 	}
1284     }
1285 
1286   tree vtable;
1287   unsigned HOST_WIDE_INT offset2;
1288 
1289   if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
1290     {
1291       if (dump_file)
1292 	fprintf (dump_file, "    Failed to lookup binfo\n");
1293       return NULL;
1294     }
1295 
1296   tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1297 					       offset2, vtable);
1298   if (!binfo)
1299     {
1300       if (dump_file)
1301 	fprintf (dump_file, "    Construction vtable used\n");
      /* FIXME: We should support construction contexts.  */
1303       return NULL;
1304     }
1305 
1306   *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
1307   return DECL_CONTEXT (vtable);
1308 }
1309 
/* Record a dynamic type change of TCI to TYPE at OFFSET.  */
1311 
1312 static void
1313 record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
1314 {
1315   if (dump_file)
1316     {
1317       if (type)
1318 	{
1319           fprintf (dump_file, "  Recording type: ");
1320 	  print_generic_expr (dump_file, type, TDF_SLIM);
1321           fprintf (dump_file, " at offset %i\n", (int)offset);
1322 	}
1323      else
1324        fprintf (dump_file, "  Recording unknown type\n");
1325     }
1326 
  /* If we found a constructor of a type that is not polymorphic or
     that may contain the type in question as a field (not as a base),
     restrict to the inner class first to make the type matching below
     happier.  */
1331   if (type
1332       && (offset
1333           || (TREE_CODE (type) != RECORD_TYPE
1334 	      || !TYPE_BINFO (type)
1335 	      || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
1336     {
1337       ipa_polymorphic_call_context context;
1338 
1339       context.offset = offset;
1340       context.outer_type = type;
1341       context.maybe_in_construction = false;
1342       context.maybe_derived_type = false;
1343       context.dynamic = true;
1344       /* If we failed to find the inner type, we know that the call
1345 	 would be undefined for type produced here.  */
1346       if (!context.restrict_to_inner_class (tci->otr_type))
1347 	{
1348 	  if (dump_file)
1349 	    fprintf (dump_file, "  Ignoring; does not contain otr_type\n");
1350 	  return;
1351 	}
      /* Watch for the case where we reached a POD type and anticipate
	 placement new.  */
1354       if (!context.maybe_derived_type)
1355 	{
1356           type = context.outer_type;
1357           offset = context.offset;
1358 	}
1359     }
1360   if (tci->type_maybe_changed
1361       && (!types_same_for_odr (type, tci->known_current_type)
1362 	  || offset != tci->known_current_offset))
1363     tci->multiple_types_encountered = true;
1364   tci->known_current_type = TYPE_MAIN_VARIANT (type);
1365   tci->known_current_offset = offset;
1366   tci->type_maybe_changed = true;
1367 }
1368 
1369 
1370 /* The maximum number of may-defs we visit when looking for a must-def
1371    that changes the dynamic type in check_stmt_for_type_change.  Tuned
   after the PR12392 testcase which, when unlimited, spends 40% of its time
   within these alias walks and 8% with the following limit.  */
1374 
1375 static inline bool
1376 csftc_abort_walking_p (unsigned speculative)
1377 {
1378   unsigned max = PARAM_VALUE (PARAM_MAX_SPECULATIVE_DEVIRT_MAYDEFS);
1379   return speculative > max ? true : false;
1380 }
1381 
1382 /* Callback of walk_aliased_vdefs and a helper function for
1383    detect_type_change to check whether a particular statement may modify
1384    the virtual table pointer, and if possible also determine the new type of
1385    the (sub-)object.  It stores its result into DATA, which points to a
1386    type_change_info structure.  */
1387 
1388 static bool
1389 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
1390 {
1391   gimple *stmt = SSA_NAME_DEF_STMT (vdef);
1392   struct type_change_info *tci = (struct type_change_info *) data;
1393   tree fn;
1394 
1395   /* If we already gave up, just terminate the rest of walk.  */
1396   if (tci->multiple_types_encountered)
1397     return true;
1398 
1399   if (is_gimple_call (stmt))
1400     {
1401       if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
1402 	return false;
1403 
1404       /* Check for a constructor call.  */
1405       if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
1406 	  && DECL_CXX_CONSTRUCTOR_P (fn)
1407 	  && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
1408 	  && gimple_call_num_args (stmt))
1409       {
1410 	tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
1411 	tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
1412 	HOST_WIDE_INT offset = 0;
1413 	bool reverse;
1414 
1415 	if (dump_file)
1416 	  {
1417 	    fprintf (dump_file, "  Checking constructor call: ");
1418 	    print_gimple_stmt (dump_file, stmt, 0);
1419 	  }
1420 
	/* See if the THIS parameter seems like an instance pointer.  */
1422 	if (TREE_CODE (op) == ADDR_EXPR)
1423 	  {
1424 	    HOST_WIDE_INT size;
1425 	    op = get_ref_base_and_extent_hwi (TREE_OPERAND (op, 0),
1426 					      &offset, &size, &reverse);
1427 	    if (!op)
1428 	      {
1429                 tci->speculative++;
1430 	        return csftc_abort_walking_p (tci->speculative);
1431 	      }
1432 	    if (TREE_CODE (op) == MEM_REF)
1433 	      {
1434 		if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
1435 		  {
1436                     tci->speculative++;
1437 		    return csftc_abort_walking_p (tci->speculative);
1438 		  }
1439 		offset += tree_to_shwi (TREE_OPERAND (op, 1))
1440 			  * BITS_PER_UNIT;
1441 		op = TREE_OPERAND (op, 0);
1442 	      }
1443 	    else if (DECL_P (op))
1444 	      ;
1445 	    else
1446 	      {
1447                 tci->speculative++;
1448 	        return csftc_abort_walking_p (tci->speculative);
1449 	      }
1450 	    op = walk_ssa_copies (op);
1451 	  }
1452 	if (operand_equal_p (op, tci->instance, 0)
1453 	    && TYPE_SIZE (type)
1454 	    && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
1455 	    && tree_fits_shwi_p (TYPE_SIZE (type))
1456 	    && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset
1457 	    /* Some inlined constructors may look as follows:
1458 		  _3 = operator new (16);
1459 		  MEM[(struct  &)_3] ={v} {CLOBBER};
1460 		  MEM[(struct CompositeClass *)_3]._vptr.CompositeClass
1461 		    = &MEM[(void *)&_ZTV14CompositeClass + 16B];
1462 		  _7 = &MEM[(struct CompositeClass *)_3].object;
1463 		  EmptyClass::EmptyClass (_7);
1464 
	       When determining the dynamic type of _3, because we stop at the
	       first dynamic type found, we would stop on EmptyClass::EmptyClass
	       (_7).  In this case EmptyClass is not even polymorphic and we
	       would miss that it is contained in an outer type that is
	       polymorphic.  */
1469 
1470 	    && (tci->offset == offset || contains_polymorphic_type_p (type)))
1471 	  {
1472 	    record_known_type (tci, type, tci->offset - offset);
1473 	    return true;
1474 	  }
1475       }
     /* Calls may possibly change the dynamic type by placement new.  Assume
        this will not happen, but make the result speculative only.  */
1478      if (dump_file)
1479 	{
1480           fprintf (dump_file, "  Function call may change dynamic type:");
1481 	  print_gimple_stmt (dump_file, stmt, 0);
1482 	}
1483      tci->speculative++;
1484      return csftc_abort_walking_p (tci->speculative);
1485    }
1486   /* Check for inlined virtual table store.  */
1487   else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
1488     {
1489       tree type;
1490       HOST_WIDE_INT offset = 0;
1491       if (dump_file)
1492 	{
1493 	  fprintf (dump_file, "  Checking vtbl store: ");
1494 	  print_gimple_stmt (dump_file, stmt, 0);
1495 	}
1496 
1497       type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
1498       if (type == error_mark_node)
1499 	return false;
1500       gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
1501       if (!type)
1502 	{
1503 	  if (dump_file)
1504 	    fprintf (dump_file, "  Unanalyzed store may change type.\n");
1505 	  tci->seen_unanalyzed_store = true;
1506 	  tci->speculative++;
1507 	}
1508       else
1509         record_known_type (tci, type, offset);
1510       return true;
1511     }
1512   else
1513     return false;
1514 }
1515 
/* THIS is a polymorphic call context obtained from get_polymorphic_context.
   OTR_OBJECT is a pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
   INSTANCE is a pointer to the outer instance as returned by
   get_polymorphic_context.  To avoid creation of temporary expressions,
   INSTANCE may also be a declaration, if get_polymorphic_context found the
   value to be in static storage.
1522 
   If the type of the instance is not fully determined
   (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/MAYBE_DERIVED_TYPE
   is set), try to walk memory writes and find the actual construction of the
1526    instance.
1527 
1528    Return true if memory is unchanged from function entry.
1529 
1530    We do not include this analysis in the context analysis itself, because
1531    it needs memory SSA to be fully built and the walk may be expensive.
   So it is not suitable for use within fold_stmt and similar uses.  */
1533 
1534 bool
1535 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
1536 						tree otr_object,
1537 						tree otr_type,
1538 						gimple *call)
1539 {
1540   struct type_change_info tci;
1541   ao_ref ao;
1542   bool function_entry_reached = false;
1543   tree instance_ref = NULL;
1544   gimple *stmt = call;
1545   /* Remember OFFSET before it is modified by restrict_to_inner_class.
1546      This is because we do not update INSTANCE when walking inwards.  */
1547   HOST_WIDE_INT instance_offset = offset;
1548   tree instance_outer_type = outer_type;
1549 
1550   if (otr_type)
1551     otr_type = TYPE_MAIN_VARIANT (otr_type);
1552 
  /* Walk into the inner type.  This may clear maybe_derived_type and save us
     from useless work.  It also makes later comparisons with the static type
     easier.  */
1556   if (outer_type && otr_type)
1557     {
1558       if (!restrict_to_inner_class (otr_type))
1559         return false;
1560     }
1561 
1562   if (!maybe_in_construction && !maybe_derived_type)
1563     return false;
1564 
  /* If we are in fact not looking at any object, or the instance is
     some placement new into a random load, give up straight away.  */
1567   if (TREE_CODE (instance) == MEM_REF)
1568     return false;
1569 
  /* We need to obtain a reference to the virtual table pointer.  It is better
     to look it up in the code rather than build our own.  This requires a bit
     of pattern matching, but we end up verifying that what we found is
     correct.
1574 
1575      What we pattern match is:
1576 
1577        tmp = instance->_vptr.A;   // vtbl ptr load
1578        tmp2 = tmp[otr_token];	  // vtable lookup
1579        OBJ_TYPE_REF(tmp2;instance->0) (instance);
1580 
1581      We want to start alias oracle walk from vtbl pointer load,
1582      but we may not be able to identify it, for example, when PRE moved the
1583      load around.  */
1584 
1585   if (gimple_code (call) == GIMPLE_CALL)
1586     {
1587       tree ref = gimple_call_fn (call);
1588       bool reverse;
1589 
1590       if (TREE_CODE (ref) == OBJ_TYPE_REF)
1591 	{
1592 	  ref = OBJ_TYPE_REF_EXPR (ref);
1593 	  ref = walk_ssa_copies (ref);
1594 
1595 	  /* If call target is already known, no need to do the expensive
1596  	     memory walk.  */
1597 	  if (is_gimple_min_invariant (ref))
1598 	    return false;
1599 
1600 	  /* Check if definition looks like vtable lookup.  */
1601 	  if (TREE_CODE (ref) == SSA_NAME
1602 	      && !SSA_NAME_IS_DEFAULT_DEF (ref)
1603 	      && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
1604 	      && TREE_CODE (gimple_assign_rhs1
1605 			     (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
1606 	    {
1607 	      ref = get_base_address
1608 		     (TREE_OPERAND (gimple_assign_rhs1
1609 				     (SSA_NAME_DEF_STMT (ref)), 0));
1610 	      ref = walk_ssa_copies (ref);
1611 	      /* Find base address of the lookup and see if it looks like
1612 		 vptr load.  */
1613 	      if (TREE_CODE (ref) == SSA_NAME
1614 		  && !SSA_NAME_IS_DEFAULT_DEF (ref)
1615 		  && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
1616 		{
1617 		  HOST_WIDE_INT offset2, size;
1618 		  tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
1619 		  tree base_ref
1620 		    = get_ref_base_and_extent_hwi (ref_exp, &offset2,
1621 						   &size, &reverse);
1622 
		  /* Finally verify that what we found looks like a read from
		     OTR_OBJECT or from INSTANCE with offset OFFSET.  */
1625 		  if (base_ref
1626 		      && ((TREE_CODE (base_ref) == MEM_REF
1627 		           && ((offset2 == instance_offset
1628 		                && TREE_OPERAND (base_ref, 0) == instance)
1629 			       || (!offset2
1630 				   && TREE_OPERAND (base_ref, 0)
1631 				      == otr_object)))
1632 			  || (DECL_P (instance) && base_ref == instance
1633 			      && offset2 == instance_offset)))
1634 		    {
1635 		      stmt = SSA_NAME_DEF_STMT (ref);
1636 		      instance_ref = ref_exp;
1637 		    }
1638 		}
1639 	    }
1640 	}
1641     }
1642 
1643   /* If we failed to look up the reference in code, build our own.  */
1644   if (!instance_ref)
1645     {
1646       /* If the statement in question does not use memory, we can't tell
1647 	 anything.  */
1648       if (!gimple_vuse (stmt))
1649 	return false;
1650       ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
1651     }
  else
    /* Otherwise use the real reference.  */
    ao_ref_init (&ao, instance_ref);
1655 
1656   /* We look for vtbl pointer read.  */
1657   ao.size = POINTER_SIZE;
1658   ao.max_size = ao.size;
  /* We are looking for stores to the vptr within the instance of the
     outer type.
     TODO: The vptr type is globally known; we probably should keep it
     and do that even when otr_type is unknown.  */
1663   if (otr_type)
1664     {
1665       ao.base_alias_set
1666 	= get_alias_set (outer_type ? outer_type : otr_type);
1667       ao.ref_alias_set
1668         = get_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
1669     }
1670 
1671   if (dump_file)
1672     {
1673       fprintf (dump_file, "Determining dynamic type for call: ");
1674       print_gimple_stmt (dump_file, call, 0);
1675       fprintf (dump_file, "  Starting walk at: ");
1676       print_gimple_stmt (dump_file, stmt, 0);
1677       fprintf (dump_file, "  instance pointer: ");
1678       print_generic_expr (dump_file, otr_object, TDF_SLIM);
1679       fprintf (dump_file, "  Outer instance pointer: ");
1680       print_generic_expr (dump_file, instance, TDF_SLIM);
1681       fprintf (dump_file, " offset: %i (bits)", (int)instance_offset);
1682       fprintf (dump_file, " vtbl reference: ");
1683       print_generic_expr (dump_file, instance_ref, TDF_SLIM);
1684       fprintf (dump_file, "\n");
1685     }
1686 
1687   tci.offset = instance_offset;
1688   tci.instance = instance;
1689   tci.vtbl_ptr_ref = instance_ref;
1690   tci.known_current_type = NULL_TREE;
1691   tci.known_current_offset = 0;
1692   tci.otr_type = otr_type;
1693   tci.type_maybe_changed = false;
1694   tci.multiple_types_encountered = false;
1695   tci.speculative = 0;
1696   tci.seen_unanalyzed_store = false;
1697 
1698   walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
1699 		      &tci, NULL, &function_entry_reached);
1700 
  /* If we did not find any type changing statements, we may still drop
     the maybe_in_construction flag if the context already has an outer type.
1703 
1704      Here we make special assumptions about both constructors and
1705      destructors which are all the functions that are allowed to alter the
1706      VMT pointers.  It assumes that destructors begin with assignment into
1707      all VMT pointers and that constructors essentially look in the
1708      following way:
1709 
1710      1) The very first thing they do is that they call constructors of
1711      ancestor sub-objects that have them.
1712 
     2) Then VMT pointers of this and all its ancestors are set to new
     values corresponding to the type whose constructor is being run.
1715 
     3) Only afterwards, other stuff such as constructors of member
1717      sub-objects and the code written by the user is run.  Only this may
1718      include calling virtual functions, directly or indirectly.
1719 
     4) Placement new cannot be used to change the type of non-POD
     statically allocated variables.
1722 
1723      There is no way to call a constructor of an ancestor sub-object in any
1724      other way.
1725 
1726      This means that we do not have to care whether constructors get the
1727      correct type information because they will always change it (in fact,
1728      if we define the type to be given by the VMT pointer, it is undefined).
1729 
1730      The most important fact to derive from the above is that if, for some
1731      statement in the section 3, we try to detect whether the dynamic type
1732      has changed, we can safely ignore all calls as we examine the function
1733      body backwards until we reach statements in section 2 because these
1734      calls cannot be ancestor constructors or destructors (if the input is
1735      not bogus) and so do not change the dynamic type (this holds true only
1736      for automatically allocated objects but at the moment we devirtualize
1737      only these).  We then must detect that statements in section 2 change
1738      the dynamic type and can try to derive the new type.  That is enough
1739      and we can stop, we will never see the calls into constructors of
1740      sub-objects in this code.
1741 
     Therefore if the static outer type was found (outer_type), we can
     safely ignore tci.speculative, which is set on calls, and give up only
     if there was a dynamic type store that may affect the given variable
     (seen_unanalyzed_store).  */
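
  /* To illustrate the points above (a hand-written sketch, not code the
     compiler emits), a constructor of

       struct B : A { C c; B () { do_something (); } };

     behaves roughly as

       B::B ()
       {
         A::A (this);                   // 1) ancestor constructors
         this->_vptr = &vtable_for_B;   // 2) VMT pointers updated
         C::C (&this->c);               // 3) member constructors ...
         do_something ();               //    ... and user-written code
       }

     so, when walking backwards from a statement in section 3, the first VMT
     store we meet determines the dynamic type.  */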
1746 
1747   if (!tci.type_maybe_changed
1748       || (outer_type
1749 	  && !dynamic
1750 	  && !tci.seen_unanalyzed_store
1751 	  && !tci.multiple_types_encountered
1752 	  && ((offset == tci.offset
1753 	       && types_same_for_odr (tci.known_current_type,
1754 				      outer_type))
1755 	       || (instance_offset == offset
1756 		   && types_same_for_odr (tci.known_current_type,
1757 					  instance_outer_type)))))
1758     {
1759       if (!outer_type || tci.seen_unanalyzed_store)
1760 	return false;
1761       if (maybe_in_construction)
1762         maybe_in_construction = false;
1763       if (dump_file)
1764 	fprintf (dump_file, "  No dynamic type change found.\n");
1765       return true;
1766     }
1767 
1768   if (tci.known_current_type
1769       && !function_entry_reached
1770       && !tci.multiple_types_encountered)
1771     {
1772       if (!tci.speculative)
1773 	{
1774 	  outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1775 	  offset = tci.known_current_offset;
1776 	  dynamic = true;
1777 	  maybe_in_construction = false;
1778 	  maybe_derived_type = false;
1779 	  if (dump_file)
1780 	    fprintf (dump_file, "  Determined dynamic type.\n");
1781 	}
1782       else if (!speculative_outer_type
1783 	       || speculative_maybe_derived_type)
1784 	{
1785 	  speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1786 	  speculative_offset = tci.known_current_offset;
1787 	  speculative_maybe_derived_type = false;
1788 	  if (dump_file)
1789 	    fprintf (dump_file, "  Determined speculative dynamic type.\n");
1790 	}
1791     }
1792   else if (dump_file)
1793     {
1794       fprintf (dump_file, "  Found multiple types%s%s\n",
1795 	       function_entry_reached ? " (function entry reached)" : "",
1796 	       function_entry_reached ? " (multiple types encountered)" : "");
1797     }
1798 
1799   return false;
1800 }
1801 
/* See if the speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and
   SPEC_MAYBE_DERIVED_TYPE seems consistent (and useful) with what we
   already have in the non-speculative context.  */
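
/* For example (an illustrative scenario only): with

	struct A { virtual void foo (); };
	struct B : A { virtual void foo (); };

   a non-speculative context saying "A at offset 0, possibly a derived type"
   can usefully be combined with a speculation that the instance is exactly B
   at offset 0: the speculative type is polymorphic, contains A as a base and
   adds information the non-speculative part lacks.  */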
1804 
1805 bool
1806 ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type,
1807 							HOST_WIDE_INT spec_offset,
1808 							bool spec_maybe_derived_type,
1809 							tree otr_type) const
1810 {
1811   if (!flag_devirtualize_speculatively)
1812     return false;
1813 
1814   /* Non-polymorphic types are useless for deriving likely polymorphic
1815      call targets.  */
1816   if (!spec_outer_type || !contains_polymorphic_type_p (spec_outer_type))
1817     return false;
1818 
1819   /* If we know nothing, speculation is always good.  */
1820   if (!outer_type)
1821     return true;
1822 
1823   /* Speculation is only useful to avoid derived types.
1824      This is not 100% true for placement new, where the outer context may
1825      turn out to be useless, but ignore these for now.  */
1826   if (!maybe_derived_type)
1827     return false;
1828 
  /* If the types agree, speculation is consistent, but it makes sense only
1830      when it says something new.  */
1831   if (types_must_be_same_for_odr (spec_outer_type, outer_type))
1832     return maybe_derived_type && !spec_maybe_derived_type;
1833 
1834   /* If speculation does not contain the type in question, ignore it.  */
1835   if (otr_type
1836       && !contains_type_p (spec_outer_type, spec_offset, otr_type, false, true))
1837     return false;
1838 
  /* If the outer type already contains the speculative type as a field,
     the speculation is useless.  OUTER_TYPE already tells us about
     SPEC_OUTER_TYPE and that it is not in construction.  */
1842   if (contains_type_p (outer_type, offset - spec_offset,
1843 		       spec_outer_type, false, false))
1844     return false;
1845 
  /* If the speculative outer type is not more specific than the outer
     type, just give up.
     We can only decide this safely if we can compare the types with
     OUTER_TYPE.  */
1850   if ((!in_lto_p || odr_type_p (outer_type))
1851       && !contains_type_p (spec_outer_type,
1852 			   spec_offset - offset,
1853 			   outer_type, false))
1854     return false;
1855   return true;
1856 }
1857 
/* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET and
   NEW_MAYBE_DERIVED_TYPE.
1860    If OTR_TYPE is set, assume the context is used with OTR_TYPE.  */
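
/* For instance (an assumed scenario): if the current speculation is "B or a
   type derived from B at offset 0" and the new speculation is "exactly B at
   offset 0", the new speculation is strictly stronger, so
   SPECULATIVE_MAYBE_DERIVED_TYPE is cleared and the combination succeeds.  */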
1861 
1862 bool
1863 ipa_polymorphic_call_context::combine_speculation_with
1864    (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
1865     tree otr_type)
1866 {
1867   if (!new_outer_type)
1868     return false;
1869 
  /* restrict_to_inner_class may eliminate wrong speculation, making our job
     easier.  */
1872   if (otr_type)
1873     restrict_to_inner_class (otr_type);
1874 
1875   if (!speculation_consistent_p (new_outer_type, new_offset,
1876 				 new_maybe_derived_type, otr_type))
1877     return false;
1878 
1879   /* New speculation is a win in case we have no speculation or new
1880      speculation does not consider derivations.  */
1881   if (!speculative_outer_type
1882       || (speculative_maybe_derived_type
1883 	  && !new_maybe_derived_type))
1884     {
1885       speculative_outer_type = new_outer_type;
1886       speculative_offset = new_offset;
1887       speculative_maybe_derived_type = new_maybe_derived_type;
1888       return true;
1889     }
1890   else if (types_must_be_same_for_odr (speculative_outer_type,
1891 				       new_outer_type))
1892     {
1893       if (speculative_offset != new_offset)
1894 	{
	  /* OK, we have two contexts that seem valid, but they disagree;
	     just give up.
1897 
1898 	     This is not a lattice operation, so we may want to drop it later.  */
1899 	  if (dump_file && (dump_flags & TDF_DETAILS))
1900 	    fprintf (dump_file,
1901 		     "Speculative outer types match, "
1902 		     "offset mismatch -> invalid speculation\n");
1903 	  clear_speculation ();
1904 	  return true;
1905 	}
1906       else
1907 	{
1908 	  if (speculative_maybe_derived_type && !new_maybe_derived_type)
1909 	    {
1910 	      speculative_maybe_derived_type = false;
1911 	      return true;
1912 	    }
1913 	  else
1914 	    return false;
1915 	}
1916     }
  /* Choose the type that contains the other.  The chosen type either
     contains the other as a field (thus giving exactly one target) or is
     deeper in the type hierarchy.  */
1920   else if (speculative_outer_type
1921 	   && speculative_maybe_derived_type
1922 	   && (new_offset > speculative_offset
1923 	       || (new_offset == speculative_offset
1924 		   && contains_type_p (new_outer_type,
1925 				       0, speculative_outer_type, false))))
1926     {
1927       tree old_outer_type = speculative_outer_type;
1928       HOST_WIDE_INT old_offset = speculative_offset;
1929       bool old_maybe_derived_type = speculative_maybe_derived_type;
1930 
1931       speculative_outer_type = new_outer_type;
1932       speculative_offset = new_offset;
1933       speculative_maybe_derived_type = new_maybe_derived_type;
1934 
1935       if (otr_type)
1936 	restrict_to_inner_class (otr_type);
1937 
      /* If the new speculation turned out to make no sense, revert to the
	 original one.  */
1940       if (!speculative_outer_type)
1941 	{
1942 	  speculative_outer_type = old_outer_type;
1943 	  speculative_offset = old_offset;
1944 	  speculative_maybe_derived_type = old_maybe_derived_type;
1945 	  return false;
1946 	}
1947       return (old_offset != speculative_offset
1948 	      || old_maybe_derived_type != speculative_maybe_derived_type
1949 	      || types_must_be_same_for_odr (speculative_outer_type,
1950 					     new_outer_type));
1951     }
1952   return false;
1953 }
1954 
/* Make speculation less specific so that NEW_OUTER_TYPE, NEW_OFFSET and
   NEW_MAYBE_DERIVED_TYPE are also included.
1957    If OTR_TYPE is set, assume the context is used with OTR_TYPE.  */
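
/* For instance (an assumed scenario): meeting a speculation of "exactly B at
   offset 0" with "B or a type derived from B at offset 0" keeps the
   speculative type but sets SPECULATIVE_MAYBE_DERIVED_TYPE, since the result
   has to cover both.  */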
1958 
1959 bool
1960 ipa_polymorphic_call_context::meet_speculation_with
1961    (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
1962     tree otr_type)
1963 {
1964   if (!new_outer_type && speculative_outer_type)
1965     {
1966       clear_speculation ();
1967       return true;
1968     }
1969 
  /* restrict_to_inner_class may eliminate wrong speculation, making our job
     easier.  */
1972   if (otr_type)
1973     restrict_to_inner_class (otr_type);
1974 
1975   if (!speculative_outer_type
1976       || !speculation_consistent_p (speculative_outer_type,
1977 				    speculative_offset,
1978 				    speculative_maybe_derived_type,
1979 				    otr_type))
1980     return false;
1981 
1982   if (!speculation_consistent_p (new_outer_type, new_offset,
1983 				 new_maybe_derived_type, otr_type))
1984     {
1985       clear_speculation ();
1986       return true;
1987     }
1988 
1989   else if (types_must_be_same_for_odr (speculative_outer_type,
1990 				       new_outer_type))
1991     {
1992       if (speculative_offset != new_offset)
1993 	{
1994 	  clear_speculation ();
1995 	  return true;
1996 	}
1997       else
1998 	{
1999 	  if (!speculative_maybe_derived_type && new_maybe_derived_type)
2000 	    {
2001 	      speculative_maybe_derived_type = true;
2002 	      return true;
2003 	    }
2004 	  else
2005 	    return false;
2006 	}
2007     }
2008   /* See if one type contains the other as a field (not base).  */
2009   else if (contains_type_p (new_outer_type, new_offset - speculative_offset,
2010 			    speculative_outer_type, false, false))
2011     return false;
2012   else if (contains_type_p (speculative_outer_type,
2013 			    speculative_offset - new_offset,
2014 			    new_outer_type, false, false))
2015     {
2016       speculative_outer_type = new_outer_type;
2017       speculative_offset = new_offset;
2018       speculative_maybe_derived_type = new_maybe_derived_type;
2019       return true;
2020     }
2021   /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
2022   else if (contains_type_p (new_outer_type,
2023 			    new_offset - speculative_offset,
2024 			    speculative_outer_type, false, true))
2025     {
2026       if (!speculative_maybe_derived_type)
2027 	{
2028 	  speculative_maybe_derived_type = true;
2029 	  return true;
2030 	}
2031       return false;
2032     }
2033   /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
2034   else if (contains_type_p (speculative_outer_type,
2035 			    speculative_offset - new_offset, new_outer_type, false, true))
2036     {
2037       speculative_outer_type = new_outer_type;
2038       speculative_offset = new_offset;
2039       speculative_maybe_derived_type = true;
2040       return true;
2041     }
2042   else
2043     {
2044       if (dump_file && (dump_flags & TDF_DETAILS))
2045         fprintf (dump_file, "Giving up on speculative meet\n");
2046       clear_speculation ();
2047       return true;
2048     }
2049 }
2050 
/* Assume that both THIS and a given context are valid and strengthen THIS
   if possible.  Return true if any strengthening was made.
   If the actual type the context is being used with is known, OTR_TYPE
   should be set accordingly.  This improves the quality of the combined
   result.  */
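
/* For example (illustrative only): combining a context saying "instance of A,
   possibly still in construction" with one saying "instance of A, not in
   construction" keeps type A and drops the MAYBE_IN_CONSTRUCTION flag, since
   both contexts are assumed to be valid.  */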
2055 
2056 bool
2057 ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
2058 					    tree otr_type)
2059 {
2060   bool updated = false;
2061 
2062   if (ctx.useless_p () || invalid)
2063     return false;
2064 
  /* Restricting the context to the inner type makes merging easier; however,
     do not do that unless we know how the context is used (OTR_TYPE is
     non-NULL).  */
2067   if (otr_type && !invalid && !ctx.invalid)
2068     {
2069       restrict_to_inner_class (otr_type);
2070       ctx.restrict_to_inner_class (otr_type);
      if (invalid)
2072         return false;
2073     }
2074 
2075   if (dump_file && (dump_flags & TDF_DETAILS))
2076     {
2077       fprintf (dump_file, "Polymorphic call context combine:");
2078       dump (dump_file);
2079       fprintf (dump_file, "With context:                    ");
2080       ctx.dump (dump_file);
2081       if (otr_type)
2082 	{
2083           fprintf (dump_file, "To be used with type:            ");
2084 	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
2085           fprintf (dump_file, "\n");
2086 	}
2087     }
2088 
2089   /* If call is known to be invalid, we are done.  */
2090   if (ctx.invalid)
2091     {
2092       if (dump_file && (dump_flags & TDF_DETAILS))
2093         fprintf (dump_file, "-> Invalid context\n");
2094       goto invalidate;
2095     }
2096 
2097   if (!ctx.outer_type)
2098     ;
2099   else if (!outer_type)
2100     {
2101       outer_type = ctx.outer_type;
2102       offset = ctx.offset;
2103       dynamic = ctx.dynamic;
2104       maybe_in_construction = ctx.maybe_in_construction;
2105       maybe_derived_type = ctx.maybe_derived_type;
2106       updated = true;
2107     }
2108   /* If types are known to be same, merging is quite easy.  */
2109   else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
2110     {
2111       if (offset != ctx.offset
2112 	  && TYPE_SIZE (outer_type)
2113 	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
2114 	{
2115 	  if (dump_file && (dump_flags & TDF_DETAILS))
2116 	    fprintf (dump_file, "Outer types match, offset mismatch -> invalid\n");
2117 	  clear_speculation ();
2118 	  clear_outer_type ();
2119 	  invalid = true;
2120 	  return true;
2121 	}
2122       if (dump_file && (dump_flags & TDF_DETAILS))
2123         fprintf (dump_file, "Outer types match, merging flags\n");
2124       if (maybe_in_construction && !ctx.maybe_in_construction)
2125 	{
2126 	  updated = true;
2127 	  maybe_in_construction = false;
2128 	}
2129       if (maybe_derived_type && !ctx.maybe_derived_type)
2130 	{
2131 	  updated = true;
2132 	  maybe_derived_type = false;
2133 	}
2134       if (dynamic && !ctx.dynamic)
2135 	{
2136 	  updated = true;
2137 	  dynamic = false;
2138 	}
2139     }
2140   /* If we know the type precisely, there is not much to improve.  */
2141   else if (!maybe_derived_type && !maybe_in_construction
2142 	   && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
2143     {
      /* It might seem easy to check whether the second context permits the
	 first and set INVALID otherwise.  This is not easy to do in general;
	 contains_type_p may return false negatives for non-comparable
	 types.
2148 
2149 	 If OTR_TYPE is known, we however can expect that
2150 	 restrict_to_inner_class should have discovered the same base
2151 	 type.  */
2152       if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
2153 	{
2154 	  if (dump_file && (dump_flags & TDF_DETAILS))
2155 	    fprintf (dump_file, "Contextes disagree -> invalid\n");
2156 	  goto invalidate;
2157 	}
2158     }
2159   /* See if one type contains the other as a field (not base).
2160      In this case we want to choose the wider type, because it contains
2161      more information.  */
2162   else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
2163 			    outer_type, false, false))
2164     {
2165       if (dump_file && (dump_flags & TDF_DETAILS))
2166 	fprintf (dump_file, "Second type contain the first as a field\n");
2167 
2168       if (maybe_derived_type)
2169 	{
2170 	  outer_type = ctx.outer_type;
2171 	  maybe_derived_type = ctx.maybe_derived_type;
2172 	  offset = ctx.offset;
2173 	  dynamic = ctx.dynamic;
2174 	  updated = true;
2175 	}
2176 
      /* If we do not know how the context is being used, we cannot
	 clear MAYBE_IN_CONSTRUCTION because it may be offset to another
	 component of OUTER_TYPE later and we know nothing about it.  */
2181       if (otr_type && maybe_in_construction
2182 	  && !ctx.maybe_in_construction)
2183 	{
2184           maybe_in_construction = false;
2185 	  updated = true;
2186 	}
2187     }
2188   else if (contains_type_p (outer_type, offset - ctx.offset,
2189 			    ctx.outer_type, false, false))
2190     {
2191       if (dump_file && (dump_flags & TDF_DETAILS))
2192 	fprintf (dump_file, "First type contain the second as a field\n");
2193 
2194       if (otr_type && maybe_in_construction
2195 	  && !ctx.maybe_in_construction)
2196 	{
2197           maybe_in_construction = false;
2198 	  updated = true;
2199 	}
2200     }
2201   /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
2202   else if (contains_type_p (ctx.outer_type,
2203 			    ctx.offset - offset, outer_type, false, true))
2204     {
2205       if (dump_file && (dump_flags & TDF_DETAILS))
2206 	fprintf (dump_file, "First type is base of second\n");
2207       if (!maybe_derived_type)
2208 	{
2209 	  if (!ctx.maybe_in_construction
2210 	      && types_odr_comparable (outer_type, ctx.outer_type))
2211 	    {
2212 	      if (dump_file && (dump_flags & TDF_DETAILS))
2213 		fprintf (dump_file, "Second context does not permit base -> invalid\n");
2214 	      goto invalidate;
2215 	    }
2216 	}
      /* Pick the variant deeper in the hierarchy.  */
2218       else
2219 	{
2220 	  outer_type = ctx.outer_type;
2221 	  maybe_in_construction = ctx.maybe_in_construction;
2222 	  maybe_derived_type = ctx.maybe_derived_type;
2223 	  offset = ctx.offset;
2224 	  dynamic = ctx.dynamic;
2225           updated = true;
2226 	}
2227     }
2228   /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
2229   else if (contains_type_p (outer_type,
2230 			    offset - ctx.offset, ctx.outer_type, false, true))
2231     {
2232       if (dump_file && (dump_flags & TDF_DETAILS))
2233 	fprintf (dump_file, "Second type is base of first\n");
2234       if (!ctx.maybe_derived_type)
2235 	{
2236 	  if (!maybe_in_construction
2237 	      && types_odr_comparable (outer_type, ctx.outer_type))
2238 	    {
2239 	      if (dump_file && (dump_flags & TDF_DETAILS))
2240 		fprintf (dump_file, "First context does not permit base -> invalid\n");
2241 	      goto invalidate;
2242 	    }
2243 	  /* Pick the base type.  */
2244 	  else if (maybe_in_construction)
2245 	    {
2246 	      outer_type = ctx.outer_type;
2247 	      maybe_in_construction = ctx.maybe_in_construction;
2248 	      maybe_derived_type = ctx.maybe_derived_type;
2249 	      offset = ctx.offset;
2250 	      dynamic = ctx.dynamic;
2251 	      updated = true;
2252 	    }
2253 	}
2254     }
  /* TODO: handle merging using the type hierarchy.  */
2256   else if (dump_file && (dump_flags & TDF_DETAILS))
2257     fprintf (dump_file, "Giving up on merge\n");
2258 
2259   updated |= combine_speculation_with (ctx.speculative_outer_type,
2260 				       ctx.speculative_offset,
2261 				       ctx.speculative_maybe_derived_type,
2262 				       otr_type);
2263 
2264   if (updated && dump_file && (dump_flags & TDF_DETAILS))
2265     {
2266       fprintf (dump_file, "Updated as:                      ");
2267       dump (dump_file);
2268       fprintf (dump_file, "\n");
2269     }
2270   return updated;
2271 
2272 invalidate:
2273   invalid = true;
2274   clear_speculation ();
2275   clear_outer_type ();
2276   return true;
2277 }
2278 
/* Take the non-speculative info, merge it with the speculative one and clear
   the speculation.  Used when we no longer manage to keep track of the actual
   outer type, but we think it is still there.

   If OTR_TYPE is set, the transformation can be done more effectively,
   assuming that the context is going to be used only that way.  */
2285 
2286 void
2287 ipa_polymorphic_call_context::make_speculative (tree otr_type)
2288 {
2289   tree spec_outer_type = outer_type;
2290   HOST_WIDE_INT spec_offset = offset;
2291   bool spec_maybe_derived_type = maybe_derived_type;
2292 
2293   if (invalid)
2294     {
2295       invalid = false;
2296       clear_outer_type ();
2297       clear_speculation ();
2298       return;
2299     }
2300   if (!outer_type)
2301     return;
2302   clear_outer_type ();
2303   combine_speculation_with (spec_outer_type, spec_offset,
2304 			    spec_maybe_derived_type,
2305 			    otr_type);
2306 }
2307 
/* Use when we cannot track a dynamic type change.  This speculatively assumes
   that the type is not changing.  */
2310 
2311 void
2312 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor,
2313 							    tree otr_type)
2314 {
2315   if (dynamic)
2316     make_speculative (otr_type);
2317   else if (in_poly_cdtor)
2318     maybe_in_construction = true;
2319 }
2320 
/* Return TRUE if this context conveys the same information as X.  */
2322 
2323 bool
2324 ipa_polymorphic_call_context::equal_to
2325     (const ipa_polymorphic_call_context &x) const
2326 {
2327   if (useless_p ())
2328     return x.useless_p ();
2329   if (invalid)
2330     return x.invalid;
2331   if (x.useless_p () || x.invalid)
2332     return false;
2333 
2334   if (outer_type)
2335     {
2336       if (!x.outer_type
2337 	  || !types_odr_comparable (outer_type, x.outer_type)
2338 	  || !types_same_for_odr (outer_type, x.outer_type)
2339 	  || offset != x.offset
2340 	  || maybe_in_construction != x.maybe_in_construction
2341 	  || maybe_derived_type != x.maybe_derived_type
2342 	  || dynamic != x.dynamic)
2343 	return false;
2344     }
2345   else if (x.outer_type)
2346     return false;
2347 
2348 
2349   if (speculative_outer_type
2350       && speculation_consistent_p (speculative_outer_type, speculative_offset,
2351 				   speculative_maybe_derived_type, NULL_TREE))
2352     {
2353       if (!x.speculative_outer_type)
2354 	return false;
2355 
2356       if (!types_odr_comparable (speculative_outer_type,
2357 				 x.speculative_outer_type)
2358 	  || !types_same_for_odr  (speculative_outer_type,
2359 				   x.speculative_outer_type)
2360 	  || speculative_offset != x.speculative_offset
2361 	  || speculative_maybe_derived_type != x.speculative_maybe_derived_type)
2362 	return false;
2363     }
2364   else if (x.speculative_outer_type
2365 	   && x.speculation_consistent_p (x.speculative_outer_type,
2366 					  x.speculative_offset,
2367 				  	  x.speculative_maybe_derived_type,
2368 					  NULL))
2369     return false;
2370 
2371   return true;
2372 }
2373 
2374 /* Modify context to be strictly less restrictive than CTX.  */
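
/* For example (illustrative only): meeting a context saying "exactly A" with
   one saying "A or a type derived from A" keeps type A but sets
   MAYBE_DERIVED_TYPE, since the result has to cover both possibilities.  */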
2375 
2376 bool
2377 ipa_polymorphic_call_context::meet_with (ipa_polymorphic_call_context ctx,
2378 					 tree otr_type)
2379 {
2380   bool updated = false;
2381 
2382   if (useless_p () || ctx.invalid)
2383     return false;
2384 
  /* Restricting the context to the inner type makes merging easier; however,
     do not do that unless we know how the context is used (OTR_TYPE is
     non-NULL).  */
2387   if (otr_type && !useless_p () && !ctx.useless_p ())
2388     {
2389       restrict_to_inner_class (otr_type);
2390       ctx.restrict_to_inner_class (otr_type);
      if (invalid)
2392         return false;
2393     }
2394 
2395   if (equal_to (ctx))
2396     return false;
2397 
2398   if (ctx.useless_p () || invalid)
2399     {
2400       *this = ctx;
2401       return true;
2402     }
2403 
2404   if (dump_file && (dump_flags & TDF_DETAILS))
2405     {
2406       fprintf (dump_file, "Polymorphic call context meet:");
2407       dump (dump_file);
2408       fprintf (dump_file, "With context:                    ");
2409       ctx.dump (dump_file);
2410       if (otr_type)
2411 	{
2412           fprintf (dump_file, "To be used with type:            ");
2413 	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
2414           fprintf (dump_file, "\n");
2415 	}
2416     }
2417 
2418   if (!dynamic && ctx.dynamic)
2419     {
2420       dynamic = true;
2421       updated = true;
2422     }
2423 
  /* If we know nothing about the outer type, there is nothing to do.  */
2425   if (!outer_type)
2426     ;
2427   else if (!ctx.outer_type)
2428     {
2429       clear_outer_type ();
2430       updated = true;
2431     }
2432   /* If types are known to be same, merging is quite easy.  */
2433   else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
2434     {
2435       if (offset != ctx.offset
2436 	  && TYPE_SIZE (outer_type)
2437 	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
2438 	{
2439 	  if (dump_file && (dump_flags & TDF_DETAILS))
2440 	    fprintf (dump_file, "Outer types match, offset mismatch -> clearing\n");
2441 	  clear_outer_type ();
2442 	  return true;
2443 	}
2444       if (dump_file && (dump_flags & TDF_DETAILS))
2445         fprintf (dump_file, "Outer types match, merging flags\n");
2446       if (!maybe_in_construction && ctx.maybe_in_construction)
2447 	{
2448 	  updated = true;
2449 	  maybe_in_construction = true;
2450 	}
2451       if (!maybe_derived_type && ctx.maybe_derived_type)
2452 	{
2453 	  updated = true;
2454 	  maybe_derived_type = true;
2455 	}
2456       if (!dynamic && ctx.dynamic)
2457 	{
2458 	  updated = true;
2459 	  dynamic = true;
2460 	}
2461     }
2462   /* See if one type contains the other as a field (not base).  */
2463   else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
2464 			    outer_type, false, false))
2465     {
2466       if (dump_file && (dump_flags & TDF_DETAILS))
2467 	fprintf (dump_file, "Second type contain the first as a field\n");
2468 
      /* The second type is more specific, so we keep the first.
	 We need to set the DYNAMIC flag to avoid declaring the context
	 INVALID if OFFSET ends up being out of range.  */
2472       if (!dynamic
2473 	  && (ctx.dynamic
2474 	      || (!otr_type
2475 		  && (!TYPE_SIZE (ctx.outer_type)
2476 		      || !TYPE_SIZE (outer_type)
2477 		      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2478 					   TYPE_SIZE (outer_type), 0)))))
2479 	{
2480 	  dynamic = true;
2481 	  updated = true;
2482 	}
2483     }
2484   else if (contains_type_p (outer_type, offset - ctx.offset,
2485 			    ctx.outer_type, false, false))
2486     {
2487       if (dump_file && (dump_flags & TDF_DETAILS))
2488 	fprintf (dump_file, "First type contain the second as a field\n");
2489 
2490       if (!dynamic
2491 	  && (ctx.dynamic
2492 	      || (!otr_type
2493 		  && (!TYPE_SIZE (ctx.outer_type)
2494 		      || !TYPE_SIZE (outer_type)
2495 		      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
2496 					   TYPE_SIZE (outer_type), 0)))))
2497 	dynamic = true;
2498       outer_type = ctx.outer_type;
2499       offset = ctx.offset;
2500       dynamic = ctx.dynamic;
2501       maybe_in_construction = ctx.maybe_in_construction;
2502       maybe_derived_type = ctx.maybe_derived_type;
2503       updated = true;
2504     }
2505   /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
2506   else if (contains_type_p (ctx.outer_type,
2507 			    ctx.offset - offset, outer_type, false, true))
2508     {
2509       if (dump_file && (dump_flags & TDF_DETAILS))
2510 	fprintf (dump_file, "First type is base of second\n");
2511       if (!maybe_derived_type)
2512 	{
2513 	  maybe_derived_type = true;
2514 	  updated = true;
2515 	}
2516       if (!maybe_in_construction && ctx.maybe_in_construction)
2517 	{
2518 	  maybe_in_construction = true;
2519 	  updated = true;
2520 	}
2521       if (!dynamic && ctx.dynamic)
2522 	{
2523 	  dynamic = true;
2524 	  updated = true;
2525 	}
2526     }
2527   /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
2528   else if (contains_type_p (outer_type,
2529 			    offset - ctx.offset, ctx.outer_type, false, true))
2530     {
2531       if (dump_file && (dump_flags & TDF_DETAILS))
2532 	fprintf (dump_file, "Second type is base of first\n");
2533       outer_type = ctx.outer_type;
2534       offset = ctx.offset;
2535       updated = true;
2536       if (!maybe_derived_type)
2537 	maybe_derived_type = true;
2538       if (!maybe_in_construction && ctx.maybe_in_construction)
2539 	maybe_in_construction = true;
2540       if (!dynamic && ctx.dynamic)
2541 	dynamic = true;
2542     }
  /* TODO: handle merging using the type hierarchy.  */
2544   else
2545     {
2546       if (dump_file && (dump_flags & TDF_DETAILS))
2547         fprintf (dump_file, "Giving up on meet\n");
2548       clear_outer_type ();
2549       updated = true;
2550     }
2551 
2552   updated |= meet_speculation_with (ctx.speculative_outer_type,
2553 				    ctx.speculative_offset,
2554 				    ctx.speculative_maybe_derived_type,
2555 				    otr_type);
2556 
2557   if (updated && dump_file && (dump_flags & TDF_DETAILS))
2558     {
2559       fprintf (dump_file, "Updated as:                      ");
2560       dump (dump_file);
2561       fprintf (dump_file, "\n");
2562     }
2563   return updated;
2564 }
2565