1 /* Analysis of polymorphic call context.
2    Copyright (C) 2013-2018 Free Software Foundation, Inc.
3    Contributed by Jan Hubicka
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "tree-pass.h"
29 #include "tree-ssa-operands.h"
30 #include "streamer-hooks.h"
31 #include "cgraph.h"
32 #include "data-streamer.h"
33 #include "diagnostic.h"
34 #include "alias.h"
35 #include "fold-const.h"
36 #include "calls.h"
37 #include "ipa-utils.h"
38 #include "tree-dfa.h"
39 #include "gimple-pretty-print.h"
40 #include "tree-into-ssa.h"
41 #include "params.h"
42 
/* Return true when TYPE contains a polymorphic type and thus is interesting
   for devirtualization machinery.  */
45 
46 static bool contains_type_p (tree, HOST_WIDE_INT, tree,
47 			     bool consider_placement_new = true,
48 			     bool consider_bases = true);
49 
50 bool
51 contains_polymorphic_type_p (const_tree type)
52 {
53   type = TYPE_MAIN_VARIANT (type);
54 
55   if (RECORD_OR_UNION_TYPE_P (type))
56     {
57       if (TYPE_BINFO (type)
58           && polymorphic_type_binfo_p (TYPE_BINFO (type)))
59 	return true;
60       for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
61 	if (TREE_CODE (fld) == FIELD_DECL
62 	    && !DECL_ARTIFICIAL (fld)
63 	    && contains_polymorphic_type_p (TREE_TYPE (fld)))
64 	  return true;
65       return false;
66     }
67   if (TREE_CODE (type) == ARRAY_TYPE)
68     return contains_polymorphic_type_p (TREE_TYPE (type));
69   return false;
70 }
71 
72 /* Return true if it seems valid to use placement new to build EXPECTED_TYPE
73    at possition CUR_OFFSET within TYPE.
74 
75    POD can be changed to an instance of a polymorphic type by
76    placement new.  Here we play safe and assume that any
77    non-polymorphic type is POD.  */
78 bool
79 possible_placement_new (tree type, tree expected_type,
80 			HOST_WIDE_INT cur_offset)
81 {
82   if (cur_offset < 0)
83     return true;
84   return ((TREE_CODE (type) != RECORD_TYPE
85 	   || !TYPE_BINFO (type)
86 	   || cur_offset >= POINTER_SIZE
87 	   || !polymorphic_type_binfo_p (TYPE_BINFO (type)))
88 	  && (!TYPE_SIZE (type)
89 	      || !tree_fits_shwi_p (TYPE_SIZE (type))
90 	      || (cur_offset
91 		  + (expected_type ? tree_to_uhwi (TYPE_SIZE (expected_type))
92 		     : POINTER_SIZE)
93 		  <= tree_to_uhwi (TYPE_SIZE (type)))));
94 }
95 
/* THIS->OUTER_TYPE is a type of memory object where object of OTR_TYPE
   is contained at THIS->OFFSET.  Walk the memory representation of
   THIS->OUTER_TYPE and find the outermost class type that matches
   OTR_TYPE or contains OTR_TYPE as a base.  Update THIS
   to represent it.

   If OTR_TYPE is NULL, just find outermost polymorphic type with
   virtual table present at position OFFSET.

   For example when THIS represents type
   class A
     {
       int a;
       class B b;
     }
   and we look for type at offset sizeof(int), we end up with B and offset 0.
   If the same is produced by multiple inheritance, we end up with A and offset
   sizeof(int).

   If we can not find corresponding class, give up by setting
   THIS->OUTER_TYPE to OTR_TYPE and THIS->OFFSET to 0.
   Return true when lookup was successful.

   When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
   valid only via allocation of new polymorphic type inside by means
   of placement new.

   When CONSIDER_BASES is false, only look for actual fields, not base types
   of TYPE.  */

bool
ipa_polymorphic_call_context::restrict_to_inner_class (tree otr_type,
						       bool consider_placement_new,
						       bool consider_bases)
{
  tree type = outer_type;
  HOST_WIDE_INT cur_offset = offset;
  bool speculative = false;
  bool size_unknown = false;
  unsigned HOST_WIDE_INT otr_type_size = POINTER_SIZE;

  /* Update OUTER_TYPE to match EXPECTED_TYPE if it is not set.  */
  if (!outer_type)
    {
      clear_outer_type (otr_type);
      type = otr_type;
      cur_offset = 0;
    }
 /* See if OFFSET points inside OUTER_TYPE.  If it does not, we know
    that the context is either invalid, or the instance type must be
    derived from OUTER_TYPE.

    Because the instance type may contain field whose type is of OUTER_TYPE,
    we can not derive any effective information about it.

    TODO: In the case we know all derived types, we can definitely do better
    here.  */
  else if (TYPE_SIZE (outer_type)
	   && tree_fits_shwi_p (TYPE_SIZE (outer_type))
	   && tree_to_shwi (TYPE_SIZE (outer_type)) >= 0
	   && tree_to_shwi (TYPE_SIZE (outer_type)) <= offset)
   {
     bool der = maybe_derived_type; /* clear_outer_type will reset it.  */
     bool dyn = dynamic;
     clear_outer_type (otr_type);
     type = otr_type;
     cur_offset = 0;

     /* If derived type is not allowed, we know that the context is invalid.
	For dynamic types, we really do not have information about
	size of the memory location.  It is possible that completely
	different type is stored after outer_type.  */
     if (!der && !dyn)
       {
	 clear_speculation ();
	 invalid = true;
	 return false;
       }
   }

  /* Cache the bit size of OTR_TYPE when it is known; it defaults to
     POINTER_SIZE (the vtable pointer) otherwise.  */
  if (otr_type && TYPE_SIZE (otr_type)
      && tree_fits_shwi_p (TYPE_SIZE (otr_type)))
    otr_type_size = tree_to_uhwi (TYPE_SIZE (otr_type));

  if (!type || offset < 0)
    goto no_useful_type_info;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).

     This loop is performed twice; first time for outer_type and second time
     for speculative_outer_type.  The second run has SPECULATIVE set.  */
  while (true)
    {
      unsigned HOST_WIDE_INT pos, size;
      tree fld;

      /* If we do not know size of TYPE, we need to be more conservative
         about accepting cases where we can not find EXPECTED_TYPE.
	 Generally the types that do matter here are of constant size.
	 Size_unknown case should be very rare.  */
      if (TYPE_SIZE (type)
	  && tree_fits_shwi_p (TYPE_SIZE (type))
	  && tree_to_shwi (TYPE_SIZE (type)) >= 0)
	size_unknown = false;
      else
	size_unknown = true;

      /* On a match, just return what we found.  */
      if ((otr_type
	   && types_odr_comparable (type, otr_type)
	   && types_same_for_odr (type, otr_type))
	  || (!otr_type
	      && TREE_CODE (type) == RECORD_TYPE
	      && TYPE_BINFO (type)
	      && polymorphic_type_binfo_p (TYPE_BINFO (type))))
	{
	  if (speculative)
	    {
	      /* If we did not match the offset, just give up on speculation.  */
	      if (cur_offset != 0
		  /* Also check if speculation did not end up being same as
		     non-speculation.  */
		  || (types_must_be_same_for_odr (speculative_outer_type,
						  outer_type)
		      && (maybe_derived_type
			  == speculative_maybe_derived_type)))
		clear_speculation ();
	      return true;
	    }
	  else
	    {
	      /* If type is known to be final, do not worry about derived
		 types.  Testing it here may help us to avoid speculation.  */
	      if (otr_type && TREE_CODE (outer_type) == RECORD_TYPE
		  && (!in_lto_p || odr_type_p (outer_type))
		  && type_with_linkage_p (outer_type)
		  && type_known_to_have_no_derivations_p (outer_type))
		maybe_derived_type = false;

	      /* Type can not contain itself on a non-zero offset.  In that case
		 just give up.  Still accept the case where size is now known.
		 Either the second copy may appear past the end of type or within
		 the non-POD buffer located inside the variably sized type
		 itself.  */
	      if (cur_offset != 0)
		goto no_useful_type_info;
	      /* If we determined type precisely or we have no clue on
 		 speculation, we are done.  */
	      if (!maybe_derived_type || !speculative_outer_type
		  || !speculation_consistent_p (speculative_outer_type,
					        speculative_offset,
					        speculative_maybe_derived_type,
						otr_type))
		{
		  clear_speculation ();
	          return true;
		}
	      /* Otherwise look into speculation now.  */
	      else
		{
		  speculative = true;
		  type = speculative_outer_type;
		  cur_offset = speculative_offset;
		  continue;
		}
	    }
	}

      /* Walk fields and find corresponding one at OFFSET.  */
      if (TREE_CODE (type) == RECORD_TYPE)
	{
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      if (TREE_CODE (fld) != FIELD_DECL
		  || TREE_TYPE (fld) == error_mark_node)
		continue;

	      pos = int_bit_position (fld);
	      if (pos > (unsigned HOST_WIDE_INT)cur_offset)
		continue;

	      /* Do not consider vptr itself.  Not even for placement new.  */
	      if (!pos && DECL_ARTIFICIAL (fld)
		  && POINTER_TYPE_P (TREE_TYPE (fld))
		  && TYPE_BINFO (type)
		  && polymorphic_type_binfo_p (TYPE_BINFO (type)))
		continue;

	      if (!DECL_SIZE (fld) || !tree_fits_uhwi_p (DECL_SIZE (fld)))
		goto no_useful_type_info;
	      size = tree_to_uhwi (DECL_SIZE (fld));

	      /* We can always skip types smaller than pointer size:
		 those can not contain a virtual table pointer.

		 Disqualifying fields that are too small to fit OTR_TYPE
		 saves work needed to walk them for no benefit.
		 Because of the way the bases are packed into a class, the
		 field's size may be smaller than type size, so it needs
		 to be done with a care.  */

	      if (pos <= (unsigned HOST_WIDE_INT)cur_offset
		  && (pos + size) >= (unsigned HOST_WIDE_INT)cur_offset
				     + POINTER_SIZE
		  && (!otr_type
		      || !TYPE_SIZE (TREE_TYPE (fld))
		      || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (fld)))
		      || (pos + tree_to_uhwi (TYPE_SIZE (TREE_TYPE (fld))))
			  >= cur_offset + otr_type_size))
		break;
	    }

	  if (!fld)
	    goto no_useful_type_info;

	  /* Descend into the field found and continue the walk there.  */
	  type = TYPE_MAIN_VARIANT (TREE_TYPE (fld));
	  cur_offset -= pos;
	  /* DECL_ARTIFICIAL represents a basetype.  */
	  if (!DECL_ARTIFICIAL (fld))
	    {
	      if (!speculative)
		{
		  outer_type = type;
		  offset = cur_offset;
		  /* As soon as we see a field containing the type,
		     we know we are not looking for derivations.  */
		  maybe_derived_type = false;
		}
	      else
		{
		  speculative_outer_type = type;
		  speculative_offset = cur_offset;
		  speculative_maybe_derived_type = false;
		}
	    }
	  else if (!consider_bases)
	    goto no_useful_type_info;
	}
      else if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  tree subtype = TYPE_MAIN_VARIANT (TREE_TYPE (type));

	  /* Give up if we don't know array field size.
	     Also give up on non-polymorphic types as they are used
	     as buffers for placement new.  */
	  if (!TYPE_SIZE (subtype)
	      || !tree_fits_shwi_p (TYPE_SIZE (subtype))
	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0
	      || !contains_polymorphic_type_p (subtype))
	    goto no_useful_type_info;

	  HOST_WIDE_INT new_offset = cur_offset % tree_to_shwi (TYPE_SIZE (subtype));

	  /* We may see buffer for placement new.  In this case the expected type
	     can be bigger than the subtype.  */
	  if (TYPE_SIZE (subtype)
	      && (cur_offset + otr_type_size
		  > tree_to_uhwi (TYPE_SIZE (subtype))))
	    goto no_useful_type_info;

	  cur_offset = new_offset;
	  type = TYPE_MAIN_VARIANT (subtype);
	  if (!speculative)
	    {
	      outer_type = type;
	      offset = cur_offset;
	      maybe_derived_type = false;
	    }
	  else
	    {
	      speculative_outer_type = type;
	      speculative_offset = cur_offset;
	      speculative_maybe_derived_type = false;
	    }
	}
      /* Give up on anything else.  */
      else
	{
	  /* This label is reached by gotos from outside the else branch;
	     it handles every case where the walk can not continue.  */
no_useful_type_info:
	  if (maybe_derived_type && !speculative
	      && TREE_CODE (outer_type) == RECORD_TYPE
	      && TREE_CODE (otr_type) == RECORD_TYPE
	      && TYPE_BINFO (otr_type)
	      && !offset
	      && get_binfo_at_offset (TYPE_BINFO (otr_type), 0, outer_type))
	    {
	      clear_outer_type (otr_type);
	      if (!speculative_outer_type
		  || !speculation_consistent_p (speculative_outer_type,
						speculative_offset,
					        speculative_maybe_derived_type,
						otr_type))
		clear_speculation ();
	      if (speculative_outer_type)
		{
		  speculative = true;
		  type = speculative_outer_type;
		  cur_offset = speculative_offset;
		}
	      else
		return true;
	    }
	  /* We found no way to embed EXPECTED_TYPE in TYPE.
	     We still permit two special cases - placement new and
	     the case of variadic types containing themselves.  */
	  if (!speculative
	      && consider_placement_new
	      && (size_unknown || !type || maybe_derived_type
		  || possible_placement_new (type, otr_type, cur_offset)))
	    {
	      /* In these weird cases we want to accept the context.
		 In non-speculative run we have no useful outer_type info
		 (TODO: we may eventually want to record upper bound on the
		  type size that can be used to prune the walk),
		 but we still want to consider speculation that may
		 give useful info.  */
	      if (!speculative)
		{
		  clear_outer_type (otr_type);
		  if (!speculative_outer_type
		      || !speculation_consistent_p (speculative_outer_type,
						    speculative_offset,
						    speculative_maybe_derived_type,
						    otr_type))
		    clear_speculation ();
		  if (speculative_outer_type)
		    {
		      speculative = true;
		      type = speculative_outer_type;
		      cur_offset = speculative_offset;
		    }
		  else
		    return true;
		}
	      else
		{
		  clear_speculation ();
	          return true;
		}
	    }
	  else
	    {
	      clear_speculation ();
	      if (speculative)
		return true;
	      clear_outer_type (otr_type);
	      invalid = true;
	      return false;
	    }
	}
    }
}
449 
450 /* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.
451    CONSIDER_PLACEMENT_NEW makes function to accept cases where OTR_TYPE can
452    be built within OUTER_TYPE by means of placement new.  CONSIDER_BASES makes
453    function to accept cases where OTR_TYPE appears as base of OUTER_TYPE or as
454    base of one of fields of OUTER_TYPE.  */
455 
456 static bool
457 contains_type_p (tree outer_type, HOST_WIDE_INT offset,
458 		 tree otr_type,
459 		 bool consider_placement_new,
460 		 bool consider_bases)
461 {
462   ipa_polymorphic_call_context context;
463 
464   /* Check that type is within range.  */
465   if (offset < 0)
466     return false;
467 
468   /* PR ipa/71207
469      As OUTER_TYPE can be a type which has a diamond virtual inheritance,
470      it's not necessary that INNER_TYPE will fit within OUTER_TYPE with
471      a given offset.  It can happen that INNER_TYPE also contains a base object,
472      however it would point to the same instance in the OUTER_TYPE.  */
473 
474   context.offset = offset;
475   context.outer_type = TYPE_MAIN_VARIANT (outer_type);
476   context.maybe_derived_type = false;
477   context.dynamic = false;
478   return context.restrict_to_inner_class (otr_type, consider_placement_new,
479 					  consider_bases);
480 }
481 
482 
483 /* Return a FUNCTION_DECL if FN represent a constructor or destructor.
484    If CHECK_CLONES is true, also check for clones of ctor/dtors.  */
485 
486 tree
487 polymorphic_ctor_dtor_p (tree fn, bool check_clones)
488 {
489   if (TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
490       || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
491     {
492       if (!check_clones)
493 	return NULL_TREE;
494 
495       /* Watch for clones where we constant propagated the first
496 	 argument (pointer to the instance).  */
497       fn = DECL_ABSTRACT_ORIGIN (fn);
498       if (!fn
499 	  || TREE_CODE (TREE_TYPE (fn)) != METHOD_TYPE
500 	  || (!DECL_CXX_CONSTRUCTOR_P (fn) && !DECL_CXX_DESTRUCTOR_P (fn)))
501 	return NULL_TREE;
502     }
503 
504   if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
505     return NULL_TREE;
506 
507   return fn;
508 }
509 
510 /* Return a FUNCTION_DECL if BLOCK represents a constructor or destructor.
511    If CHECK_CLONES is true, also check for clones of ctor/dtors.  */
512 
513 tree
514 inlined_polymorphic_ctor_dtor_block_p (tree block, bool check_clones)
515 {
516   tree fn = block_ultimate_origin (block);
517   if (fn == NULL || TREE_CODE (fn) != FUNCTION_DECL)
518     return NULL_TREE;
519 
520   return polymorphic_ctor_dtor_p (fn, check_clones);
521 }
522 
523 
/* We know that the instance is stored in variable or parameter
   (not dynamically allocated) and we want to disprove the fact
   that it may be in construction at invocation of CALL.

   BASE represents memory location where instance is stored.
   If BASE is NULL, it is assumed to be global memory.
   OUTER_TYPE is known type of the instance or NULL if not
   known.

   For the variable to be in construction we actually need to
   be in constructor of corresponding global variable or
   the inline stack of CALL must contain the constructor.
   Check this condition.  This check works safely only before
   IPA passes, because inline stacks may become out of date
   later.  */

bool
decl_maybe_in_construction_p (tree base, tree outer_type,
			      gimple *call, tree function)
{
  if (outer_type)
    outer_type = TYPE_MAIN_VARIANT (outer_type);
  gcc_assert (!base || DECL_P (base));

  /* After inlining the code unification optimizations may invalidate
     inline stacks.  Also we need to give up on global variables after
     IPA, because addresses of these may have been propagated to their
     constructors.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;

  /* Pure functions can not do any changes on the dynamic type;
     that require writing to memory.  */
  if ((!base || !auto_var_in_fn_p (base, function))
      && flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;

  /* Walk the inline stack of CALL looking for an inlined ctor/dtor.
     For non-local memory we must also consider clones of ctors/dtors
     where the instance pointer was constant-propagated.  */
  bool check_clones = !base || is_global_var (base);
  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
       block = BLOCK_SUPERCONTEXT (block))
    if (tree fn = inlined_polymorphic_ctor_dtor_block_p (block, check_clones))
      {
	tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));

	/* If the ctor/dtor type is not ODR-comparable with OUTER_TYPE,
	   conservatively report possible construction for any
	   polymorphic record.  */
	if (!outer_type || !types_odr_comparable (type, outer_type))
	  {
	    if (TREE_CODE (type) == RECORD_TYPE
		&& TYPE_BINFO (type)
		&& polymorphic_type_binfo_p (TYPE_BINFO (type)))
	      return true;
	  }
 	else if (types_same_for_odr (type, outer_type))
	  return true;
      }

  /* For global memory, FUNCTION itself (or the ctor/dtor it was cloned
     from) may be constructing the instance.  */
  if (!base || (VAR_P (base) && is_global_var (base)))
    {
      if (TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
	  || (!DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)))
	{
	  if (!DECL_ABSTRACT_ORIGIN (function))
	    return false;
	  /* Watch for clones where we constant propagated the first
	     argument (pointer to the instance).  */
	  function = DECL_ABSTRACT_ORIGIN (function);
	  if (!function
	      || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE
	      || (!DECL_CXX_CONSTRUCTOR_P (function)
		  && !DECL_CXX_DESTRUCTOR_P (function)))
	    return false;
	}
      /* Same type comparison as in the inline-stack walk above.  */
      tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (function));
      if (!outer_type || !types_odr_comparable (type, outer_type))
	{
	  if (TREE_CODE (type) == RECORD_TYPE
	      && TYPE_BINFO (type)
	      && polymorphic_type_binfo_p (TYPE_BINFO (type)))
	    return true;
	}
      else if (types_same_for_odr (type, outer_type))
	return true;
    }
  return false;
}
609 
610 /* Dump human readable context to F.  If NEWLINE is true, it will be terminated
611    by a newline.  */
612 
613 void
614 ipa_polymorphic_call_context::dump (FILE *f, bool newline) const
615 {
616   fprintf (f, "    ");
617   if (invalid)
618     fprintf (f, "Call is known to be undefined");
619   else
620     {
621       if (useless_p ())
622 	fprintf (f, "nothing known");
623       if (outer_type || offset)
624 	{
625 	  fprintf (f, "Outer type%s:", dynamic ? " (dynamic)":"");
626 	  print_generic_expr (f, outer_type, TDF_SLIM);
627 	  if (maybe_derived_type)
628 	    fprintf (f, " (or a derived type)");
629 	  if (maybe_in_construction)
630 	    fprintf (f, " (maybe in construction)");
631 	  fprintf (f, " offset " HOST_WIDE_INT_PRINT_DEC,
632 		   offset);
633 	}
634       if (speculative_outer_type)
635 	{
636 	  if (outer_type || offset)
637 	    fprintf (f, " ");
638 	  fprintf (f, "Speculative outer type:");
639 	  print_generic_expr (f, speculative_outer_type, TDF_SLIM);
640 	  if (speculative_maybe_derived_type)
641 	    fprintf (f, " (or a derived type)");
642 	  fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC,
643 		   speculative_offset);
644 	}
645     }
646   if (newline)
647     fprintf(f, "\n");
648 }
649 
650 /* Print context to stderr.  */
651 
652 void
653 ipa_polymorphic_call_context::debug () const
654 {
655   dump (stderr);
656 }
657 
/* Stream out the context to OB.  The bit and field layout written here
   must be kept in sync with stream_in below.  */

void
ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);

  /* Pack all boolean flags plus presence bits for the optional fields
     into a single bitpack.  */
  bp_pack_value (&bp, invalid, 1);
  bp_pack_value (&bp, maybe_in_construction, 1);
  bp_pack_value (&bp, maybe_derived_type, 1);
  bp_pack_value (&bp, speculative_maybe_derived_type, 1);
  bp_pack_value (&bp, dynamic, 1);
  bp_pack_value (&bp, outer_type != NULL, 1);
  bp_pack_value (&bp, offset != 0, 1);
  bp_pack_value (&bp, speculative_outer_type != NULL, 1);
  streamer_write_bitpack (&bp);

  /* Stream the optional fields only when present, as advertised by the
     presence bits above.  */
  if (outer_type != NULL)
    stream_write_tree (ob, outer_type, true);
  if (offset)
    streamer_write_hwi (ob, offset);
  if (speculative_outer_type != NULL)
    {
      stream_write_tree (ob, speculative_outer_type, true);
      streamer_write_hwi (ob, speculative_offset);
    }
  else
    gcc_assert (!speculative_offset);
}
687 
/* Stream in the context from IB and DATA_IN.  Unpacking must mirror the
   exact order used by stream_out above.  */

void
ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
					 struct data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);

  invalid = bp_unpack_value (&bp, 1);
  maybe_in_construction = bp_unpack_value (&bp, 1);
  maybe_derived_type = bp_unpack_value (&bp, 1);
  speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
  dynamic = bp_unpack_value (&bp, 1);
  /* Presence bits for the optional fields streamed after the bitpack.  */
  bool outer_type_p = bp_unpack_value (&bp, 1);
  bool offset_p = bp_unpack_value (&bp, 1);
  bool speculative_outer_type_p = bp_unpack_value (&bp, 1);

  if (outer_type_p)
    outer_type = stream_read_tree (ib, data_in);
  else
    outer_type = NULL;
  if (offset_p)
    offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    offset = 0;
  if (speculative_outer_type_p)
    {
      speculative_outer_type = stream_read_tree (ib, data_in);
      speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
    }
  else
    {
      speculative_outer_type = NULL;
      speculative_offset = 0;
    }
}
724 
725 /* Proudce polymorphic call context for call method of instance
726    that is located within BASE (that is assumed to be a decl) at offset OFF. */
727 
728 void
729 ipa_polymorphic_call_context::set_by_decl (tree base, HOST_WIDE_INT off)
730 {
731   gcc_assert (DECL_P (base));
732   clear_speculation ();
733 
734   if (!contains_polymorphic_type_p (TREE_TYPE (base)))
735     {
736       clear_outer_type ();
737       offset = off;
738       return;
739     }
740   outer_type = TYPE_MAIN_VARIANT (TREE_TYPE (base));
741   offset = off;
742   /* Make very conservative assumption that all objects
743      may be in construction.
744 
745      It is up to caller to revisit this via
746      get_dynamic_type or decl_maybe_in_construction_p.  */
747   maybe_in_construction = true;
748   maybe_derived_type = false;
749   dynamic = false;
750 }
751 
/* CST is an invariant (address of decl), try to get meaningful
   polymorphic call context for polymorphic call of method
   of instance of OTR_TYPE that is located at offset OFF of this invariant.
   Return FALSE if nothing meaningful can be found.  */

bool
ipa_polymorphic_call_context::set_by_invariant (tree cst,
						tree otr_type,
						HOST_WIDE_INT off)
{
  poly_int64 offset2, size, max_size;
  bool reverse;
  tree base;

  invalid = false;
  /* NOTE(review): the incoming OFF parameter is unconditionally clobbered
     here, so the context is always built for offset 0 regardless of the
     caller-supplied offset.  Looks suspicious given the comment above —
     confirm whether non-zero offsets are meant to be supported.  */
  off = 0;
  clear_outer_type (otr_type);

  if (TREE_CODE (cst) != ADDR_EXPR)
    return false;

  /* Strip the ADDR_EXPR and find the underlying declaration; give up
     unless the reference covers a constant-sized extent of a decl.  */
  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset2, &size, &max_size, &reverse);
  if (!DECL_P (base) || !known_size_p (max_size) || maybe_ne (max_size, size))
    return false;

  /* Only type inconsistent programs can have otr_type that is
     not part of outer type.  */
  if (otr_type && !contains_type_p (TREE_TYPE (base), off, otr_type))
    return false;

  set_by_decl (base, off);
  return true;
}
786 
/* See if OP is SSA name initialized as a copy or by single assignment.
   If so, walk the SSA graph up.  Because simple PHI conditional is considered
   copy, GLOBAL_VISITED may be used to avoid infinite loop walking the SSA
   graph.  */

static tree
walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
{
  hash_set <tree> *visited = NULL;
  STRIP_NOPS (op);
  while (TREE_CODE (op) == SSA_NAME
	 && !SSA_NAME_IS_DEFAULT_DEF (op)
	 /* We might be called via fold_stmt during cfgcleanup where
	    SSA form need not be up-to-date.  */
	 && !name_registered_for_update_p (op)
	 && (gimple_assign_single_p (SSA_NAME_DEF_STMT (op))
	     || gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI))
    {
      /* Guard against cycles in the SSA graph: record every visited name,
	 either in the caller-provided set (shared across walks) or in a
	 local set allocated on demand.  */
      if (global_visited)
	{
	  if (!*global_visited)
	    *global_visited = new hash_set<tree>;
	  if ((*global_visited)->add (op))
	    goto done;
	}
      else
	{
	  if (!visited)
	    visited = new hash_set<tree>;
	  if (visited->add (op))
	    goto done;
	}
      /* Special case
	 if (ptr == 0)
	   ptr = 0;
	 else
	   ptr = ptr.foo;
	 This pattern is implicitly produced for casts to non-primary
	 bases.  When doing context analysis, we do not really care
	 about the case pointer is NULL, because the call will be
	 undefined anyway.  */
      if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
	{
	  gimple *phi = SSA_NAME_DEF_STMT (op);

	  /* Only single-argument PHIs and two-argument PHIs where one
	     argument is a NULL pointer are treated as copies.  */
	  if (gimple_phi_num_args (phi) > 2)
	    goto done;
	  if (gimple_phi_num_args (phi) == 1)
	    op = gimple_phi_arg_def (phi, 0);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 0)))
	    op = gimple_phi_arg_def (phi, 1);
	  else if (integer_zerop (gimple_phi_arg_def (phi, 1)))
	    op = gimple_phi_arg_def (phi, 0);
	  else
	    goto done;
	}
      else
	{
	  /* Do not look through memory loads; follow only register
	     copies.  */
	  if (gimple_assign_load_p (SSA_NAME_DEF_STMT (op)))
	    goto done;
	  op = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op));
	}
      STRIP_NOPS (op);
    }
done:
  if (visited)
    delete (visited);
  return op;
}
856 
/* Create polymorphic call context from IP invariant CST.
   This is typically &global_var.
   OTR_TYPE specify type of polymorphic call or NULL if unknown, OFF
   is offset of call.  */

ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
							    tree otr_type,
							    HOST_WIDE_INT off)
{
  /* Start from no speculation; set_by_invariant fills in (or clears)
     the remaining fields.  */
  clear_speculation ();
  set_by_invariant (cst, otr_type, off);
}
869 
870 /* Build context for pointer REF contained in FNDECL at statement STMT.
871    if INSTANCE is non-NULL, return pointer to the object described by
872    the context or DECL where context is contained in.  */
873 
ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
							    tree ref,
							    gimple *stmt,
							    tree *instance)
{
  tree otr_type = NULL;
  tree base_pointer;
  hash_set <tree> *visited = NULL;

  /* For an OBJ_TYPE_REF we know both the class of the call and the
     instance pointer; for a plain pointer we only have the pointer.  */
  if (TREE_CODE (ref) == OBJ_TYPE_REF)
    {
      otr_type = obj_type_ref_class (ref);
      base_pointer = OBJ_TYPE_REF_OBJECT (ref);
    }
  else
    base_pointer = ref;

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  clear_speculation ();
  clear_outer_type (otr_type);
  invalid = false;

  /* Walk SSA for outer object.  Each iteration either strips an ADDR_EXPR /
     MEM_REF dereference or folds a constant POINTER_PLUS_EXPR offset into
     OFFSET, until we reach a DECL, an unanalyzable expression, or an SSA
     name with no further copies to follow.  */
  while (true)
    {
      base_pointer = walk_ssa_copies (base_pointer, &visited);
      if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT offset2, size;
	  bool reverse;
	  tree base
	    = get_ref_base_and_extent_hwi (TREE_OPERAND (base_pointer, 0),
					   &offset2, &size, &reverse);
	  if (!base)
	    break;

	  /* Even if we later give up on the outer type, the type of the
	     address taken here is a valid speculation.  */
	  combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
				    offset + offset2,
				    true,
				    NULL /* Do not change outer type.  */);

	  /* If this is a varying address, punt.  */
	  if (TREE_CODE (base) == MEM_REF || DECL_P (base))
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
	      offset_int mem_offset;
	      if (TREE_CODE (base) == MEM_REF
		  && mem_ref_offset (base).is_constant (&mem_offset))
		{
		  /* Accumulate the MEM_REF offset (bytes) into the running
		     bit offset, guarding against overflow of HWI.  */
		  offset_int o = mem_offset * BITS_PER_UNIT;
		  o += offset;
		  o += offset2;
		  if (!wi::fits_shwi_p (o))
		    break;
		  base_pointer = TREE_OPERAND (base, 0);
		  offset = o.to_shwi ();
		  outer_type = NULL;
		}
	      /* We found base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  if (visited)
		    delete (visited);
		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (otr_type
		      && !contains_type_p (TREE_TYPE (base),
					   offset + offset2, otr_type))
		    {
		      invalid = true;
		      if (instance)
			*instance = base_pointer;
		      return;
		    }
		  set_by_decl (base, offset + offset2);
		  if (outer_type && maybe_in_construction && stmt)
		    maybe_in_construction
		     = decl_maybe_in_construction_p (base,
						     outer_type,
						     stmt,
						     fndecl);
		  if (instance)
		    *instance = base;
		  return;
		}
	      else
		break;
	    }
	  else
	    break;
	}
      /* Constant pointer adjustment: fold it into OFFSET and keep walking
	 from the unadjusted pointer.  */
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && TREE_CODE (TREE_OPERAND (base_pointer, 1)) == INTEGER_CST)
	{
	  offset_int o
	    = offset_int::from (wi::to_wide (TREE_OPERAND (base_pointer, 1)),
				SIGNED);
	  o *= BITS_PER_UNIT;
	  o += offset;
	  if (!wi::fits_shwi_p (o))
	    break;
	  offset = o.to_shwi ();
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }

  if (visited)
    delete (visited);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  cgraph_node *node = cgraph_node::get (current_function_decl);
	  gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE
		      || TREE_CODE (outer_type) == UNION_TYPE);

	  /* Handle the case we inlined into a thunk.  In this case
	     thunk has THIS pointer of type bar, but it really receives
	     address to its base type foo which sits in bar at
	     0-thunk.fixed_offset.  It starts with code that adds
	     thunk.fixed_offset to the pointer to compensate for this.

	     Because we walked all the way to the beginning of thunk, we now
	     see pointer &bar-thunk.fixed_offset and need to compensate
	     for it.  */
	  if (node->thunk.fixed_offset)
	    offset -= node->thunk.fixed_offset * BITS_PER_UNIT;

	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if ((otr_type
	       && !contains_type_p (outer_type, offset,
				    otr_type))
	      || !contains_polymorphic_type_p (outer_type)
	      /* If we compile thunk with virtual offset, the THIS pointer
		 is adjusted by unknown value.  We can't thus use outer info
		 at all.  */
	      || node->thunk.virtual_offset_p)
	    {
	      outer_type = NULL;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }

	  dynamic = true;

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      maybe_in_construction = true;
	      maybe_derived_type = false;
	    }
	  else
	    {
	      maybe_derived_type = true;
	      maybe_in_construction = false;
	    }
	  if (instance)
	    {
	      /* If method is expanded thunk, we need to apply thunk offset
		 to instance pointer.  */
	      if (node->thunk.virtual_offset_p
		  || node->thunk.fixed_offset)
		*instance = NULL;
	      else
	        *instance = base_pointer;
	    }
	  return;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  outer_type
	     = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (base_pointer)));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (otr_type && !contains_type_p (outer_type, offset,
					    otr_type))
	    {
	      invalid = true;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  /* Non-polymorphic types have no interest for us.  */
	  else if (!otr_type && !contains_polymorphic_type_p (outer_type))
	    {
	      outer_type = NULL;
	      if (instance)
		*instance = base_pointer;
	      return;
	    }
	  maybe_derived_type = false;
	  maybe_in_construction = false;
	  if (instance)
	    *instance = base_pointer;
	  return;
	}
    }

  tree base_type = TREE_TYPE (base_pointer);

  /* A default-def SSA name that is neither a PARM_DECL nor RESULT_DECL is
     an uninitialized value; any type derived from it would be bogus.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && !(TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL
	   || TREE_CODE (SSA_NAME_VAR (base_pointer)) == RESULT_DECL))
    {
      invalid = true;
      if (instance)
	*instance = base_pointer;
      return;
    }
  /* If the pointer was produced by a simple assignment, the type of its
     RHS may be more informative than the SSA name's own type.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_DEF_STMT (base_pointer)
      && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
    base_type = TREE_TYPE (gimple_assign_rhs1
			    (SSA_NAME_DEF_STMT (base_pointer)));

  if (base_type && POINTER_TYPE_P (base_type))
    combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base_type)),
			      offset,
			      true, NULL /* Do not change type here */);
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to an constructor call prior our reference.
     We do not make this type of flow sensitive analysis yet.  */
  if (instance)
    *instance = base_pointer;
  return;
}
1123 
1124 /* Structure to be passed in between detect_type_change and
1125    check_stmt_for_type_change.  */
1126 
struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree instance;
  /* The reference to virtual table pointer used.  */
  tree vtbl_ptr_ref;
  /* Type of the call (the OBJ_TYPE_REF class); any recorded type must
     contain it at the given offset, or it is ignored.  */
  tree otr_type;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Bit offset within KNOWN_CURRENT_TYPE that the recorded store refers
     to.  */
  HOST_WIDE_INT known_current_offset;

  /* Set to nonzero if we possibly missed some dynamic type changes and we
     should consider the set to be speculative.  */
  unsigned speculative;

  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
  /* Set to true when a store to the vtbl pointer could not be analyzed;
     any result must then stay speculative.  */
  bool seen_unanalyzed_store;
};
1154 
/* Return true if STMT is not a call and can modify a virtual method table
   pointer.  We take advantage of the fact that vtable stores must appear
   within constructor and destructor functions.  */
1158 
1159 static bool
1160 noncall_stmt_may_be_vtbl_ptr_store (gimple *stmt)
1161 {
1162   if (is_gimple_assign (stmt))
1163     {
1164       tree lhs = gimple_assign_lhs (stmt);
1165 
1166       if (gimple_clobber_p (stmt))
1167 	return false;
1168       if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
1169 	{
1170 	  if (flag_strict_aliasing
1171 	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
1172 	    return false;
1173 
1174 	  if (TREE_CODE (lhs) == COMPONENT_REF
1175 	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1176 	    return false;
1177 	  /* In the future we might want to use get_ref_base_and_extent to find
1178 	     if there is a field corresponding to the offset and if so, proceed
1179 	     almost like if it was a component ref.  */
1180 	}
1181     }
1182 
1183   /* Code unification may mess with inline stacks.  */
1184   if (cfun->after_inlining)
1185     return true;
1186 
1187   /* Walk the inline stack and watch out for ctors/dtors.
1188      TODO: Maybe we can require the store to appear in toplevel
1189      block of CTOR/DTOR.  */
1190   for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
1191        block = BLOCK_SUPERCONTEXT (block))
1192     if (BLOCK_ABSTRACT_ORIGIN (block)
1193 	&& TREE_CODE (block_ultimate_origin (block)) == FUNCTION_DECL)
1194       return inlined_polymorphic_ctor_dtor_block_p (block, false);
1195   return (TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
1196 	  && (DECL_CXX_CONSTRUCTOR_P (current_function_decl)
1197 	      || DECL_CXX_DESTRUCTOR_P (current_function_decl)));
1198 }
1199 
1200 /* If STMT can be proved to be an assignment to the virtual method table
1201    pointer of ANALYZED_OBJ and the type associated with the new table
1202    identified, return the type.  Otherwise return NULL_TREE if type changes
1203    in unknown way or ERROR_MARK_NODE if type is unchanged.  */
1204 
1205 static tree
1206 extr_type_from_vtbl_ptr_store (gimple *stmt, struct type_change_info *tci,
1207 			       HOST_WIDE_INT *type_offset)
1208 {
1209   poly_int64 offset, size, max_size;
1210   tree lhs, rhs, base;
1211   bool reverse;
1212 
1213   if (!gimple_assign_single_p (stmt))
1214     return NULL_TREE;
1215 
1216   lhs = gimple_assign_lhs (stmt);
1217   rhs = gimple_assign_rhs1 (stmt);
1218   if (TREE_CODE (lhs) != COMPONENT_REF
1219       || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
1220      {
1221 	if (dump_file)
1222 	  fprintf (dump_file, "  LHS is not virtual table.\n");
1223 	return NULL_TREE;
1224      }
1225 
1226   if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
1227     ;
1228   else
1229     {
1230       base = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
1231       if (DECL_P (tci->instance))
1232 	{
1233 	  if (base != tci->instance)
1234 	    {
1235 	      if (dump_file)
1236 		{
1237 		  fprintf (dump_file, "    base:");
1238 		  print_generic_expr (dump_file, base, TDF_SLIM);
1239 		  fprintf (dump_file, " does not match instance:");
1240 		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1241 		  fprintf (dump_file, "\n");
1242 		}
1243 	      return NULL_TREE;
1244 	    }
1245 	}
1246       else if (TREE_CODE (base) == MEM_REF)
1247 	{
1248 	  if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0))
1249 	    {
1250 	      if (dump_file)
1251 		{
1252 		  fprintf (dump_file, "    base mem ref:");
1253 		  print_generic_expr (dump_file, base, TDF_SLIM);
1254 		  fprintf (dump_file, " does not match instance:");
1255 		  print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1256 		  fprintf (dump_file, "\n");
1257 		}
1258 	      return NULL_TREE;
1259 	    }
1260 	  if (!integer_zerop (TREE_OPERAND (base, 1)))
1261 	    {
1262 	      if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
1263 		{
1264 		  if (dump_file)
1265 		    {
1266 		      fprintf (dump_file, "    base mem ref:");
1267 		      print_generic_expr (dump_file, base, TDF_SLIM);
1268 		      fprintf (dump_file, " has non-representable offset:");
1269 		      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1270 		      fprintf (dump_file, "\n");
1271 		    }
1272 		  return NULL_TREE;
1273 		}
1274 	      else
1275 	        offset += tree_to_shwi (TREE_OPERAND (base, 1)) * BITS_PER_UNIT;
1276 	    }
1277 	}
1278       else if (!operand_equal_p (tci->instance, base, 0)
1279 	       || tci->offset)
1280 	{
1281 	  if (dump_file)
1282 	    {
1283 	      fprintf (dump_file, "    base:");
1284 	      print_generic_expr (dump_file, base, TDF_SLIM);
1285 	      fprintf (dump_file, " does not match instance:");
1286 	      print_generic_expr (dump_file, tci->instance, TDF_SLIM);
1287 	      fprintf (dump_file, " with offset %i\n", (int)tci->offset);
1288 	    }
1289 	  return tci->offset > POINTER_SIZE ? error_mark_node : NULL_TREE;
1290 	}
1291       if (maybe_ne (offset, tci->offset)
1292 	  || maybe_ne (size, POINTER_SIZE)
1293 	  || maybe_ne (max_size, POINTER_SIZE))
1294 	{
1295 	  if (dump_file)
1296 	    {
1297 	      fprintf (dump_file, "    wrong offset ");
1298 	      print_dec (offset, dump_file);
1299 	      fprintf (dump_file, "!=%i or size ", (int) tci->offset);
1300 	      print_dec (size, dump_file);
1301 	      fprintf (dump_file, "\n");
1302 	    }
1303 	  return (known_le (offset + POINTER_SIZE, tci->offset)
1304 		  || (known_size_p (max_size)
1305 		      && known_gt (tci->offset + POINTER_SIZE,
1306 				   offset + max_size))
1307 		  ? error_mark_node : NULL);
1308 	}
1309     }
1310 
1311   tree vtable;
1312   unsigned HOST_WIDE_INT offset2;
1313 
1314   if (!vtable_pointer_value_to_vtable (rhs, &vtable, &offset2))
1315     {
1316       if (dump_file)
1317 	fprintf (dump_file, "    Failed to lookup binfo\n");
1318       return NULL;
1319     }
1320 
1321   tree binfo = subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1322 					       offset2, vtable);
1323   if (!binfo)
1324     {
1325       if (dump_file)
1326 	fprintf (dump_file, "    Construction vtable used\n");
1327       /* FIXME: We should suport construction contexts.  */
1328       return NULL;
1329     }
1330 
1331   *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
1332   return DECL_CONTEXT (vtable);
1333 }
1334 
1335 /* Record dynamic type change of TCI to TYPE.  */
1336 
static void
record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
{
  if (dump_file)
    {
      if (type)
	{
          fprintf (dump_file, "  Recording type: ");
	  print_generic_expr (dump_file, type, TDF_SLIM);
          fprintf (dump_file, " at offset %i\n", (int)offset);
	}
     else
       fprintf (dump_file, "  Recording unknown type\n");
    }

  /* If we found a constructor of type that is not polymorphic or
     that may contain the type in question as a field (not as base),
     restrict to the inner class first to make type matching below
     happier.  */
  if (type
      && (offset
          || (TREE_CODE (type) != RECORD_TYPE
	      || !TYPE_BINFO (type)
	      || !polymorphic_type_binfo_p (TYPE_BINFO (type)))))
    {
      ipa_polymorphic_call_context context;

      context.offset = offset;
      context.outer_type = type;
      context.maybe_in_construction = false;
      context.maybe_derived_type = false;
      context.dynamic = true;
      /* If we failed to find the inner type, we know that the call
	 would be undefined for type produced here.  */
      if (!context.restrict_to_inner_class (tci->otr_type))
	{
	  if (dump_file)
	    fprintf (dump_file, "  Ignoring; does not contain otr_type\n");
	  return;
	}
      /* Watch for case we reached a POD type and anticipate placement
	 new.  */
      if (!context.maybe_derived_type)
	{
          type = context.outer_type;
          offset = context.offset;
	}
    }
  /* Seeing a second, different (type, offset) pair invalidates
     known_current_type; the caller must treat the walk result as
     ambiguous.  */
  if (tci->type_maybe_changed
      && (!types_same_for_odr (type, tci->known_current_type)
	  || offset != tci->known_current_offset))
    tci->multiple_types_encountered = true;
  tci->known_current_type = TYPE_MAIN_VARIANT (type);
  tci->known_current_offset = offset;
  tci->type_maybe_changed = true;
}
1393 
1394 
/* The maximum number of may-defs we visit when looking for a must-def
   that changes the dynamic type in check_stmt_for_type_change.  Tuned
   after the PR12392 testcase which, when unlimited, spends 40% of its
   time within these alias walks and 8% with the following limit.  */
1399 
1400 static inline bool
1401 csftc_abort_walking_p (unsigned speculative)
1402 {
1403   unsigned max = PARAM_VALUE (PARAM_MAX_SPECULATIVE_DEVIRT_MAYDEFS);
1404   return speculative > max ? true : false;
1405 }
1406 
1407 /* Callback of walk_aliased_vdefs and a helper function for
1408    detect_type_change to check whether a particular statement may modify
1409    the virtual table pointer, and if possible also determine the new type of
1410    the (sub-)object.  It stores its result into DATA, which points to a
1411    type_change_info structure.  */
1412 
static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;
  tree fn;

  /* If we already gave up, just terminate the rest of walk.  */
  if (tci->multiple_types_encountered)
    return true;

  if (is_gimple_call (stmt))
    {
      /* Const/pure calls cannot store, hence cannot change the type.  */
      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
	return false;

      /* Check for a constructor call.  */
      if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
	  && DECL_CXX_CONSTRUCTOR_P (fn)
	  && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
	  && gimple_call_num_args (stmt))
      {
	tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
	tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
	HOST_WIDE_INT offset = 0;
	bool reverse;

	if (dump_file)
	  {
	    fprintf (dump_file, "  Checking constructor call: ");
	    print_gimple_stmt (dump_file, stmt, 0);
	  }

	/* See if THIS parameter seems like instance pointer.  */
	if (TREE_CODE (op) == ADDR_EXPR)
	  {
	    HOST_WIDE_INT size;
	    op = get_ref_base_and_extent_hwi (TREE_OPERAND (op, 0),
					      &offset, &size, &reverse);
	    /* Each unanalyzable pattern bumps the speculative counter and
	       possibly aborts the walk via csftc_abort_walking_p.  */
	    if (!op)
	      {
                tci->speculative++;
	        return csftc_abort_walking_p (tci->speculative);
	      }
	    if (TREE_CODE (op) == MEM_REF)
	      {
		if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
		  {
                    tci->speculative++;
		    return csftc_abort_walking_p (tci->speculative);
		  }
		offset += tree_to_shwi (TREE_OPERAND (op, 1))
			  * BITS_PER_UNIT;
		op = TREE_OPERAND (op, 0);
	      }
	    else if (DECL_P (op))
	      ;
	    else
	      {
                tci->speculative++;
	        return csftc_abort_walking_p (tci->speculative);
	      }
	    op = walk_ssa_copies (op);
	  }
	/* The constructed type must be the instance we track and its
	   object must cover the vtbl pointer slot at tci->offset.  */
	if (operand_equal_p (op, tci->instance, 0)
	    && TYPE_SIZE (type)
	    && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	    && tree_fits_shwi_p (TYPE_SIZE (type))
	    && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset
	    /* Some inlined constructors may look as follows:
		  _3 = operator new (16);
		  MEM[(struct  &)_3] ={v} {CLOBBER};
		  MEM[(struct CompositeClass *)_3]._vptr.CompositeClass
		    = &MEM[(void *)&_ZTV14CompositeClass + 16B];
		  _7 = &MEM[(struct CompositeClass *)_3].object;
		  EmptyClass::EmptyClass (_7);

	       When determining dynamic type of _3 and because we stop at first
	       dynamic type found, we would stop on EmptyClass::EmptyClass (_7).
	       In this case the emptyclass is not even polymorphic and we miss
	       it is contained in an outer type that is polymorphic.  */

	    && (tci->offset == offset || contains_polymorphic_type_p (type)))
	  {
	    record_known_type (tci, type, tci->offset - offset);
	    return true;
	  }
      }
     /* Calls may possibly change dynamic type by placement new. Assume
        it will not happen, but make result speculative only.  */
     if (dump_file)
	{
          fprintf (dump_file, "  Function call may change dynamic type:");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
     tci->speculative++;
     return csftc_abort_walking_p (tci->speculative);
   }
  /* Check for inlined virtual table store.  */
  else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      HOST_WIDE_INT offset = 0;
      if (dump_file)
	{
	  fprintf (dump_file, "  Checking vtbl store: ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      /* error_mark_node means the store provably did not change the
	 type; NULL means we could not analyze it.  */
      type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
      if (type == error_mark_node)
	return false;
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (!type)
	{
	  if (dump_file)
	    fprintf (dump_file, "  Unanalyzed store may change type.\n");
	  tci->seen_unanalyzed_store = true;
	  tci->speculative++;
	}
      else
        record_known_type (tci, type, offset);
      return true;
    }
  else
    return false;
}
1540 
/* THIS is polymorphic call context obtained from get_polymorphic_context.
   OTR_OBJECT is pointer to the instance returned by OBJ_TYPE_REF_OBJECT.
   INSTANCE is pointer to the outer instance as returned by
   get_polymorphic_context.  To avoid creation of temporary expressions,
   INSTANCE may also be a declaration, if get_polymorphic_context found the
   value to be in static storage.

   If the type of instance is not fully determined
   (either OUTER_TYPE is unknown or MAYBE_IN_CONSTRUCTION/INCLUDE_DERIVED_TYPES
   is set), try to walk memory writes and find the actual construction of the
   instance.

   Return true if memory is unchanged from function entry.

   We do not include this analysis in the context analysis itself, because
   it needs memory SSA to be fully built and the walk may be expensive.
   So it is not suitable for use within fold_stmt and similar uses.  */
1558 
1559 bool
1560 ipa_polymorphic_call_context::get_dynamic_type (tree instance,
1561 						tree otr_object,
1562 						tree otr_type,
1563 						gimple *call)
1564 {
1565   struct type_change_info tci;
1566   ao_ref ao;
1567   bool function_entry_reached = false;
1568   tree instance_ref = NULL;
1569   gimple *stmt = call;
1570   /* Remember OFFSET before it is modified by restrict_to_inner_class.
1571      This is because we do not update INSTANCE when walking inwards.  */
1572   HOST_WIDE_INT instance_offset = offset;
1573   tree instance_outer_type = outer_type;
1574 
1575   if (!instance)
1576     return false;
1577 
1578   if (otr_type)
1579     otr_type = TYPE_MAIN_VARIANT (otr_type);
1580 
1581   /* Walk into inner type. This may clear maybe_derived_type and save us
1582      from useless work.  It also makes later comparsions with static type
1583      easier.  */
1584   if (outer_type && otr_type)
1585     {
1586       if (!restrict_to_inner_class (otr_type))
1587         return false;
1588     }
1589 
1590   if (!maybe_in_construction && !maybe_derived_type)
1591     return false;
1592 
1593   /* If we are in fact not looking at any object object or the instance is
1594      some placement new into a random load, give up straight away.  */
1595   if (TREE_CODE (instance) == MEM_REF)
1596     return false;
1597 
1598   /* We need to obtain refernce to virtual table pointer.  It is better
1599      to look it up in the code rather than build our own.  This require bit
1600      of pattern matching, but we end up verifying that what we found is
1601      correct.
1602 
1603      What we pattern match is:
1604 
1605        tmp = instance->_vptr.A;   // vtbl ptr load
1606        tmp2 = tmp[otr_token];	  // vtable lookup
1607        OBJ_TYPE_REF(tmp2;instance->0) (instance);
1608 
1609      We want to start alias oracle walk from vtbl pointer load,
1610      but we may not be able to identify it, for example, when PRE moved the
1611      load around.  */
1612 
1613   if (gimple_code (call) == GIMPLE_CALL)
1614     {
1615       tree ref = gimple_call_fn (call);
1616       bool reverse;
1617 
1618       if (TREE_CODE (ref) == OBJ_TYPE_REF)
1619 	{
1620 	  ref = OBJ_TYPE_REF_EXPR (ref);
1621 	  ref = walk_ssa_copies (ref);
1622 
1623 	  /* If call target is already known, no need to do the expensive
1624  	     memory walk.  */
1625 	  if (is_gimple_min_invariant (ref))
1626 	    return false;
1627 
1628 	  /* Check if definition looks like vtable lookup.  */
1629 	  if (TREE_CODE (ref) == SSA_NAME
1630 	      && !SSA_NAME_IS_DEFAULT_DEF (ref)
1631 	      && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
1632 	      && TREE_CODE (gimple_assign_rhs1
1633 			     (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
1634 	    {
1635 	      ref = get_base_address
1636 		     (TREE_OPERAND (gimple_assign_rhs1
1637 				     (SSA_NAME_DEF_STMT (ref)), 0));
1638 	      ref = walk_ssa_copies (ref);
1639 	      /* Find base address of the lookup and see if it looks like
1640 		 vptr load.  */
1641 	      if (TREE_CODE (ref) == SSA_NAME
1642 		  && !SSA_NAME_IS_DEFAULT_DEF (ref)
1643 		  && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
1644 		{
1645 		  HOST_WIDE_INT offset2, size;
1646 		  tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
1647 		  tree base_ref
1648 		    = get_ref_base_and_extent_hwi (ref_exp, &offset2,
1649 						   &size, &reverse);
1650 
1651 		  /* Finally verify that what we found looks like read from
1652 		     OTR_OBJECT or from INSTANCE with offset OFFSET.  */
1653 		  if (base_ref
1654 		      && ((TREE_CODE (base_ref) == MEM_REF
1655 		           && ((offset2 == instance_offset
1656 		                && TREE_OPERAND (base_ref, 0) == instance)
1657 			       || (!offset2
1658 				   && TREE_OPERAND (base_ref, 0)
1659 				      == otr_object)))
1660 			  || (DECL_P (instance) && base_ref == instance
1661 			      && offset2 == instance_offset)))
1662 		    {
1663 		      stmt = SSA_NAME_DEF_STMT (ref);
1664 		      instance_ref = ref_exp;
1665 		    }
1666 		}
1667 	    }
1668 	}
1669     }
1670 
1671   /* If we failed to look up the reference in code, build our own.  */
1672   if (!instance_ref)
1673     {
1674       /* If the statement in question does not use memory, we can't tell
1675 	 anything.  */
1676       if (!gimple_vuse (stmt))
1677 	return false;
1678       ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
1679     }
1680   else
1681   /* Otherwise use the real reference.  */
1682     ao_ref_init (&ao, instance_ref);
1683 
1684   /* We look for vtbl pointer read.  */
1685   ao.size = POINTER_SIZE;
1686   ao.max_size = ao.size;
1687   /* We are looking for stores to vptr pointer within the instance of
1688      outer type.
1689      TODO: The vptr pointer type is globally known, we probably should
1690      keep it and do that even when otr_type is unknown.  */
1691   if (otr_type)
1692     {
1693       ao.base_alias_set
1694 	= get_alias_set (outer_type ? outer_type : otr_type);
1695       ao.ref_alias_set
1696         = get_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
1697     }
1698 
1699   if (dump_file)
1700     {
1701       fprintf (dump_file, "Determining dynamic type for call: ");
1702       print_gimple_stmt (dump_file, call, 0);
1703       fprintf (dump_file, "  Starting walk at: ");
1704       print_gimple_stmt (dump_file, stmt, 0);
1705       fprintf (dump_file, "  instance pointer: ");
1706       print_generic_expr (dump_file, otr_object, TDF_SLIM);
1707       fprintf (dump_file, "  Outer instance pointer: ");
1708       print_generic_expr (dump_file, instance, TDF_SLIM);
1709       fprintf (dump_file, " offset: %i (bits)", (int)instance_offset);
1710       fprintf (dump_file, " vtbl reference: ");
1711       print_generic_expr (dump_file, instance_ref, TDF_SLIM);
1712       fprintf (dump_file, "\n");
1713     }
1714 
1715   tci.offset = instance_offset;
1716   tci.instance = instance;
1717   tci.vtbl_ptr_ref = instance_ref;
1718   tci.known_current_type = NULL_TREE;
1719   tci.known_current_offset = 0;
1720   tci.otr_type = otr_type;
1721   tci.type_maybe_changed = false;
1722   tci.multiple_types_encountered = false;
1723   tci.speculative = 0;
1724   tci.seen_unanalyzed_store = false;
1725 
1726   walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
1727 		      &tci, NULL, &function_entry_reached);
1728 
1729   /* If we did not find any type changing statements, we may still drop
1730      maybe_in_construction flag if the context already have outer type.
1731 
1732      Here we make special assumptions about both constructors and
1733      destructors which are all the functions that are allowed to alter the
1734      VMT pointers.  It assumes that destructors begin with assignment into
1735      all VMT pointers and that constructors essentially look in the
1736      following way:
1737 
1738      1) The very first thing they do is that they call constructors of
1739      ancestor sub-objects that have them.
1740 
1741      2) Then VMT pointers of this and all its ancestors is set to new
1742      values corresponding to the type corresponding to the constructor.
1743 
1744      3) Only afterwards, other stuff such as constructor of member
1745      sub-objects and the code written by the user is run.  Only this may
1746      include calling virtual functions, directly or indirectly.
1747 
1748      4) placement new can not be used to change type of non-POD statically
1749      allocated variables.
1750 
1751      There is no way to call a constructor of an ancestor sub-object in any
1752      other way.
1753 
1754      This means that we do not have to care whether constructors get the
1755      correct type information because they will always change it (in fact,
1756      if we define the type to be given by the VMT pointer, it is undefined).
1757 
1758      The most important fact to derive from the above is that if, for some
1759      statement in the section 3, we try to detect whether the dynamic type
1760      has changed, we can safely ignore all calls as we examine the function
1761      body backwards until we reach statements in section 2 because these
1762      calls cannot be ancestor constructors or destructors (if the input is
1763      not bogus) and so do not change the dynamic type (this holds true only
1764      for automatically allocated objects but at the moment we devirtualize
1765      only these).  We then must detect that statements in section 2 change
1766      the dynamic type and can try to derive the new type.  That is enough
1767      and we can stop, we will never see the calls into constructors of
1768      sub-objects in this code.
1769 
1770      Therefore if the static outer type was found (outer_type)
1771      we can safely ignore tci.speculative that is set on calls and give up
1772      only if there was dyanmic type store that may affect given variable
1773      (seen_unanalyzed_store)  */
1774 
1775   if (!tci.type_maybe_changed
1776       || (outer_type
1777 	  && !dynamic
1778 	  && !tci.seen_unanalyzed_store
1779 	  && !tci.multiple_types_encountered
1780 	  && ((offset == tci.offset
1781 	       && types_same_for_odr (tci.known_current_type,
1782 				      outer_type))
1783 	       || (instance_offset == offset
1784 		   && types_same_for_odr (tci.known_current_type,
1785 					  instance_outer_type)))))
1786     {
1787       if (!outer_type || tci.seen_unanalyzed_store)
1788 	return false;
1789       if (maybe_in_construction)
1790         maybe_in_construction = false;
1791       if (dump_file)
1792 	fprintf (dump_file, "  No dynamic type change found.\n");
1793       return true;
1794     }
1795 
1796   if (tci.known_current_type
1797       && !function_entry_reached
1798       && !tci.multiple_types_encountered)
1799     {
1800       if (!tci.speculative)
1801 	{
1802 	  outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1803 	  offset = tci.known_current_offset;
1804 	  dynamic = true;
1805 	  maybe_in_construction = false;
1806 	  maybe_derived_type = false;
1807 	  if (dump_file)
1808 	    fprintf (dump_file, "  Determined dynamic type.\n");
1809 	}
1810       else if (!speculative_outer_type
1811 	       || speculative_maybe_derived_type)
1812 	{
1813 	  speculative_outer_type = TYPE_MAIN_VARIANT (tci.known_current_type);
1814 	  speculative_offset = tci.known_current_offset;
1815 	  speculative_maybe_derived_type = false;
1816 	  if (dump_file)
1817 	    fprintf (dump_file, "  Determined speculative dynamic type.\n");
1818 	}
1819     }
1820   else if (dump_file)
1821     {
1822       fprintf (dump_file, "  Found multiple types%s%s\n",
1823 	       function_entry_reached ? " (function entry reached)" : "",
1824 	       function_entry_reached ? " (multiple types encountered)" : "");
1825     }
1826 
1827   return false;
1828 }
1829 
1830 /* See if speculation given by SPEC_OUTER_TYPE, SPEC_OFFSET and SPEC_MAYBE_DERIVED_TYPE
1831    seems consistent (and useful) with what we already have in the non-speculative context.  */
1832 
1833 bool
1834 ipa_polymorphic_call_context::speculation_consistent_p (tree spec_outer_type,
1835 							HOST_WIDE_INT spec_offset,
1836 							bool spec_maybe_derived_type,
1837 							tree otr_type) const
1838 {
1839   if (!flag_devirtualize_speculatively)
1840     return false;
1841 
1842   /* Non-polymorphic types are useless for deriving likely polymorphic
1843      call targets.  */
1844   if (!spec_outer_type || !contains_polymorphic_type_p (spec_outer_type))
1845     return false;
1846 
1847   /* If we know nothing, speculation is always good.  */
1848   if (!outer_type)
1849     return true;
1850 
1851   /* Speculation is only useful to avoid derived types.
1852      This is not 100% true for placement new, where the outer context may
1853      turn out to be useless, but ignore these for now.  */
1854   if (!maybe_derived_type)
1855     return false;
1856 
1857   /* If types agrees, speculation is consistent, but it makes sense only
1858      when it says something new.  */
1859   if (types_must_be_same_for_odr (spec_outer_type, outer_type))
1860     return maybe_derived_type && !spec_maybe_derived_type;
1861 
1862   /* If speculation does not contain the type in question, ignore it.  */
1863   if (otr_type
1864       && !contains_type_p (spec_outer_type, spec_offset, otr_type, false, true))
1865     return false;
1866 
1867   /* If outer type already contains speculation as a filed,
1868      it is useless.  We already know from OUTER_TYPE
1869      SPEC_TYPE and that it is not in the construction.  */
1870   if (contains_type_p (outer_type, offset - spec_offset,
1871 		       spec_outer_type, false, false))
1872     return false;
1873 
1874   /* If speculative outer type is not more specified than outer
1875      type, just give up.
1876      We can only decide this safely if we can compare types with OUTER_TYPE.
1877    */
1878   if ((!in_lto_p || odr_type_p (outer_type))
1879       && !contains_type_p (spec_outer_type,
1880 			   spec_offset - offset,
1881 			   outer_type, false))
1882     return false;
1883   return true;
1884 }
1885 
/* Improve THIS with speculation described by NEW_OUTER_TYPE, NEW_OFFSET,
   NEW_MAYBE_DERIVED_TYPE
   If OTR_TYPE is set, assume the context is used with OTR_TYPE.
   Return true if THIS was modified in any way.  */

bool
ipa_polymorphic_call_context::combine_speculation_with
   (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
    tree otr_type)
{
  /* No incoming speculation; nothing to combine.  */
  if (!new_outer_type)
    return false;

  /* restrict_to_inner_class may eliminate wrong speculation making our job
     easier.  */
  if (otr_type)
    restrict_to_inner_class (otr_type);

  /* Refuse speculation that contradicts, or adds nothing to, the
     non-speculative part of the context.  */
  if (!speculation_consistent_p (new_outer_type, new_offset,
				 new_maybe_derived_type, otr_type))
    return false;

  /* New speculation is a win in case we have no speculation or new
     speculation does not consider derivations.  */
  if (!speculative_outer_type
      || (speculative_maybe_derived_type
	  && !new_maybe_derived_type))
    {
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;
      return true;
    }
  else if (types_must_be_same_for_odr (speculative_outer_type,
				       new_outer_type))
    {
      /* Same speculative type at two different offsets can not both be
	 right; drop the speculation entirely.  */
      if (speculative_offset != new_offset)
	{
	  /* OK we have two contexts that seems valid but they disagree,
	     just give up.

	     This is not a lattice operation, so we may want to drop it later.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "Speculative outer types match, "
		     "offset mismatch -> invalid speculation\n");
	  clear_speculation ();
	  return true;
	}
      else
	{
	  /* Same type at the same offset: the only possible improvement
	     is clearing the maybe-derived flag.  */
	  if (speculative_maybe_derived_type && !new_maybe_derived_type)
	    {
	      speculative_maybe_derived_type = false;
	      return true;
	    }
	  else
	    return false;
	}
    }
  /* Choose type that contains the other.  This one either contains the outer
     as a field (thus giving exactly one target) or is deeper in the type
     hierarchy.  */
  else if (speculative_outer_type
	   && speculative_maybe_derived_type
	   && (new_offset > speculative_offset
	       || (new_offset == speculative_offset
		   && contains_type_p (new_outer_type,
				       0, speculative_outer_type, false))))
    {
      /* Save the current speculation so it can be restored if the new one
	 turns out to be useless.  */
      tree old_outer_type = speculative_outer_type;
      HOST_WIDE_INT old_offset = speculative_offset;
      bool old_maybe_derived_type = speculative_maybe_derived_type;

      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;

      if (otr_type)
	restrict_to_inner_class (otr_type);

      /* If the speculation turned out to make no sense, revert to sensible
	 one.  */
      if (!speculative_outer_type)
	{
	  speculative_outer_type = old_outer_type;
	  speculative_offset = old_offset;
	  speculative_maybe_derived_type = old_maybe_derived_type;
	  return false;
	}
      return (old_offset != speculative_offset
	      || old_maybe_derived_type != speculative_maybe_derived_type
	      || types_must_be_same_for_odr (speculative_outer_type,
					     new_outer_type));
    }
  return false;
}
1982 
/* Make speculation less specific so
   NEW_OUTER_TYPE, NEW_OFFSET, NEW_MAYBE_DERIVED_TYPE is also included.
   If OTR_TYPE is set, assume the context is used with OTR_TYPE.
   Return true if THIS was modified in any way.  */

bool
ipa_polymorphic_call_context::meet_speculation_with
   (tree new_outer_type, HOST_WIDE_INT new_offset, bool new_maybe_derived_type,
    tree otr_type)
{
  /* Meeting with "no speculation" forces us to drop ours as well.  */
  if (!new_outer_type && speculative_outer_type)
    {
      clear_speculation ();
      return true;
    }

  /* restrict_to_inner_class may eliminate wrong speculation making our job
     easier.  */
  if (otr_type)
    restrict_to_inner_class (otr_type);

  /* If we have no useful speculation of our own, there is nothing to
     weaken.  */
  if (!speculative_outer_type
      || !speculation_consistent_p (speculative_outer_type,
				    speculative_offset,
				    speculative_maybe_derived_type,
				    otr_type))
    return false;

  /* If the incoming speculation is not consistent, the meet of the two
     is no speculation at all.  */
  if (!speculation_consistent_p (new_outer_type, new_offset,
				 new_maybe_derived_type, otr_type))
    {
      clear_speculation ();
      return true;
    }

  else if (types_must_be_same_for_odr (speculative_outer_type,
				       new_outer_type))
    {
      /* Same type at two different offsets can not be merged
	 meaningfully; drop the speculation.  */
      if (speculative_offset != new_offset)
	{
	  clear_speculation ();
	  return true;
	}
      else
	{
	  /* Same type and offset: possibly weaken to maybe-derived.  */
	  if (!speculative_maybe_derived_type && new_maybe_derived_type)
	    {
	      speculative_maybe_derived_type = true;
	      return true;
	    }
	  else
	    return false;
	}
    }
  /* See if one type contains the other as a field (not base).  */
  else if (contains_type_p (new_outer_type, new_offset - speculative_offset,
			    speculative_outer_type, false, false))
    return false;
  else if (contains_type_p (speculative_outer_type,
			    speculative_offset - new_offset,
			    new_outer_type, false, false))
    {
      /* Keep the wider of the two types; it carries more information.  */
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = new_maybe_derived_type;
      return true;
    }
  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
  else if (contains_type_p (new_outer_type,
			    new_offset - speculative_offset,
			    speculative_outer_type, false, true))
    {
      /* Our type is a base of the new one; weaken to maybe-derived.  */
      if (!speculative_maybe_derived_type)
	{
	  speculative_maybe_derived_type = true;
	  return true;
	}
      return false;
    }
  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
  else if (contains_type_p (speculative_outer_type,
			    speculative_offset - new_offset, new_outer_type, false, true))
    {
      /* The new type is a base of ours; move to the base and mark it
	 maybe-derived.  */
      speculative_outer_type = new_outer_type;
      speculative_offset = new_offset;
      speculative_maybe_derived_type = true;
      return true;
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Giving up on speculative meet\n");
      clear_speculation ();
      return true;
    }
}
2078 
/* Assume that both THIS and a given context is valid and strengthen THIS
   if possible.  Return true if any strengthening was made.
   If actual type the context is being used in is known, OTR_TYPE should be
   set accordingly.  This improves quality of combined result.  */

bool
ipa_polymorphic_call_context::combine_with (ipa_polymorphic_call_context ctx,
					    tree otr_type)
{
  bool updated = false;

  /* A useless CTX adds nothing; an invalid THIS can not be improved.  */
  if (ctx.useless_p () || invalid)
    return false;

  /* Restricting context to inner type makes merging easier, however do not
     do that unless we know how the context is used (OTR_TYPE is non-NULL)  */
  if (otr_type && !invalid && !ctx.invalid)
    {
      restrict_to_inner_class (otr_type);
      ctx.restrict_to_inner_class (otr_type);
      if(invalid)
        return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Polymorphic call context combine:");
      dump (dump_file);
      fprintf (dump_file, "With context:                    ");
      ctx.dump (dump_file);
      if (otr_type)
	{
          fprintf (dump_file, "To be used with type:            ");
	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
          fprintf (dump_file, "\n");
	}
    }

  /* If call is known to be invalid, we are done.  */
  if (ctx.invalid)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "-> Invalid context\n");
      goto invalidate;
    }

  if (!ctx.outer_type)
    ;
  /* If THIS knows nothing, CTX is strictly better.  */
  else if (!outer_type)
    {
      outer_type = ctx.outer_type;
      offset = ctx.offset;
      dynamic = ctx.dynamic;
      maybe_in_construction = ctx.maybe_in_construction;
      maybe_derived_type = ctx.maybe_derived_type;
      updated = true;
    }
  /* If types are known to be same, merging is quite easy.  */
  else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
    {
      /* Same type can not sit at two different offsets within an object
	 of known constant size; such a combination is invalid.  */
      if (offset != ctx.offset
	  && TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Outer types match, offset mismatch -> invalid\n");
	  clear_speculation ();
	  clear_outer_type ();
	  invalid = true;
	  return true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Outer types match, merging flags\n");
      /* Combining may only clear (strengthen) each flag, never set it.  */
      if (maybe_in_construction && !ctx.maybe_in_construction)
	{
	  updated = true;
	  maybe_in_construction = false;
	}
      if (maybe_derived_type && !ctx.maybe_derived_type)
	{
	  updated = true;
	  maybe_derived_type = false;
	}
      if (dynamic && !ctx.dynamic)
	{
	  updated = true;
	  dynamic = false;
	}
    }
  /* If we know the type precisely, there is not much to improve.  */
  else if (!maybe_derived_type && !maybe_in_construction
	   && !ctx.maybe_derived_type && !ctx.maybe_in_construction)
    {
      /* It may be easy to check if second context permits the first
	 and set INVALID otherwise.  This is not easy to do in general;
	 contains_type_p may return false negatives for non-comparable
	 types.

	 If OTR_TYPE is known, we however can expect that
	 restrict_to_inner_class should have discovered the same base
	 type.  */
      if (otr_type && !ctx.maybe_in_construction && !ctx.maybe_derived_type)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Contextes disagree -> invalid\n");
	  goto invalidate;
	}
    }
  /* See if one type contains the other as a field (not base).
     In this case we want to choose the wider type, because it contains
     more information.  */
  else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
			    outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type contain the first as a field\n");

      if (maybe_derived_type)
	{
	  outer_type = ctx.outer_type;
	  maybe_derived_type = ctx.maybe_derived_type;
	  offset = ctx.offset;
	  dynamic = ctx.dynamic;
	  updated = true;
	}

      /* If we do not know how the context is being used, we can
	 not clear MAYBE_IN_CONSTRUCTION because it may be offset
	 to other component of OUTER_TYPE later and we know nothing
	 about it.  */
      if (otr_type && maybe_in_construction
	  && !ctx.maybe_in_construction)
	{
          maybe_in_construction = false;
	  updated = true;
	}
    }
  else if (contains_type_p (outer_type, offset - ctx.offset,
			    ctx.outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type contain the second as a field\n");

      /* As above: clearing MAYBE_IN_CONSTRUCTION is only safe when we
	 know how the context is used.  */
      if (otr_type && maybe_in_construction
	  && !ctx.maybe_in_construction)
	{
          maybe_in_construction = false;
	  updated = true;
	}
    }
  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
  else if (contains_type_p (ctx.outer_type,
			    ctx.offset - offset, outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type is base of second\n");
      if (!maybe_derived_type)
	{
	  /* THIS claims exactly the base type; CTX claiming a derived
	     type (outside construction) contradicts it.  */
	  if (!ctx.maybe_in_construction
	      && types_odr_comparable (outer_type, ctx.outer_type))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Second context does not permit base -> invalid\n");
	      goto invalidate;
	    }
	}
      /* Pick variant deeper in the hierarchy.  */
      else
	{
	  outer_type = ctx.outer_type;
	  maybe_in_construction = ctx.maybe_in_construction;
	  maybe_derived_type = ctx.maybe_derived_type;
	  offset = ctx.offset;
	  dynamic = ctx.dynamic;
          updated = true;
	}
    }
  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
  else if (contains_type_p (outer_type,
			    offset - ctx.offset, ctx.outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type is base of first\n");
      if (!ctx.maybe_derived_type)
	{
	  /* CTX claims exactly the base type; THIS claiming a derived
	     type (outside construction) contradicts it.  */
	  if (!maybe_in_construction
	      && types_odr_comparable (outer_type, ctx.outer_type))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "First context does not permit base -> invalid\n");
	      goto invalidate;
	    }
	  /* Pick the base type.  */
	  else if (maybe_in_construction)
	    {
	      outer_type = ctx.outer_type;
	      maybe_in_construction = ctx.maybe_in_construction;
	      maybe_derived_type = ctx.maybe_derived_type;
	      offset = ctx.offset;
	      dynamic = ctx.dynamic;
	      updated = true;
	    }
	}
    }
  /* TODO handle merging using hierarchy. */
  else if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Giving up on merge\n");

  /* The speculative parts of the contexts are combined independently.  */
  updated |= combine_speculation_with (ctx.speculative_outer_type,
				       ctx.speculative_offset,
				       ctx.speculative_maybe_derived_type,
				       otr_type);

  if (updated && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Updated as:                      ");
      dump (dump_file);
      fprintf (dump_file, "\n");
    }
  return updated;

invalidate:
  invalid = true;
  clear_speculation ();
  clear_outer_type ();
  return true;
}
2306 
2307 /* Take non-speculative info, merge it with speculative and clear speculation.
2308    Used when we no longer manage to keep track of actual outer type, but we
2309    think it is still there.
2310 
2311    If OTR_TYPE is set, the transformation can be done more effectively assuming
2312    that context is going to be used only that way.  */
2313 
2314 void
2315 ipa_polymorphic_call_context::make_speculative (tree otr_type)
2316 {
2317   tree spec_outer_type = outer_type;
2318   HOST_WIDE_INT spec_offset = offset;
2319   bool spec_maybe_derived_type = maybe_derived_type;
2320 
2321   if (invalid)
2322     {
2323       invalid = false;
2324       clear_outer_type ();
2325       clear_speculation ();
2326       return;
2327     }
2328   if (!outer_type)
2329     return;
2330   clear_outer_type ();
2331   combine_speculation_with (spec_outer_type, spec_offset,
2332 			    spec_maybe_derived_type,
2333 			    otr_type);
2334 }
2335 
2336 /* Use when we can not track dynamic type change.  This speculatively assume
2337    type change is not happening.  */
2338 
2339 void
2340 ipa_polymorphic_call_context::possible_dynamic_type_change (bool in_poly_cdtor,
2341 							    tree otr_type)
2342 {
2343   if (dynamic)
2344     make_speculative (otr_type);
2345   else if (in_poly_cdtor)
2346     maybe_in_construction = true;
2347 }
2348 
2349 /* Return TRUE if this context conveys the same information as OTHER.  */
2350 
2351 bool
2352 ipa_polymorphic_call_context::equal_to
2353     (const ipa_polymorphic_call_context &x) const
2354 {
2355   if (useless_p ())
2356     return x.useless_p ();
2357   if (invalid)
2358     return x.invalid;
2359   if (x.useless_p () || x.invalid)
2360     return false;
2361 
2362   if (outer_type)
2363     {
2364       if (!x.outer_type
2365 	  || !types_odr_comparable (outer_type, x.outer_type)
2366 	  || !types_same_for_odr (outer_type, x.outer_type)
2367 	  || offset != x.offset
2368 	  || maybe_in_construction != x.maybe_in_construction
2369 	  || maybe_derived_type != x.maybe_derived_type
2370 	  || dynamic != x.dynamic)
2371 	return false;
2372     }
2373   else if (x.outer_type)
2374     return false;
2375 
2376 
2377   if (speculative_outer_type
2378       && speculation_consistent_p (speculative_outer_type, speculative_offset,
2379 				   speculative_maybe_derived_type, NULL_TREE))
2380     {
2381       if (!x.speculative_outer_type)
2382 	return false;
2383 
2384       if (!types_odr_comparable (speculative_outer_type,
2385 				 x.speculative_outer_type)
2386 	  || !types_same_for_odr  (speculative_outer_type,
2387 				   x.speculative_outer_type)
2388 	  || speculative_offset != x.speculative_offset
2389 	  || speculative_maybe_derived_type != x.speculative_maybe_derived_type)
2390 	return false;
2391     }
2392   else if (x.speculative_outer_type
2393 	   && x.speculation_consistent_p (x.speculative_outer_type,
2394 					  x.speculative_offset,
2395 				  	  x.speculative_maybe_derived_type,
2396 					  NULL))
2397     return false;
2398 
2399   return true;
2400 }
2401 
/* Modify context to be strictly less restrictive than CTX.
   Return true if any weakening was done.  */

bool
ipa_polymorphic_call_context::meet_with (ipa_polymorphic_call_context ctx,
					 tree otr_type)
{
  bool updated = false;

  /* A useless THIS is already the least restrictive value; an invalid CTX
     permits everything and adds no restriction to remove.  */
  if (useless_p () || ctx.invalid)
    return false;

  /* Restricting context to inner type makes merging easier, however do not
     do that unless we know how the context is used (OTR_TYPE is non-NULL)  */
  if (otr_type && !useless_p () && !ctx.useless_p ())
    {
      restrict_to_inner_class (otr_type);
      ctx.restrict_to_inner_class (otr_type);
      if(invalid)
        return false;
    }

  /* Equal contexts need no work.  */
  if (equal_to (ctx))
    return false;

  /* Meeting with a useless CTX yields useless; an invalid THIS is simply
     replaced by CTX.  */
  if (ctx.useless_p () || invalid)
    {
      *this = ctx;
      return true;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Polymorphic call context meet:");
      dump (dump_file);
      fprintf (dump_file, "With context:                    ");
      ctx.dump (dump_file);
      if (otr_type)
	{
          fprintf (dump_file, "To be used with type:            ");
	  print_generic_expr (dump_file, otr_type, TDF_SLIM);
          fprintf (dump_file, "\n");
	}
    }

  /* The DYNAMIC flag may only be set by a meet, never cleared.  */
  if (!dynamic && ctx.dynamic)
    {
      dynamic = true;
      updated = true;
    }

  /* If call is known to be invalid, we are done.  */
  if (!outer_type)
    ;
  else if (!ctx.outer_type)
    {
      clear_outer_type ();
      updated = true;
    }
  /* If types are known to be same, merging is quite easy.  */
  else if (types_must_be_same_for_odr (outer_type, ctx.outer_type))
    {
      /* Same type at two different offsets within an object of known
	 constant size can not be merged meaningfully.  */
      if (offset != ctx.offset
	  && TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Outer types match, offset mismatch -> clearing\n");
	  clear_outer_type ();
	  return true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Outer types match, merging flags\n");
      /* Meeting may only set (weaken) each flag, never clear it.  */
      if (!maybe_in_construction && ctx.maybe_in_construction)
	{
	  updated = true;
	  maybe_in_construction = true;
	}
      if (!maybe_derived_type && ctx.maybe_derived_type)
	{
	  updated = true;
	  maybe_derived_type = true;
	}
      if (!dynamic && ctx.dynamic)
	{
	  updated = true;
	  dynamic = true;
	}
    }
  /* See if one type contains the other as a field (not base).  */
  else if (contains_type_p (ctx.outer_type, ctx.offset - offset,
			    outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type contain the first as a field\n");

      /* The second type is more specified, so we keep the first.
         We need to set DYNAMIC flag to avoid declaring context INVALID
	 if OFFSET ends up being out of range.  */
      if (!dynamic
	  && (ctx.dynamic
	      || (!otr_type
		  && (!TYPE_SIZE (ctx.outer_type)
		      || !TYPE_SIZE (outer_type)
		      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
					   TYPE_SIZE (outer_type), 0)))))
	{
	  dynamic = true;
	  updated = true;
	}
    }
  else if (contains_type_p (outer_type, offset - ctx.offset,
			    ctx.outer_type, false, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type contain the second as a field\n");

      /* NOTE(review): the guarded "dynamic = true" below is immediately
	 overwritten by the unconditional "dynamic = ctx.dynamic" a few
	 lines later, so the size-mismatch part of this condition has no
	 effect unless CTX.DYNAMIC is already true.  Looks like either the
	 guard or the copy is redundant — confirm intended semantics
	 before changing.  */
      if (!dynamic
	  && (ctx.dynamic
	      || (!otr_type
		  && (!TYPE_SIZE (ctx.outer_type)
		      || !TYPE_SIZE (outer_type)
		      || !operand_equal_p (TYPE_SIZE (ctx.outer_type),
					   TYPE_SIZE (outer_type), 0)))))
	dynamic = true;
      /* Keep the wider (containing) type.  */
      outer_type = ctx.outer_type;
      offset = ctx.offset;
      dynamic = ctx.dynamic;
      maybe_in_construction = ctx.maybe_in_construction;
      maybe_derived_type = ctx.maybe_derived_type;
      updated = true;
    }
  /* See if OUTER_TYPE is base of CTX.OUTER_TYPE.  */
  else if (contains_type_p (ctx.outer_type,
			    ctx.offset - offset, outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "First type is base of second\n");
      /* Stay at the base type but admit derived types.  */
      if (!maybe_derived_type)
	{
	  maybe_derived_type = true;
	  updated = true;
	}
      if (!maybe_in_construction && ctx.maybe_in_construction)
	{
	  maybe_in_construction = true;
	  updated = true;
	}
      if (!dynamic && ctx.dynamic)
	{
	  dynamic = true;
	  updated = true;
	}
    }
  /* See if CTX.OUTER_TYPE is base of OUTER_TYPE.  */
  else if (contains_type_p (outer_type,
			    offset - ctx.offset, ctx.outer_type, false, true))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Second type is base of first\n");
      /* Move to the base type and admit derived types.  */
      outer_type = ctx.outer_type;
      offset = ctx.offset;
      updated = true;
      if (!maybe_derived_type)
	maybe_derived_type = true;
      if (!maybe_in_construction && ctx.maybe_in_construction)
	maybe_in_construction = true;
      if (!dynamic && ctx.dynamic)
	dynamic = true;
    }
  /* TODO handle merging using hierarchy. */
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Giving up on meet\n");
      clear_outer_type ();
      updated = true;
    }

  /* The speculative parts of the contexts are met independently.  */
  updated |= meet_speculation_with (ctx.speculative_outer_type,
				    ctx.speculative_offset,
				    ctx.speculative_maybe_derived_type,
				    otr_type);

  if (updated && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Updated as:                      ");
      dump (dump_file);
      fprintf (dump_file, "\n");
    }
  return updated;
}
2593