===================================================================
@@ -430,7 +430,7 @@ struct GTY(()) cgraph_indirect_call_info
/* OBJ_TYPE_REF_TOKEN of a polymorphic call (if polymorphic is set). */
HOST_WIDE_INT otr_token;
/* Type of the object from OBJ_TYPE_REF_OBJECT. */
- tree otr_type;
+ tree otr_type, outer_type;
/* Index of the parameter that is called. */
int param_index;
/* ECF flags determined from the caller. */
@@ -451,6 +451,8 @@ struct GTY(()) cgraph_indirect_call_info
/* When the previous bit is set, this one determines whether the destination
is loaded from a parameter passed by reference. */
unsigned by_ref : 1;
+ unsigned int include_bases : 1;
+ unsigned int include_derived_types : 1;
};
struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"))) cgraph_edge {
===================================================================
@@ -940,16 +940,26 @@ cgraph_create_indirect_edge (struct cgra
&& (target = gimple_call_fn (call_stmt))
&& virtual_method_call_p (target))
{
- tree type = obj_type_ref_class (target);
-
+ tree otr_type, outer_type;
+ HOST_WIDE_INT otr_token, offset;
+ bool include_bases, include_derived_types;
+ get_obj_type_ref_parameters (caller->symbol.decl,
+ target,
+ otr_type, otr_token,
+ outer_type, offset,
+ include_bases,
+ include_derived_types);
/* Only record types can have virtual calls. */
- gcc_assert (TREE_CODE (type) == RECORD_TYPE);
+ gcc_assert (TREE_CODE (otr_type) == RECORD_TYPE);
+ edge->indirect_info->polymorphic = true;
edge->indirect_info->param_index = -1;
- edge->indirect_info->otr_token
- = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
- edge->indirect_info->otr_type = type;
- edge->indirect_info->polymorphic = 1;
+ edge->indirect_info->otr_token = otr_token;
+ edge->indirect_info->otr_type = otr_type;
+ edge->indirect_info->outer_type = outer_type;
+ edge->indirect_info->offset = offset;
+ edge->indirect_info->include_bases = include_bases;
+ edge->indirect_info->include_derived_types = include_derived_types;
}
edge->next_callee = caller->indirect_calls;
===================================================================
@@ -59,13 +59,21 @@ void build_type_inheritance_graph (void)
void update_type_inheritance_graph (void);
vec <cgraph_node *>
possible_polymorphic_call_targets (tree, HOST_WIDE_INT,
+ tree, HOST_WIDE_INT, bool, bool,
bool *final = NULL,
void **cache_token = NULL);
odr_type get_odr_type (tree, bool insert = false);
-void dump_possible_polymorphic_call_targets (FILE *, tree, HOST_WIDE_INT);
+tree canonicalize_odr_type (tree);
+void dump_possible_polymorphic_call_targets (FILE *, tree, HOST_WIDE_INT,
+ tree, HOST_WIDE_INT, bool, bool);
bool possible_polymorphic_call_target_p (tree, HOST_WIDE_INT,
+ tree, HOST_WIDE_INT, bool, bool,
struct cgraph_node *n);
tree method_class_type (tree);
+void get_obj_type_ref_parameters (tree, tree, tree &,
+ HOST_WIDE_INT &, tree &,
+ HOST_WIDE_INT &, bool &,
+ bool &);
/* Return vector containing possible targets of polymorphic call E.
If FINALP is non-NULL, store true if the list is complette.
@@ -85,6 +93,25 @@ possible_polymorphic_call_targets (struc
gcc_checking_assert (e->indirect_info->polymorphic);
return possible_polymorphic_call_targets (e->indirect_info->otr_type,
e->indirect_info->otr_token,
+ e->indirect_info->outer_type,
+ e->indirect_info->offset,
+ e->indirect_info->include_bases,
+ e->indirect_info->include_derived_types,
+ final, cache_token);
+}
+
+/* Same as above but taking OBJ_TYPE_REF as a parameter. */
+
+inline vec <cgraph_node *>
+possible_polymorphic_call_targets (tree call,
+ bool *final = NULL,
+ void **cache_token = NULL)
+{
+ return possible_polymorphic_call_targets (obj_type_ref_class (call),
+ tree_low_cst
+ (OBJ_TYPE_REF_TOKEN (call), 1),
+ obj_type_ref_class (call),
+ 0, false, true,
final, cache_token);
}
@@ -95,7 +122,11 @@ dump_possible_polymorphic_call_targets (
{
gcc_checking_assert (e->indirect_info->polymorphic);
dump_possible_polymorphic_call_targets (f, e->indirect_info->otr_type,
- e->indirect_info->otr_token);
+ e->indirect_info->otr_token,
+ e->indirect_info->outer_type,
+ e->indirect_info->offset,
+ e->indirect_info->include_bases,
+ e->indirect_info->include_derived_types);
}
/* Return true if N can be possibly target of a polymorphic call of
@@ -106,7 +137,26 @@ possible_polymorphic_call_target_p (stru
struct cgraph_node *n)
{
return possible_polymorphic_call_target_p (e->indirect_info->otr_type,
- e->indirect_info->otr_token, n);
+ e->indirect_info->otr_token,
+ e->indirect_info->outer_type,
+ e->indirect_info->offset,
+ e->indirect_info->include_bases,
+ e->indirect_info->include_derived_types,
+ n);
+}
+
+/* Return true if N can be possibly target of a polymorphic call of
+ OBJ_TYPE_REF expression CALL. */
+
+inline bool
+possible_polymorphic_call_target_p (tree call,
+ struct cgraph_node *n)
+{
+ return possible_polymorphic_call_target_p (obj_type_ref_class (call),
+ tree_low_cst
+ (OBJ_TYPE_REF_TOKEN (call), 1),
+ obj_type_ref_class (call),
+ 0, false, true, n);
}
#endif /* GCC_IPA_UTILS_H */
===================================================================
@@ -291,8 +291,6 @@ add_type_duplicate (odr_type val, tree t
inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
"a type with the same name but different layout is "
"defined in another translation unit");
- debug_tree (BINFO_VTABLE (TYPE_BINFO (type)));
- debug_tree (BINFO_VTABLE (TYPE_BINFO (val->type)));
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
@@ -418,6 +416,7 @@ get_odr_type (tree type, bool insert)
tree binfo = TYPE_BINFO (type);
unsigned int i;
+ /*free_polymorphic_call_targets_hash ();*/
val = ggc_alloc_cleared_odr_type_d ();
val->type = type;
val->bases = vNULL;
@@ -444,6 +443,16 @@ get_odr_type (tree type, bool insert)
return val;
}
+/* Return canonical version of the type. */
+
+tree
+canonicalize_odr_type (tree type)
+{
+ if (!BINFO_VTABLE (TYPE_BINFO (type)))
+ return type;
+ return get_odr_type (type, true)->type;
+}
+
/* Dump ODR type T and all its derrived type. INDENT specify indentation for
recusive printing. */
@@ -580,8 +589,9 @@ maybe_record_node (vec <cgraph_node *> &
}
}
-/* See if BINFO's type match OTR_TYPE. If so, lookup method
- in vtable of TYPE_BINFO and insert method to NODES array.
+/* See if BINFO's type matches OUTER_TYPE. If so, look up the
+ BINFO of the subtype of TYPE at OFFSET and in that BINFO find
+ the method in the vtable and insert it into the NODES array.
Otherwise recurse to base BINFOs.
This match what get_binfo_at_offset does, but with offset
being unknown.
@@ -603,6 +613,8 @@ record_binfo (vec <cgraph_node *> &nodes
tree otr_type,
tree type_binfo,
HOST_WIDE_INT otr_token,
+ tree outer_type,
+ HOST_WIDE_INT offset,
pointer_set_t *inserted,
pointer_set_t *matched_vtables,
bool anonymous)
@@ -613,14 +625,15 @@ record_binfo (vec <cgraph_node *> &nodes
gcc_checking_assert (BINFO_VTABLE (type_binfo));
- if (types_same_for_odr (type, otr_type)
- && !pointer_set_insert (matched_vtables, BINFO_VTABLE (type_binfo)))
+ if (types_same_for_odr (type, outer_type))
{
+ tree inner_binfo = get_binfo_at_offset (type_binfo,
+ offset, otr_type);
/* For types in anonymous namespace first check if the respective vtable
is alive. If not, we know the type can't be called. */
if (!flag_ltrans && anonymous)
{
- tree vtable = BINFO_VTABLE (type_binfo);
+ tree vtable = BINFO_VTABLE (inner_binfo);
struct varpool_node *vnode;
if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
@@ -629,9 +642,13 @@ record_binfo (vec <cgraph_node *> &nodes
if (!vnode || !vnode->symbol.definition)
return;
}
- tree target = gimple_get_virt_method_for_binfo (otr_token, type_binfo);
- if (target)
- maybe_record_node (nodes, target, inserted);
+ gcc_assert (inner_binfo);
+ if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
+ {
+ tree target = gimple_get_virt_method_for_binfo (otr_token, inner_binfo);
+ if (target)
+ maybe_record_node (nodes, target, inserted);
+ }
return;
}
@@ -643,7 +660,7 @@ record_binfo (vec <cgraph_node *> &nodes
/* In the case of single inheritance, the virtual table
is shared with the outer type. */
BINFO_VTABLE (base_binfo) ? base_binfo : type_binfo,
- otr_token, inserted,
+ otr_token, outer_type, offset, inserted,
matched_vtables, anonymous);
}
@@ -658,19 +675,22 @@ possible_polymorphic_call_targets_1 (vec
pointer_set_t *matched_vtables,
tree otr_type,
odr_type type,
- HOST_WIDE_INT otr_token)
+ HOST_WIDE_INT otr_token,
+ tree outer_type,
+ HOST_WIDE_INT offset)
{
tree binfo = TYPE_BINFO (type->type);
unsigned int i;
- record_binfo (nodes, binfo, otr_type, binfo, otr_token, inserted,
- matched_vtables, type->anonymous_namespace);
+ record_binfo (nodes, binfo, otr_type, binfo, otr_token,
+ outer_type, offset,
+ inserted, matched_vtables, type->anonymous_namespace);
for (i = 0; i < type->derived_types.length(); i++)
possible_polymorphic_call_targets_1 (nodes, inserted,
matched_vtables,
otr_type,
type->derived_types[i],
- otr_token);
+ otr_token, outer_type, offset);
}
/* Cache of queries for polymorphic call targets.
@@ -681,9 +701,11 @@ possible_polymorphic_call_targets_1 (vec
struct polymorphic_call_target_d
{
- odr_type type;
- HOST_WIDE_INT otr_token;
+ odr_type type, outer_type;
+ HOST_WIDE_INT otr_token, offset;
+ bool include_bases, include_derived_types;
vec <cgraph_node *> targets;
+ bool final;
};
/* Polymorphic call target cache helpers. */
@@ -702,8 +724,16 @@ struct polymorphic_call_target_hasher
inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
- return iterative_hash_hashval_t (odr_query->type->id,
- odr_query->otr_token);
+ hashval_t hash;
+
+ hash = iterative_hash_host_wide_int
+ (odr_query->otr_token,
+ odr_query->type->id);
+ hash = iterative_hash_hashval_t (odr_query->outer_type->id, hash);
+ hash = iterative_hash_host_wide_int (odr_query->offset, hash);
+ return iterative_hash_hashval_t (((int)odr_query->include_bases << 1)
+ | (int)odr_query->include_derived_types,
+ hash);
}
/* Compare cache entries T1 and T2. */
@@ -712,7 +742,11 @@ inline bool
polymorphic_call_target_hasher::equal (const value_type *t1,
 const compare_type *t2)
{
- return t1->type == t2->type && t1->otr_token == t2->otr_token;
+ return (t1->type == t2->type && t1->otr_token == t2->otr_token
+ && t1->outer_type == t2->outer_type
+ && t1->offset == t2->offset
+ && t1->include_bases == t2->include_bases
+ && t1->include_derived_types == t2->include_derived_types);
}
/* Remove entry in polymorphic call target cache hash. */
@@ -732,7 +765,7 @@ static polymorphic_call_target_hash_type
/* Destroy polymorphic call target query cache. */
-static void
+void
free_polymorphic_call_targets_hash ()
{
if (cached_polymorphic_call_targets)
@@ -753,6 +786,344 @@ devirt_node_removal_hook (struct cgraph_
free_polymorphic_call_targets_hash ();
}
+/* Walk fields of CLASS_TYPE to find type of class in a memory layout
+ of TYPE that contains EXPECTED_TYPE at OFFSET (possibly as a basetype)
+
+ class A
+ {
+ int a;
+ class B b;
+ }
+ when we look for type at offset sizeof(int), we end up with B and offset 0.
+ If the same is produced by multiple inheritance, we end up with A and offset
+ sizeof(int).
+
+ If we cannot find the corresponding class, give up by setting CLASS_TYPE to
+ EXPECTED_TYPE and CLASS_OFFSET to 0. */
+
+static bool
+get_outer_class_type (tree &class_type, HOST_WIDE_INT &class_offset,
+ bool &include_derived_types,
+ tree expected_type)
+{
+ tree type = class_type;
+ HOST_WIDE_INT offset = class_offset;
+
+ /* Find the sub-object the constant actually refers to and mark whether it is
+ an artificial one (as opposed to a user-defined one). */
+ while (true)
+ {
+ HOST_WIDE_INT pos, size;
+ tree fld;
+
+ /* On a match, just return what we found. */
+ if (TREE_CODE (type) == TREE_CODE (expected_type)
+ && types_same_for_odr (type, expected_type))
+ {
+ gcc_assert (offset == 0);
+ return true;
+ }
+
+ /* Walk fields and find the corresponding one at OFFSET. */
+ if (TREE_CODE (type) == RECORD_TYPE)
+ {
+ for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
+ {
+ if (TREE_CODE (fld) != FIELD_DECL)
+ continue;
+
+ pos = int_bit_position (fld);
+ size = tree_low_cst (DECL_SIZE (fld), 1);
+ if (pos <= offset && (pos + size) > offset)
+ break;
+ }
+
+ if (!fld)
+ goto give_up;
+
+ type = TREE_TYPE (fld);
+ offset -= pos;
+ /* DECL_ARTIFICIAL represents a basetype. */
+ if (!DECL_ARTIFICIAL (fld))
+ {
+ class_type = type;
+ class_offset = offset;
+ /* As soon as we see a field containing the type,
+ we know we are not looking for derivations. */
+ include_derived_types = false;
+ }
+ }
+ else if (TREE_CODE (type) == ARRAY_TYPE)
+ {
+ tree subtype = TREE_TYPE (type);
+
+ /* Give up if we don't know array size. */
+ if (!host_integerp (TYPE_SIZE (subtype), 0)
+ || tree_low_cst (TYPE_SIZE (subtype), 0) <= 0)
+ goto give_up;
+ offset = offset % tree_low_cst (TYPE_SIZE (subtype), 0);
+ type = subtype;
+ class_type = type;
+ class_offset = offset;
+ include_derived_types = false;
+ }
+ /* Give up on anything else. */
+ else
+ goto give_up;
+ }
+
+ /* If we failed to find subtype we look for, give up and fall back to the
+ most generic query. */
+give_up:
+ class_type = expected_type;
+ class_offset = 0;
+ include_derived_types = true;
+ return false;
+}
+
+/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
+
+bool
+contains_type_p (tree outer_type, HOST_WIDE_INT offset,
+ tree otr_type)
+{
+ bool include_derived_types;
+ return get_outer_class_type (outer_type, offset,
+ include_derived_types, otr_type);
+}
+
+/* Given OBJ_TYPE_REF REF call in FNDECL, determine class of the polymorphic
+ call (OTR_TYPE) and its token (OTR_TOKEN).
+ Also determine OUTER_TYPE and OFFSET of the access and set INCLUDE_BASES
+ if OUTER_TYPE is possibly in construction and INCLUDE_DERIVED_TYPES if
+ the actual type may be a derived type of OUTER_TYPE. */
+
+void
+get_obj_type_ref_parameters (tree fndecl,
+ tree ref,
+ tree &otr_type,
+ HOST_WIDE_INT &otr_token,
+ tree &outer_type,
+ HOST_WIDE_INT &offset,
+ bool &include_bases,
+ bool &include_derived_types)
+{
+ otr_type = obj_type_ref_class (ref);
+ otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
+ tree base_pointer;
+
+ /* Set up basic info in case we find nothing interesting in the analysis. */
+ outer_type = otr_type;
+ offset = 0;
+ base_pointer = OBJ_TYPE_REF_OBJECT (ref);
+ include_derived_types = true;
+ include_bases = false;
+
+ /* Walk SSA for outer object. */
+ do
+ {
+ if (TREE_CODE (base_pointer) == SSA_NAME
+ && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
+ && SSA_NAME_DEF_STMT (base_pointer)
+ && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
+ {
+ base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
+ STRIP_NOPS (base_pointer);
+ }
+ else if (TREE_CODE (base_pointer) == ADDR_EXPR)
+ {
+ HOST_WIDE_INT size, max_size;
+ HOST_WIDE_INT offset2;
+ tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
+ &offset2, &size, &max_size);
+
+ /* If this is a varying address, punt. */
+ if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
+ && max_size != -1
+ && max_size == size)
+ {
+ /* We found dereference of a pointer. Type of the pointer
+ and MEM_REF is meaningless, but we can look further. */
+ if (TREE_CODE (base) == MEM_REF)
+ {
+ base_pointer = TREE_OPERAND (base, 0);
+ offset += offset2 + mem_ref_offset (base).low * BITS_PER_UNIT;
+ outer_type = NULL;
+#if 0
+ if (offset < 0)
+ {
+ offset = 0;
+ return;
+ }
+#endif
+ }
+ /* We found base object. In this case the outer_type
+ is known. */
+ else if (DECL_P (base))
+ {
+ outer_type = TREE_TYPE (base);
+ gcc_assert (!POINTER_TYPE_P (outer_type));
+
+ /* Only type inconsistent programs can have otr_type that is
+ not part of outer type. */
+ if (!contains_type_p (outer_type,
+ offset, otr_type))
+ {
+ gcc_unreachable ();
+ return;
+ }
+ offset += offset2;
+ base_pointer = NULL;
+ /* Make very conservative assumption that all objects
+ may be in construction.
+ TODO: ipa-prop already contains code to tell better.
+ merge it later. */
+ include_bases = true;
+ include_derived_types = false;
+ return;
+ }
+ else
+ break;
+ }
+ else
+ break;
+ }
+ else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
+ && host_integerp (TREE_OPERAND (base_pointer, 1), 1))
+ {
+ offset += tree_low_cst (TREE_OPERAND (base_pointer, 1), 1)
+ * BITS_PER_UNIT;
+ base_pointer = TREE_OPERAND (base_pointer, 0);
+ }
+ else
+ break;
+ }
+ while (true);
+
+ /* Try to determine type of the outer object. */
+ if (TREE_CODE (base_pointer) == SSA_NAME
+ && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
+ && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
+ {
+ /* See if parameter is THIS pointer of a method. */
+ if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
+ && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
+ {
+ outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
+ gcc_assert (TREE_CODE (outer_type) == RECORD_TYPE);
+
+ /* Dynamic casting has possibly upcasted the type
+ in the hierarchy. In this case outer type is less
+ informative than inner type and we should forget
+ about it. */
+ if (!contains_type_p (outer_type, offset, otr_type))
+ {
+ outer_type = NULL;
+ return;
+ }
+
+ /* If the function is a constructor or destructor, then
+ the type is possibly in construction, but we know
+ it is not a derived type. */
+ if (DECL_CXX_CONSTRUCTOR_P (fndecl)
+ || DECL_CXX_DESTRUCTOR_P (fndecl))
+ {
+ include_bases = true;
+ include_derived_types = false;
+ }
+ else
+ {
+ include_derived_types = true;
+ include_bases = false;
+ }
+ return;
+ }
+ /* Non-PODs passed by value are really passed by invisible
+ reference. In this case we also know the type of the
+ object. */
+ if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
+ {
+ outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
+ gcc_assert (!POINTER_TYPE_P (outer_type));
+ /* Only type inconsistent programs can have otr_type that is
+ not part of outer type. */
+ if (!contains_type_p (outer_type, offset, otr_type))
+ {
+ outer_type = NULL;
+ gcc_unreachable ();
+ return;
+ }
+ include_derived_types = false;
+ include_bases = false;
+ return;
+ }
+ }
+ /* TODO: There are multiple ways to derive a type. For instance
+ if BASE_POINTER is passed to a constructor call prior to our reference.
+ We do not make this type of flow sensitive analysis yet. */
+
+}
+
+/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
+ Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
+ and insert them to NODES.
+
+ MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
+
+static void
+walk_bases (tree otr_type,
+ HOST_WIDE_INT otr_token,
+ tree outer_type,
+ HOST_WIDE_INT offset,
+ vec <cgraph_node *> nodes,
+ pointer_set_t *inserted,
+ pointer_set_t *matched_vtables,
+ bool *finalp)
+{
+ while (true)
+ {
+ HOST_WIDE_INT pos, size;
+ tree base_binfo;
+ tree fld;
+
+ if (types_same_for_odr (outer_type, otr_type))
+ return;
+ /*gcc_assert (offset >= 0);*/
+
+ for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
+ {
+ if (TREE_CODE (fld) != FIELD_DECL)
+ continue;
+
+ pos = int_bit_position (fld);
+ size = tree_low_cst (DECL_SIZE (fld), 1);
+ if (pos <= offset && (pos + size) > offset)
+ break;
+ }
+ /* Within a class type we should always find the corresponding field. */
+ gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
+
+ /* Non-base types should have been stripped by get_outer_class_type. */
+ gcc_assert (DECL_ARTIFICIAL (fld));
+
+ outer_type = TREE_TYPE (fld);
+ offset -= pos;
+
+ base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
+ offset, otr_type);
+ gcc_assert (base_binfo);
+ if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
+ {
+ tree target = gimple_get_virt_method_for_binfo (otr_token, base_binfo);
+ if (target)
+ maybe_record_node (nodes, target, inserted);
+ else
+ *finalp = false;
+ pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
+ }
+ }
+}
+
/* When virtual table is removed, we may need to flush the cache. */
static void
@@ -766,8 +1137,14 @@ devirt_variable_node_removal_hook (struc
}
/* Return vector containing possible targets of polymorphic call of type
- OTR_TYPE caling method OTR_TOKEN with OFFSET. If FINALp is non-NULL,
- store true if the list is complette.
+ OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
+ If INCLUDE_BASES is true, walk also base types of OUTER_TYPE containing
+ OTR_TYPE and include their virtual methods. This is useful for types
+ possibly in construction or destruction where the virtual table may
+ temporarily change to one of base types. INCLUDE_DERIVED_TYPES makes
+ us walk the inheritance graph for all derivations.
+
+ If FINALp is non-NULL, store true if the list is complete.
CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
in the target cache. If user needs to visit every target list
just once, it can memoize them.
@@ -779,6 +1156,10 @@ devirt_variable_node_removal_hook (struc
vec <cgraph_node *>
possible_polymorphic_call_targets (tree otr_type,
HOST_WIDE_INT otr_token,
+ tree otr_outer_type,
+ HOST_WIDE_INT offset,
+ bool include_bases,
+ bool include_derived_types,
bool *finalp,
void **cache_token)
{
@@ -786,25 +1167,37 @@ possible_polymorphic_call_targets (tree
pointer_set_t *inserted;
pointer_set_t *matched_vtables;
vec <cgraph_node *> nodes=vNULL;
- odr_type type;
+ odr_type type, outer_type;
polymorphic_call_target_d key;
polymorphic_call_target_d **slot;
unsigned int i;
tree binfo, target;
+ bool final;
- if (finalp)
- *finalp = false;
-
- type = get_odr_type (otr_type, false);
- /* If we do not have type in our hash it means we never seen any method
- in it. */
- if (!type)
- return nodes;
+ type = get_odr_type (otr_type, true);
- /* For anonymous namespace types we can attempt to build full type.
- All derivations must be in this unit. */
- if (type->anonymous_namespace && finalp && !flag_ltrans)
- *finalp = true;
+ /* Lookup the outer class type we want to walk. */
+ if (otr_outer_type)
+ get_outer_class_type (otr_outer_type, offset, include_derived_types,
+ otr_type);
+
+ /* We now canonicalize our query, so we do not need extra hashtable entries. */
+
+ /* Without outer type, we have no use for offset. Just do the
+ basic search from inner type. */
+ if (!otr_outer_type)
+ {
+ otr_outer_type = otr_type;
+ offset = 0;
+ }
+ /* We need to update our hierarchy if the type does not exist. */
+ outer_type = get_odr_type (otr_outer_type, true);
+ /* If outer and inner type match, there are no bases to see. */
+ if (type == outer_type)
+ include_bases = false;
+ /* If the type is final, there are no derivations. */
+ if (TYPE_FINAL_P (outer_type->type))
+ include_derived_types = false;
/* Initialize query cache. */
if (!cached_polymorphic_call_targets)
@@ -823,43 +1216,77 @@ possible_polymorphic_call_targets (tree
/* Lookup cached answer. */
key.type = type;
key.otr_token = otr_token;
+ key.outer_type = outer_type;
+ key.offset = offset;
+ key.include_bases = include_bases;
+ key.include_derived_types = include_derived_types;
slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
if (cache_token)
*cache_token = (void *)*slot;
if (*slot)
- return (*slot)->targets;
+ {
+ if (finalp)
+ *finalp = (*slot)->final;
+ return (*slot)->targets;
+ }
+
+ final = true;
/* Do actual search. */
timevar_push (TV_IPA_VIRTUAL_CALL);
*slot = XCNEW (polymorphic_call_target_d);
if (cache_token)
- *cache_token = (void *)*slot;
+ *cache_token = (void *)*slot;
(*slot)->type = type;
(*slot)->otr_token = otr_token;
+ (*slot)->outer_type = outer_type;
+ (*slot)->offset = offset;
+ (*slot)->include_bases = include_bases;
+ (*slot)->include_derived_types = include_derived_types;
inserted = pointer_set_create ();
matched_vtables = pointer_set_create ();
/* First see virtual method of type itself. */
- binfo = TYPE_BINFO (type->type);
+ binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
+ offset, otr_type);
target = gimple_get_virt_method_for_binfo (otr_token, binfo);
if (target)
- maybe_record_node (nodes, target, inserted);
+ {
+ maybe_record_node (nodes, target, inserted);
+
+ /* In the case we get final method, we don't need
+ to walk derivations. */
+ if (DECL_FINAL_P (target))
+ include_derived_types = false;
+ }
+ else
+ final = false;
pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
- /* TODO: If method is final, we can stop here and signaize that
- list is final. We need C++ FE to pass our info about final
- methods and classes. */
+ /* Next walk bases, if asked to. */
+ if (include_bases)
+ walk_bases (otr_type, otr_token, outer_type->type, offset, nodes, inserted,
+ matched_vtables, &final);
- /* Walk recursively all derived types. Here we need to lookup proper basetype
- via their BINFO walk that is done by record_binfo */
- for (i = 0; i < type->derived_types.length(); i++)
- possible_polymorphic_call_targets_1 (nodes, inserted,
- matched_vtables,
- otr_type, type->derived_types[i],
- otr_token);
+ /* Finally walk recursively all derived types. */
+ if (include_derived_types)
+ {
+ /* For anonymous namespace types we can attempt to build full type.
+ All derivations must be in this unit (unless we see partial unit). */
+ if (!type->anonymous_namespace || flag_ltrans)
+ final = false;
+ for (i = 0; i < outer_type->derived_types.length(); i++)
+ possible_polymorphic_call_targets_1 (nodes, inserted,
+ matched_vtables,
+ otr_type, outer_type->derived_types[i],
+ otr_token, outer_type->type, offset);
+ }
(*slot)->targets = nodes;
+ (*slot)->final = final;
+ if (finalp)
+ *finalp = final;
pointer_set_destroy (inserted);
pointer_set_destroy (matched_vtables);
@@ -871,8 +1298,12 @@ possible_polymorphic_call_targets (tree
void
dump_possible_polymorphic_call_targets (FILE *f,
- tree otr_type,
- HOST_WIDE_INT otr_token)
+ tree otr_type,
+ HOST_WIDE_INT otr_token,
+ tree outer_type,
+ HOST_WIDE_INT offset,
+ bool include_bases,
+ bool include_derived_types)
{
vec <cgraph_node *> targets;
bool final;
@@ -882,16 +1313,27 @@ dump_possible_polymorphic_call_targets (
if (!type)
return;
targets = possible_polymorphic_call_targets (otr_type, otr_token,
+ outer_type,
+ offset, include_bases,
+ include_derived_types,
&final);
- fprintf (f, "Targets of polymorphic call of type %i ", type->id);
+ fprintf (f, " Targets of polymorphic call of type %i:", type->id);
print_generic_expr (f, type->type, TDF_SLIM);
- fprintf (f, " token %i%s:",
- (int)otr_token,
- final ? " (full list)" : " (partial list, may call to other unit)");
+ fprintf (f, " token %i\n"
+ " Contained in type:",
+ (int)otr_token);
+ print_generic_expr (f, outer_type, TDF_SLIM);
+ fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n"
+ " %s%s%s\n ",
+ offset,
+ final ? "This is full list." :
+ "This is partial list; extra targets may be defined in other units.",
+ include_bases ? " (base types included)" : "",
+ include_derived_types ? " (derived types included)" : "");
for (i = 0; i < targets.length (); i++)
fprintf (f, " %s/%i", cgraph_node_name (targets[i]),
targets[i]->symbol.order);
- fprintf (f, "\n");
+ fprintf (f, "\n\n");
}
@@ -901,16 +1343,30 @@ dump_possible_polymorphic_call_targets (
bool
possible_polymorphic_call_target_p (tree otr_type,
HOST_WIDE_INT otr_token,
+ tree outer_type,
+ HOST_WIDE_INT offset,
+ bool include_bases,
+ bool include_derived_types,
struct cgraph_node *n)
{
vec <cgraph_node *> targets;
unsigned int i;
+ enum built_in_function fcode;
+
+ if (TREE_CODE (TREE_TYPE (n->symbol.decl)) == FUNCTION_TYPE
+ && ((fcode = DECL_FUNCTION_CODE (n->symbol.decl))
+ == BUILT_IN_UNREACHABLE
+ || fcode == BUILT_IN_TRAP))
+ return true;
if (!odr_hash.is_created ())
return true;
- targets = possible_polymorphic_call_targets (otr_type, otr_token);
+ targets = possible_polymorphic_call_targets (otr_type, otr_token,
+ outer_type, offset,
+ include_bases,
+ include_derived_types);
for (i = 0; i < targets.length (); i++)
- if (n == targets[i])
+ if (symtab_semantically_equivalent_p ((symtab_node)n, (symtab_node)targets[i]))
return true;
return false;
}
===================================================================
@@ -470,7 +472,7 @@ ipa_set_ancestor_jf (struct ipa_jump_fun
tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
- tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
+ tree base_binfo = TYPE_BINFO (canonicalize_odr_type (jfunc->value.known_type.base_type));
if (!base_binfo)
return NULL_TREE;
return get_binfo_at_offset (base_binfo,
@@ -1724,8 +1726,6 @@ ipa_note_param_call (struct cgraph_node
cs = cgraph_edge (node, stmt);
cs->indirect_info->param_index = param_index;
- cs->indirect_info->offset = 0;
- cs->indirect_info->polymorphic = 0;
cs->indirect_info->agg_contents = 0;
cs->indirect_info->member_ptr = 0;
return cs;
@@ -1822,6 +1822,8 @@ ipa_analyze_indirect_call_uses (struct c
&by_ref))
{
struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
+ if (cs->indirect_info->offset != offset)
+ cs->indirect_info->outer_type = NULL;
cs->indirect_info->offset = offset;
cs->indirect_info->agg_contents = 1;
cs->indirect_info->by_ref = by_ref;
@@ -1919,6 +1921,8 @@ ipa_analyze_indirect_call_uses (struct c
&& parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
{
struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
+ if (cs->indirect_info->offset != offset)
+ cs->indirect_info->outer_type = NULL;
cs->indirect_info->offset = offset;
cs->indirect_info->agg_contents = 1;
cs->indirect_info->member_ptr = 1;
@@ -2741,6 +2795,8 @@ update_indirect_edges_after_inlining (st
else
{
ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
+ if (ipa_get_jf_ancestor_offset (jfunc))
+ ici->outer_type = NULL;
ici->offset += ipa_get_jf_ancestor_offset (jfunc);
}
}
@@ -4048,12 +4106,15 @@ ipa_write_indirect_edge_info (struct out
bp_pack_value (&bp, ii->agg_contents, 1);
bp_pack_value (&bp, ii->member_ptr, 1);
bp_pack_value (&bp, ii->by_ref, 1);
+ bp_pack_value (&bp, ii->include_bases, 1);
+ bp_pack_value (&bp, ii->include_derived_types, 1);
streamer_write_bitpack (&bp);
if (ii->polymorphic)
{
streamer_write_hwi (ob, ii->otr_token);
stream_write_tree (ob, ii->otr_type, true);
+ stream_write_tree (ob, ii->outer_type, true);
}
}
@@ -4075,10 +4136,13 @@ ipa_read_indirect_edge_info (struct lto_
ii->agg_contents = bp_unpack_value (&bp, 1);
ii->member_ptr = bp_unpack_value (&bp, 1);
ii->by_ref = bp_unpack_value (&bp, 1);
+ ii->include_bases = bp_unpack_value (&bp, 1);
+ ii->include_derived_types = bp_unpack_value (&bp, 1);
if (ii->polymorphic)
{
ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
ii->otr_type = stream_read_tree (ib, data_in);
+ ii->outer_type = stream_read_tree (ib, data_in);
}
}