===================================================================
@@ -846,7 +846,7 @@ cgraph_set_call_stmt (struct cgraph_edge
indirect call into a direct one. */
struct cgraph_node *new_callee = cgraph_node (decl);
- cgraph_make_edge_direct (e, new_callee);
+ cgraph_make_edge_direct (e, new_callee, NULL);
}
push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
@@ -1181,12 +1181,15 @@ cgraph_redirect_edge_callee (struct cgra
}
/* Make an indirect EDGE with an unknown callee an ordinary edge leading to
- CALLEE. */
+ CALLEE. DELTA, if non-NULL, is an integer constant that is to be added to
+ the this pointer (first parameter). */
void
-cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
+cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee,
+ tree delta)
{
edge->indirect_unknown_callee = 0;
+ edge->indirect_info->thunk_delta = delta;
/* Get the edge out of the indirect edge list. */
if (edge->prev_callee)
@@ -2099,8 +2102,16 @@ cgraph_clone_edge (struct cgraph_edge *e
}
}
else
- new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
- e->loop_nest + loop_nest);
+ {
+ new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
+ e->loop_nest + loop_nest);
+ if (e->indirect_info)
+ {
+ new_edge->indirect_info
+ = ggc_alloc_cleared_cgraph_indirect_call_info ();
+ *new_edge->indirect_info = *e->indirect_info;
+ }
+ }
new_edge->inline_failed = e->inline_failed;
new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
===================================================================
@@ -398,6 +398,9 @@ struct GTY(()) cgraph_indirect_call_info
HOST_WIDE_INT otr_token;
/* Type of the object from OBJ_TYPE_REF_OBJECT. */
tree otr_type;
+ /* Delta to be added to the this pointer (first parameter). For polymorphic
+ calls only. */
+ tree thunk_delta;
/* Index of the parameter that is called. */
int param_index;
/* ECF flags determined from the caller. */
@@ -585,7 +588,7 @@ struct cgraph_node * cgraph_clone_node (
int, bool, VEC(cgraph_edge_p,heap) *);
void cgraph_redirect_edge_callee (struct cgraph_edge *, struct cgraph_node *);
-void cgraph_make_edge_direct (struct cgraph_edge *, struct cgraph_node *);
+void cgraph_make_edge_direct (struct cgraph_edge *, struct cgraph_node *, tree);
struct cgraph_asm_node *cgraph_add_asm_node (tree);
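
As an illustration of what the new thunk_delta field captures: when a class
overrides a virtual function inherited through a non-primary base, calling the
overrider through a pointer to that base requires adding a constant offset to
the this pointer, which the C++ front end normally hides inside a
this-adjusting thunk. The following minimal C++ sketch of that situation is
illustrative only; the class names and member layout are assumptions, not
taken from the patch.

#include <cstdio>

struct A { virtual void foo () {} };
struct B { long z; virtual void bar () {} };
/* D's B subobject does not start at offset 0, so D::bar called through a B*
   needs a this adjustment back to the enclosing D object.  */
struct D : A, B { virtual void bar () { std::puts ("D::bar"); } };

int main ()
{
  D d;
  B *pb = &d;                              /* points into the middle of d */
  long delta = (char *) &d - (char *) pb;  /* the constant adjustment */
  std::printf ("this adjustment: %ld\n", delta);
  pb->bar ();                              /* dispatched via a thunk (or an
                                              equivalent this adjustment) */
  return 0;
}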
===================================================================
@@ -2114,6 +2114,8 @@ cgraph_redirect_edge_call_stmt_to_callee
{
tree decl = gimple_call_fndecl (e->call_stmt);
gimple new_stmt;
+ gimple_stmt_iterator gsi;
+ bool gsi_computed = false;
#ifdef ENABLE_CHECKING
struct cgraph_node *node;
#endif
@@ -2146,9 +2148,24 @@ cgraph_redirect_edge_call_stmt_to_callee
}
}
+ if (e->indirect_info && e->indirect_info->thunk_delta
+ && integer_nonzerop (e->indirect_info->thunk_delta))
+ {
+ if (cgraph_dump_file)
+ {
+ fprintf (cgraph_dump_file, " Thunk delta is ");
+ print_generic_expr (cgraph_dump_file,
+ e->indirect_info->thunk_delta, 0);
+ fprintf (cgraph_dump_file, "\n");
+ }
+ gsi = gsi_for_stmt (e->call_stmt);
+ gsi_computed = true;
+ gimple_adjust_this_by_delta (&gsi, e->indirect_info->thunk_delta);
+ e->indirect_info->thunk_delta = NULL_TREE;
+ }
+
if (e->callee->clone.combined_args_to_skip)
{
- gimple_stmt_iterator gsi;
int lp_nr;
new_stmt
@@ -2160,7 +2177,8 @@ cgraph_redirect_edge_call_stmt_to_callee
&& TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
- gsi = gsi_for_stmt (e->call_stmt);
+ if (!gsi_computed)
+ gsi = gsi_for_stmt (e->call_stmt);
gsi_replace (&gsi, new_stmt, false);
/* We need to defer cleaning EH info on the new statement to
fixup-cfg. We may not have dominator information at this point
===================================================================
@@ -1442,17 +1442,26 @@ gimple_get_relevant_ref_binfo (tree ref,
}
}
-/* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
- integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
- carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
+/* Return the declaration of the function that an OBJ_TYPE_REF references.
+ TOKEN is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
+ KNOWN_BINFO carries the binfo describing the true type of
+ OBJ_TYPE_REF_OBJECT(REF). If a call to the function must be accompanied
+ by a this adjustment, the constant that should be added to the this pointer
+ is stored in *DELTA. If REFUSE_THUNKS is true, return NULL if the function
+ is a thunk (other than a this adjustment, which is dealt with via DELTA). */
tree
-gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
+gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
+ tree *delta, bool refuse_thunks)
{
HOST_WIDE_INT i;
- tree v, fndecl, delta;
+ tree v, fndecl;
+ struct cgraph_node *node;
v = BINFO_VIRTUALS (known_binfo);
+ /* If there are no virtual methods, leave the OBJ_TYPE_REF alone. */
+ if (!v)
+ return NULL_TREE;
i = 0;
while (i != token)
{
@@ -1462,62 +1471,91 @@ gimple_fold_obj_type_ref_known_binfo (HO
}
fndecl = TREE_VALUE (v);
- delta = TREE_PURPOSE (v);
- gcc_assert (host_integerp (delta, 0));
-
- if (integer_nonzerop (delta))
- {
- struct cgraph_node *node = cgraph_get_node (fndecl);
- HOST_WIDE_INT off = tree_low_cst (delta, 0);
-
- if (!node)
- return NULL;
- for (node = node->same_body; node; node = node->next)
- if (node->thunk.thunk_p && off == node->thunk.fixed_offset)
- break;
- if (node)
- fndecl = node->decl;
- else
- return NULL;
- }
+ node = cgraph_get_node_or_alias (fndecl);
+ if (refuse_thunks
+ && (!node
+ /* Bail out if it is a thunk declaration. Since simple this_adjusting
+ thunks are represented by a constant in TREE_PURPOSE of items in
+ BINFO_VIRTUALS, such a thunk is a more complicated kind that we cannot
+ handle yet.
+
+ FIXME: Remove the following condition once we are able to represent
+ thunk information on call graph edges. */
+ || (node->same_body_alias && node->thunk.thunk_p)))
+ return NULL_TREE;
/* When cgraph node is missing and function is not public, we cannot
devirtualize. This can happen in WHOPR when the actual method
ends up in other partition, because we found devirtualization
possibility too late. */
- if (!can_refer_decl_in_current_unit_p (fndecl))
- return NULL;
- return build_fold_addr_expr (fndecl);
+ if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
+ return NULL_TREE;
+
+ *delta = TREE_PURPOSE (v);
+ gcc_checking_assert (host_integerp (*delta, 0));
+ return fndecl;
}
+/* Generate code to adjust the first (this) parameter of the call statement
+ pointed to by GSI by the constant DELTA. */
-/* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
- is not NULL_TREE, it is the true type of the outmost encapsulating object if
- that comes from a pointer SSA_NAME. If the true outmost encapsulating type
- can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
- regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
+void
+gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
+{
+ gimple call_stmt = gsi_stmt (*gsi);
+ tree parm, tmp;
+ gimple new_stmt;
+
+ delta = fold_convert (sizetype, delta);
+ gcc_assert (gimple_call_num_args (call_stmt) >= 1);
+ parm = gimple_call_arg (call_stmt, 0);
+ gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
+ tmp = create_tmp_var (TREE_TYPE (parm), NULL);
+ add_referenced_var (tmp);
+
+ tmp = make_ssa_name (tmp, NULL);
+ new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
+ SSA_NAME_DEF_STMT (tmp) = new_stmt;
+ gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
+ gimple_call_set_arg (call_stmt, 0, tmp);
+}
-tree
-gimple_fold_obj_type_ref (tree ref, tree known_type)
+/* Fold a call statement to OBJ_TYPE_REF to a direct call, if possible. GSI
+ determines the statement; generating new statements is only allowed when
+ INPLACE is false. Return true iff the statement was changed. */
+
+static bool
+gimple_fold_obj_type_ref_call (gimple_stmt_iterator *gsi, bool inplace)
{
+ gimple stmt = gsi_stmt (*gsi);
+ tree ref = gimple_call_fn (stmt);
tree obj = OBJ_TYPE_REF_OBJECT (ref);
- tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
- tree binfo;
+ tree binfo, fndecl, delta;
+ HOST_WIDE_INT token;
if (TREE_CODE (obj) == ADDR_EXPR)
obj = TREE_OPERAND (obj, 0);
+ else
+ return false;
+
+ binfo = gimple_get_relevant_ref_binfo (obj, NULL_TREE);
+ if (!binfo)
+ return false;
+ token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
+ fndecl = gimple_get_virt_method_for_binfo (token, binfo, &delta,
+ !DECL_P (obj));
+ if (!fndecl)
+ return false;
- binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
- if (binfo)
+ if (integer_nonzerop (delta))
{
- HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
- /* If there is no virtual methods leave the OBJ_TYPE_REF alone. */
- if (!BINFO_VIRTUALS (binfo))
- return NULL_TREE;
- return gimple_fold_obj_type_ref_known_binfo (token, binfo);
+ if (inplace)
+ return false;
+ gimple_adjust_this_by_delta (gsi, delta);
}
- else
- return NULL_TREE;
+
+ gimple_call_set_fndecl (stmt, fndecl);
+ return true;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
@@ -1525,8 +1563,8 @@ gimple_fold_obj_type_ref (tree ref, tree
simplifies to a constant value. Return true if any changes were made.
It is assumed that the operands have been previously folded. */
-static bool
-fold_gimple_call (gimple_stmt_iterator *gsi, bool inplace)
+bool
+gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
gimple stmt = gsi_stmt (*gsi);
@@ -1552,18 +1590,8 @@ fold_gimple_call (gimple_stmt_iterator *
copying EH region info to the new node. Easier to just do it
here where we can just smash the call operand. */
callee = gimple_call_fn (stmt);
- if (TREE_CODE (callee) == OBJ_TYPE_REF
- && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
- {
- tree t;
-
- t = gimple_fold_obj_type_ref (callee, NULL_TREE);
- if (t)
- {
- gimple_call_set_fn (stmt, t);
- return true;
- }
- }
+ if (TREE_CODE (callee) == OBJ_TYPE_REF)
+ return gimple_fold_obj_type_ref_call (gsi, inplace);
}
return false;
@@ -1617,7 +1645,7 @@ fold_stmt_1 (gimple_stmt_iterator *gsi,
changed = true;
}
}
- changed |= fold_gimple_call (gsi, inplace);
+ changed |= gimple_fold_call (gsi, inplace);
break;
case GIMPLE_ASM:
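
To make the effect of gimple_fold_obj_type_ref_call plus
gimple_adjust_this_by_delta concrete, here is a hand-devirtualized C++ sketch:
once the dynamic type behind the OBJ_TYPE_REF is known, the virtual call
becomes a direct call to the overrider with the first argument shifted by the
constant delta. The classes and the explicit pointer arithmetic below are
illustrative assumptions; the patch performs the equivalent transformation on
GIMPLE, not on C++ source.

#include <cstdio>

struct B { long z; virtual void foo () { std::puts ("B::foo"); } };
struct C { long pad[4]; virtual void foo () { std::puts ("C::foo"); } };
struct D : C, B { virtual void foo () { std::puts ("D::foo"); } };

int main ()
{
  D d;
  B *pb = &d;                                  /* non-primary base subobject */

  /* What the source says: an indirect (virtual) call.  */
  pb->foo ();

  /* Roughly what the folded code does: adjust the first argument by the
     constant delta, then call the overrider directly.  */
  long delta = (char *) &d - (char *) pb;
  D *adjusted = reinterpret_cast<D *> ((char *) pb + delta);
  adjusted->D::foo ();                         /* direct, non-virtual call */
  return 0;
}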
===================================================================
@@ -894,10 +894,10 @@ unsigned get_gimple_rhs_num_ops (enum tr
#define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO)
gimple gimple_alloc_stat (enum gimple_code, unsigned MEM_STAT_DECL);
const char *gimple_decl_printable_name (tree, int);
-tree gimple_fold_obj_type_ref (tree, tree);
+bool gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace);
tree gimple_get_relevant_ref_binfo (tree ref, tree known_binfo);
-tree gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT, tree);
-
+tree gimple_get_virt_method_for_binfo (HOST_WIDE_INT, tree, tree *, bool);
+void gimple_adjust_this_by_delta (gimple_stmt_iterator *, tree);
/* Returns true iff T is a valid GIMPLE statement. */
extern bool is_gimple_stmt (tree);
===================================================================
@@ -1214,7 +1214,7 @@ ipcp_process_devirtualization_opportunit
{
int param_index, types_count, j;
HOST_WIDE_INT token;
- tree target;
+ tree target, delta;
next_ie = ie->next_callee;
if (!ie->indirect_info->polymorphic)
@@ -1231,7 +1231,8 @@ ipcp_process_devirtualization_opportunit
for (j = 0; j < types_count; j++)
{
tree binfo = VEC_index (tree, info->params[param_index].types, j);
- tree t = gimple_fold_obj_type_ref_known_binfo (token, binfo);
+ tree d;
+ tree t = gimple_get_virt_method_for_binfo (token, binfo, &d, true);
if (!t)
{
@@ -1239,8 +1240,11 @@ ipcp_process_devirtualization_opportunit
break;
}
else if (!target)
- target = t;
- else if (target != t)
+ {
+ target = t;
+ delta = d;
+ }
+ else if (target != t || !tree_int_cst_equal (delta, d))
{
target = NULL_TREE;
break;
@@ -1248,7 +1252,7 @@ ipcp_process_devirtualization_opportunit
}
if (target)
- ipa_make_edge_direct_to_target (ie, target);
+ ipa_make_edge_direct_to_target (ie, target, delta);
}
}
@@ -1288,6 +1292,7 @@ ipcp_discover_new_direct_edges (struct c
for (ie = node->indirect_calls; ie; ie = next_ie)
{
struct cgraph_indirect_call_info *ici = ie->indirect_info;
+ tree target, delta = NULL_TREE;
next_ie = ie->next_callee;
if (ici->param_index != index)
@@ -1307,12 +1312,15 @@ ipcp_discover_new_direct_edges (struct c
continue;
gcc_assert (ie->indirect_info->anc_offset == 0);
token = ie->indirect_info->otr_token;
- cst = gimple_fold_obj_type_ref_known_binfo (token, binfo);
- if (!cst)
+ target = gimple_get_virt_method_for_binfo (token, binfo, &delta,
+ true);
+ if (!target)
continue;
}
+ else
+ target = cst;
- ipa_make_edge_direct_to_target (ie, cst);
+ ipa_make_edge_direct_to_target (ie, target, delta);
}
}
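
The ipcp_process_devirtualization_opportunities change matters for code like
the following hedged sketch, which mirrors the new run-time tests: when IPA-CP
knows the type of the object bound to the reference, the virtual call inside
bar can become a direct call to D::foo, and that call needs exactly the thunk
delta now threaded through ipa_make_edge_direct_to_target. The class names and
the noinline attribute are illustrative choices, not taken from the patch.

extern "C" void abort ();

struct A { virtual int foo () { return 1; } };
struct C { long pad[8]; virtual int baz () { return 2; } };
struct D : C, A { virtual int foo () { return 3; } };  /* needs a this
                                                          adjustment when
                                                          called through A* */

static int __attribute__ ((noinline)) bar (A &a)
{
  return a.foo ();              /* indirect call; IPA-CP can devirtualize it
                                   when the only caller passes a D */
}

int main ()
{
  D d;
  if (bar (d) != 3)
    abort ();
  return 0;
}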
===================================================================
@@ -1432,35 +1432,43 @@ update_jump_functions_after_inlining (st
}
/* If TARGET is an addr_expr of a function declaration, make it the destination
- of an indirect edge IE and return the edge. Otherwise, return NULL. */
+ of an indirect edge IE and return the edge. Otherwise, return NULL. DELTA,
+ if non-NULL, is an integer constant that must be added to the this pointer
+ (first parameter). */
struct cgraph_edge *
-ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
+ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target, tree delta)
{
struct cgraph_node *callee;
- if (TREE_CODE (target) != ADDR_EXPR)
- return NULL;
- target = TREE_OPERAND (target, 0);
+ if (TREE_CODE (target) == ADDR_EXPR)
+ target = TREE_OPERAND (target, 0);
if (TREE_CODE (target) != FUNCTION_DECL)
return NULL;
callee = cgraph_node (target);
if (!callee)
return NULL;
ipa_check_create_node_params ();
- cgraph_make_edge_direct (ie, callee);
+
+ cgraph_make_edge_direct (ie, callee, delta);
if (dump_file)
{
fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
- "(%s/%i -> %s/%i) for stmt ",
+ "(%s/%i -> %s/%i), for stmt ",
ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
cgraph_node_name (ie->caller), ie->caller->uid,
cgraph_node_name (ie->callee), ie->callee->uid);
-
if (ie->call_stmt)
print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
else
fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
+
+ if (delta)
+ {
+ fprintf (dump_file, " Thunk delta is ");
+ print_generic_expr (dump_file, delta, 0);
+ fprintf (dump_file, "\n");
+ }
}
if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
@@ -1488,7 +1496,7 @@ try_make_edge_direct_simple_call (struct
else
return NULL;
- return ipa_make_edge_direct_to_target (ie, target);
+ return ipa_make_edge_direct_to_target (ie, target, NULL_TREE);
}
/* Try to find a destination for indirect edge IE that corresponds to a
@@ -1500,7 +1508,7 @@ static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
struct ipa_jump_func *jfunc)
{
- tree binfo, type, target;
+ tree binfo, type, target, delta;
HOST_WIDE_INT token;
if (jfunc->type == IPA_JF_KNOWN_TYPE)
@@ -1524,12 +1532,12 @@ try_make_edge_direct_virtual_call (struc
type = ie->indirect_info->otr_type;
binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
if (binfo)
- target = gimple_fold_obj_type_ref_known_binfo (token, binfo);
+ target = gimple_get_virt_method_for_binfo (token, binfo, &delta, true);
else
return NULL;
if (target)
- return ipa_make_edge_direct_to_target (ie, target);
+ return ipa_make_edge_direct_to_target (ie, target, delta);
else
return NULL;
}
@@ -2520,6 +2528,7 @@ ipa_write_indirect_edge_info (struct out
{
lto_output_sleb128_stream (ob->main_stream, ii->otr_token);
lto_output_tree (ob, ii->otr_type, true);
+ lto_output_tree (ob, ii->thunk_delta, true);
}
}
@@ -2542,6 +2551,7 @@ ipa_read_indirect_edge_info (struct lto_
{
ii->otr_token = (HOST_WIDE_INT) lto_input_sleb128 (ib);
ii->otr_type = lto_input_tree (ib, data_in);
+ ii->thunk_delta = lto_input_tree (ib, data_in);
}
}
===================================================================
@@ -430,7 +430,8 @@ bool ipa_propagate_indirect_call_infos (
VEC (cgraph_edge_p, heap) **new_edges);
/* Indirect edge and binfo processing. */
-struct cgraph_edge *ipa_make_edge_direct_to_target (struct cgraph_edge *, tree);
+struct cgraph_edge *ipa_make_edge_direct_to_target (struct cgraph_edge *, tree,
+ tree);
/* Debugging interface. */
===================================================================
@@ -2321,16 +2321,8 @@ ccp_fold_stmt (gimple_stmt_iterator *gsi
{
tree expr = OBJ_TYPE_REF_EXPR (callee);
OBJ_TYPE_REF_EXPR (callee) = valueize_op (expr);
- if (TREE_CODE (OBJ_TYPE_REF_EXPR (callee)) == ADDR_EXPR)
- {
- tree t;
- t = gimple_fold_obj_type_ref (callee, NULL_TREE);
- if (t)
- {
- gimple_call_set_fn (stmt, t);
- changed = true;
- }
- }
+ if (gimple_fold_call (gsi, false))
+ changed = true;
OBJ_TYPE_REF_EXPR (callee) = expr;
}
===================================================================
@@ -0,0 +1,41 @@
+/* { dg-do run } */
+/* { dg-options "-O -fipa-cp -fno-early-inlining" } */
+
+extern "C" void abort ();
+
+struct A
+{
+ virtual void foo () = 0;
+};
+
+struct B : A
+{
+ virtual void foo () = 0;
+};
+
+struct C : A
+{
+};
+
+struct D : C, B
+{
+ int i;
+ D () : i(0xaaaa) {}
+ virtual void foo ()
+ {
+ if (i != 0xaaaa)
+ abort();
+ }
+};
+
+static inline void bar (B &b)
+{
+ b.foo ();
+}
+
+int main()
+{
+ D d;
+ bar (d);
+ return 0;
+}
===================================================================
@@ -0,0 +1,67 @@
+// Check that indirect calls to thunks do not lead to errors.
+// { dg-do run }
+// { dg-options "-O" }
+
+extern "C" void abort ();
+
+class A
+{
+public:
+ virtual void foo () {abort();}
+};
+
+class B : public A
+{
+public:
+ int z;
+ virtual void foo () {abort();}
+};
+
+class C : public A
+{
+public:
+ void *a[32];
+ unsigned long b;
+ long c[32];
+
+ virtual void foo () {abort();}
+};
+
+class D : public C, public B
+{
+public:
+ D () : C(), B()
+ {
+ int i;
+ for (i = 0; i < 32; i++)
+ {
+ a[i] = (void *) 0;
+ c[i] = 0;
+ }
+ b = 0xaaaa;
+ }
+
+ virtual void foo ();
+};
+
+inline void D::foo()
+{
+ if (b != 0xaaaa)
+ abort();
+}
+
+static inline void bar (B &b)
+{
+
+ b.foo ();
+}
+
+int main()
+{
+ int i;
+ D d;
+
+ for (i = 0; i < 5000; i++)
+ bar (d);
+ return 0;
+}
===================================================================
@@ -0,0 +1,68 @@
+// Check that indirect calls to thunks do not lead to errors.
+// { dg-do run }
+// { dg-options "-O -finline -finline-small-functions -finline-functions" }
+
+
+extern "C" void abort ();
+
+class A
+{
+public:
+ virtual void foo () {abort();}
+};
+
+class B : public A
+{
+public:
+ int z;
+ virtual void foo () {abort();}
+};
+
+class C : public A
+{
+public:
+ void *a[32];
+ unsigned long b;
+ long c[32];
+
+ virtual void foo () {abort();}
+};
+
+class D : public C, public B
+{
+public:
+ D () : C(), B()
+ {
+ int i;
+ for (i = 0; i < 32; i++)
+ {
+ a[i] = (void *) 0;
+ c[i] = 0;
+ }
+ b = 0xaaaa;
+ }
+
+ virtual void foo ();
+};
+
+void D::foo()
+{
+ if (b != 0xaaaa)
+ abort();
+}
+
+static inline void bar (B &b)
+{
+
+ b.foo ();
+}
+
+int main()
+{
+ int i;
+ D d;
+
+ for (i = 0; i < 5000; i++)
+ bar (d);
+ return 0;
+}
===================================================================
@@ -0,0 +1,67 @@
+// Check that indirect calls to thunks do not lead to errors.
+// { dg-do run }
+// { dg-options "-O -fipa-cp" }
+
+extern "C" void abort ();
+
+class A
+{
+public:
+ virtual void foo () {abort();}
+};
+
+class B : public A
+{
+public:
+ int z;
+ virtual void foo () {abort();}
+};
+
+class C : public A
+{
+public:
+ void *a[32];
+ unsigned long b;
+ long c[32];
+
+ virtual void foo () {abort();}
+};
+
+class D : public C, public B
+{
+public:
+ D () : C(), B()
+ {
+ int i;
+ for (i = 0; i < 32; i++)
+ {
+ a[i] = (void *) 0;
+ c[i] = 0;
+ }
+ b = 0xaaaa;
+ }
+
+ virtual void foo ();
+};
+
+void D::foo()
+{
+ if (b != 0xaaaa)
+ abort();
+}
+
+static void bar (B &b)
+{
+
+ b.foo ();
+}
+
+int main()
+{
+ int i;
+ D d;
+
+ for (i = 0; i < 5000; i++)
+ bar (d);
+ return 0;
+}
===================================================================
@@ -0,0 +1,33 @@
+// { dg-do run }
+
+extern "C" void abort ();
+
+class A {
+public:
+ virtual A* getThis() { return this; }
+};
+
+class B {
+int a;
+public:
+ virtual B* getThis() { return this; }
+};
+
+class AB : public A, public B {
+public:
+ virtual AB* getThis() { return this; }
+};
+
+int main ()
+{
+ AB ab;
+
+ A* a = &ab;
+ B* b = &ab;
+
+ if (a->getThis() != a
+ || b->getThis() != b)
+ abort ();
+
+ return 0;
+}
===================================================================
@@ -0,0 +1,66 @@
+// Check that indirect calls to thunks do not lead to errors.
+// { dg-do run }
+
+extern "C" void abort ();
+
+class A
+{
+public:
+ virtual void foo () {abort();}
+};
+
+class B : public A
+{
+public:
+ int z;
+ virtual void foo () {abort();}
+};
+
+class C : public A
+{
+public:
+ void *a[32];
+ unsigned long b;
+ long c[32];
+
+ virtual void foo () {abort();}
+};
+
+class D : public C, public B
+{
+public:
+ D () : C(), B()
+ {
+ int i;
+ for (i = 0; i < 32; i++)
+ {
+ a[i] = (void *) 0;
+ c[i] = 0;
+ }
+ b = 0xaaaa;
+ }
+
+ virtual void foo ();
+};
+
+void D::foo()
+{
+ if (b != 0xaaaa)
+ abort();
+}
+
+static inline void bar (B &b)
+{
+
+ b.foo ();
+}
+
+int main()
+{
+ int i;
+ D d;
+
+ for (i = 0; i < 5000; i++)
+ bar (d);
+ return 0;
+}