| Field | Value |
|---|---|
| Message ID | alpine.DEB.2.02.1408200901460.5162@stedding.saclay.inria.fr |
| State | New |
| Headers | show |
On Wed, Aug 20, 2014 at 9:14 AM, Marc Glisse <marc.glisse@inria.fr> wrote: > Hello, > > here is a new version of the patch which passed bootstrap+testsuite. I am > still removing the ref_maybe_used_by_stmt_p test, see the previous message > for a discussion. Looks good to me. I'm still nervous about removing that check (though the description looks odd - if the stmt kills the lhs and it also uses it then if this is not an exact overlap it's undefined and if it is then the stmt is still dead...). So - ok if it bootstraps/tests ok. Thanks, Richard. > 2014-08-20 Marc Glisse <marc.glisse@inria.fr> > > PR tree-optimization/62112 > gcc/ > * gimple-iterator.c (gsi_replace): Return whether EH cleanup is > needed. > * gimple-iterator.h (gsi_replace): Return bool. > * tree-ssa-alias.c (ref_may_alias_global_p_1): New helper, code > moved from ref_may_alias_global_p. > > (ref_may_alias_global_p, refs_may_alias_p, > ref_maybe_used_by_stmt_p): > New overloads. > (ref_maybe_used_by_call_p): Take ao_ref* instead of tree. > (stmt_kills_ref_p_1): Rename... > (stmt_kills_ref_p): ... to this. > * tree-ssa-alias.h (ref_may_alias_global_p, > ref_maybe_used_by_stmt_p, > stmt_kills_ref_p): Declare. > * tree-ssa-dse.c (dse_possible_dead_store_p): New argument, use it. > > Move the self-assignment case... > (dse_optimize_stmt): ... here. Handle builtin calls. Remove dead > code. > gcc/testsuite/ > * gcc.dg/tree-ssa/pr62112-1.c: New file. > * gcc.dg/tree-ssa/pr62112-2.c: Likewise. > > -- > Marc Glisse > Index: gcc/gimple-iterator.c > =================================================================== > --- gcc/gimple-iterator.c (revision 214210) > +++ gcc/gimple-iterator.c (working copy) > @@ -422,51 +422,54 @@ gsi_split_seq_before (gimple_stmt_iterat > /* Cut OLD_SEQ before I. */ > gimple_seq_set_last (&old_seq, prev); > if (prev->next) > prev->next = NULL; > } > > > /* Replace the statement pointed-to by GSI to STMT. 
If UPDATE_EH_INFO > is true, the exception handling information of the original > statement is moved to the new statement. Assignments must only be > - replaced with assignments to the same LHS. */ > + replaced with assignments to the same LHS. Returns whether EH edge > + cleanup is required. */ > > -void > +bool > gsi_replace (gimple_stmt_iterator *gsi, gimple stmt, bool update_eh_info) > { > gimple orig_stmt = gsi_stmt (*gsi); > + bool require_eh_edge_purge = false; > > if (stmt == orig_stmt) > - return; > + return false; > > gcc_assert (!gimple_has_lhs (orig_stmt) || !gimple_has_lhs (stmt) > || gimple_get_lhs (orig_stmt) == gimple_get_lhs (stmt)); > > gimple_set_location (stmt, gimple_location (orig_stmt)); > gimple_set_bb (stmt, gsi_bb (*gsi)); > > /* Preserve EH region information from the original statement, if > requested by the caller. */ > if (update_eh_info) > - maybe_clean_or_replace_eh_stmt (orig_stmt, stmt); > + require_eh_edge_purge = maybe_clean_or_replace_eh_stmt (orig_stmt, > stmt); > > gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt); > > /* Free all the data flow information for ORIG_STMT. */ > gimple_set_bb (orig_stmt, NULL); > gimple_remove_stmt_histograms (cfun, orig_stmt); > delink_stmt_imm_use (orig_stmt); > > gsi_set_stmt (gsi, stmt); > gimple_set_modified (stmt, true); > update_modified_stmt (stmt); > + return require_eh_edge_purge; > } > > > /* Replace the statement pointed-to by GSI with the sequence SEQ. > If UPDATE_EH_INFO is true, the exception handling information of > the original statement is moved to the last statement of the new > sequence. If the old statement is an assignment, then so must > be the last statement of the new sequence, and they must have the > same LHS. 
*/ > > Index: gcc/gimple-iterator.h > =================================================================== > --- gcc/gimple-iterator.h (revision 214210) > +++ gcc/gimple-iterator.h (working copy) > @@ -51,21 +51,21 @@ extern void gsi_insert_seq_before_withou > extern void gsi_insert_seq_before (gimple_stmt_iterator *, gimple_seq, > enum gsi_iterator_update); > extern void gsi_insert_seq_after_without_update (gimple_stmt_iterator *, > gimple_seq, > enum gsi_iterator_update); > extern void gsi_insert_seq_after (gimple_stmt_iterator *, gimple_seq, > enum gsi_iterator_update); > extern gimple_seq gsi_split_seq_after (gimple_stmt_iterator); > extern void gsi_set_stmt (gimple_stmt_iterator *, gimple); > extern void gsi_split_seq_before (gimple_stmt_iterator *, gimple_seq *); > -extern void gsi_replace (gimple_stmt_iterator *, gimple, bool); > +extern bool gsi_replace (gimple_stmt_iterator *, gimple, bool); > extern void gsi_replace_with_seq (gimple_stmt_iterator *, gimple_seq, > bool); > extern void gsi_insert_before_without_update (gimple_stmt_iterator *, > gimple, > enum gsi_iterator_update); > extern void gsi_insert_before (gimple_stmt_iterator *, gimple, > enum gsi_iterator_update); > extern void gsi_insert_after_without_update (gimple_stmt_iterator *, > gimple, > enum gsi_iterator_update); > extern void gsi_insert_after (gimple_stmt_iterator *, gimple, > enum gsi_iterator_update); > extern bool gsi_remove (gimple_stmt_iterator *, bool); > Index: gcc/testsuite/gcc.dg/tree-ssa/pr62112-1.c > =================================================================== > --- gcc/testsuite/gcc.dg/tree-ssa/pr62112-1.c (revision 0) > +++ gcc/testsuite/gcc.dg/tree-ssa/pr62112-1.c (working copy) > @@ -0,0 +1,23 @@ > +/* { dg-do compile } */ > +/* { dg-options "-O1 -fdump-tree-dse1-details" } */ > + > +void f(){ > + char*p=__builtin_malloc(42); > + __builtin_memset(p,3,10); > + __builtin_memset(p,7,33); > +} > +char*g; > +void h(){ > + char*p=__builtin_malloc(42); > + 
g=__builtin_memset(p,3,10); > + __builtin_free(p); > +} > +char*i(){ > + char*p=__builtin_malloc(42); > + __builtin_memset(p,3,10); > + __builtin_memset(p,7,33); > + return p; > +} > + > +/* { dg-final { scan-tree-dump-times "Deleted dead call" 4 "dse1" } } */ > +/* { dg-final { cleanup-tree-dump "dse1" } } */ > Index: gcc/testsuite/gcc.dg/tree-ssa/pr62112-2.c > =================================================================== > --- gcc/testsuite/gcc.dg/tree-ssa/pr62112-2.c (revision 0) > +++ gcc/testsuite/gcc.dg/tree-ssa/pr62112-2.c (working copy) > @@ -0,0 +1,17 @@ > +/* { dg-do compile } */ > +/* { dg-options "-O1 -fdump-tree-dse1-details" } */ > + > +char*g; > +char* f(){ > + char*p=__builtin_malloc(42); > + __builtin_memset(p,3,33); > + __builtin_memset(p,7,10); > + return p; > +} > +void h(){ > + char*p=__builtin_malloc(42); > + g=__builtin_memset(p,3,10); > +} > + > +/* { dg-final { scan-tree-dump-not "Deleted dead" "dse1" } } */ > +/* { dg-final { cleanup-tree-dump "dse1" } } */ > Index: gcc/tree-ssa-alias.c > =================================================================== > --- gcc/tree-ssa-alias.c (revision 214210) > +++ gcc/tree-ssa-alias.c (working copy) > @@ -323,34 +323,47 @@ ptr_deref_may_alias_ref_p_1 (tree ptr, a > > if (TREE_CODE (base) == MEM_REF > || TREE_CODE (base) == TARGET_MEM_REF) > return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0)); > else if (DECL_P (base)) > return ptr_deref_may_alias_decl_p (ptr, base); > > return true; > } > > -/* Return true whether REF may refer to global memory. */ > +/* Returns whether reference REF to BASE may refer to global memory. 
*/ > > -bool > -ref_may_alias_global_p (tree ref) > +static bool > +ref_may_alias_global_p_1 (tree base) > { > - tree base = get_base_address (ref); > if (DECL_P (base)) > return is_global_var (base); > else if (TREE_CODE (base) == MEM_REF > || TREE_CODE (base) == TARGET_MEM_REF) > return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0)); > return true; > } > > +bool > +ref_may_alias_global_p (ao_ref *ref) > +{ > + tree base = ao_ref_base (ref); > + return ref_may_alias_global_p_1 (base); > +} > + > +bool > +ref_may_alias_global_p (tree ref) > +{ > + tree base = get_base_address (ref); > + return ref_may_alias_global_p_1 (base); > +} > + > /* Return true whether STMT may clobber global memory. */ > > bool > stmt_may_clobber_global_p (gimple stmt) > { > tree lhs; > > if (!gimple_vdef (stmt)) > return false; > > @@ -1406,20 +1419,28 @@ refs_may_alias_p_1 (ao_ref *ref1, ao_ref > tbaa_p); > > /* We really do not want to end up here, but returning true is safe. */ > #ifdef ENABLE_CHECKING > gcc_unreachable (); > #else > return true; > #endif > } > > +static bool > +refs_may_alias_p (tree ref1, ao_ref *ref2) > +{ > + ao_ref r1; > + ao_ref_init (&r1, ref1); > + return refs_may_alias_p_1 (&r1, ref2, true); > +} > + > bool > refs_may_alias_p (tree ref1, tree ref2) > { > ao_ref r1, r2; > bool res; > ao_ref_init (&r1, ref1); > ao_ref_init (&r2, ref2); > res = refs_may_alias_p_1 (&r1, &r2, true); > if (res) > ++alias_stats.refs_may_alias_p_may_alias; > @@ -1762,39 +1783,37 @@ process_args: > ao_ref_init (&r, op); > if (refs_may_alias_p_1 (&r, ref, true)) > return true; > } > } > > return false; > } > > static bool > -ref_maybe_used_by_call_p (gimple call, tree ref) > +ref_maybe_used_by_call_p (gimple call, ao_ref *ref) > { > - ao_ref r; > bool res; > - ao_ref_init (&r, ref); > - res = ref_maybe_used_by_call_p_1 (call, &r); > + res = ref_maybe_used_by_call_p_1 (call, ref); > if (res) > ++alias_stats.ref_maybe_used_by_call_p_may_alias; > else > 
++alias_stats.ref_maybe_used_by_call_p_no_alias; > return res; > } > > > /* If the statement STMT may use the memory reference REF return > true, otherwise return false. */ > > bool > -ref_maybe_used_by_stmt_p (gimple stmt, tree ref) > +ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref) > { > if (is_gimple_assign (stmt)) > { > tree rhs; > > /* All memory assign statements are single. */ > if (!gimple_assign_single_p (stmt)) > return false; > > rhs = gimple_assign_rhs1 (stmt); > @@ -1803,41 +1822,48 @@ ref_maybe_used_by_stmt_p (gimple stmt, t > || gimple_assign_rhs_code (stmt) == CONSTRUCTOR) > return false; > > return refs_may_alias_p (rhs, ref); > } > else if (is_gimple_call (stmt)) > return ref_maybe_used_by_call_p (stmt, ref); > else if (gimple_code (stmt) == GIMPLE_RETURN) > { > tree retval = gimple_return_retval (stmt); > - tree base; > if (retval > && TREE_CODE (retval) != SSA_NAME > && !is_gimple_min_invariant (retval) > && refs_may_alias_p (retval, ref)) > return true; > /* If ref escapes the function then the return acts as a use. */ > - base = get_base_address (ref); > + tree base = ao_ref_base (ref); > if (!base) > ; > else if (DECL_P (base)) > return is_global_var (base); > else if (TREE_CODE (base) == MEM_REF > || TREE_CODE (base) == TARGET_MEM_REF) > return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0)); > return false; > } > > return true; > } > > +bool > +ref_maybe_used_by_stmt_p (gimple stmt, tree ref) > +{ > + ao_ref r; > + ao_ref_init (&r, ref); > + return ref_maybe_used_by_stmt_p (stmt, &r); > +} > + > /* If the call in statement CALL may clobber the memory reference REF > return true, otherwise return false. */ > > bool > call_may_clobber_ref_p_1 (gimple call, ao_ref *ref) > { > tree base; > tree callee; > > /* If the call is pure or const it cannot clobber anything. 
*/ > @@ -2162,22 +2188,22 @@ bool > stmt_may_clobber_ref_p (gimple stmt, tree ref) > { > ao_ref r; > ao_ref_init (&r, ref); > return stmt_may_clobber_ref_p_1 (stmt, &r); > } > > /* If STMT kills the memory reference REF return true, otherwise > return false. */ > > -static bool > -stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref) > +bool > +stmt_kills_ref_p (gimple stmt, ao_ref *ref) > { > if (!ao_ref_base (ref)) > return false; > > if (gimple_has_lhs (stmt) > && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME > /* The assignment is not necessarily carried out if it can throw > and we can catch it in the current function where we could inspect > the previous value. > ??? We only need to care about the RHS throwing. For aggregate > @@ -2350,21 +2376,21 @@ stmt_kills_ref_p_1 (gimple stmt, ao_ref > } > } > return false; > } > > bool > stmt_kills_ref_p (gimple stmt, tree ref) > { > ao_ref r; > ao_ref_init (&r, ref); > - return stmt_kills_ref_p_1 (stmt, &r); > + return stmt_kills_ref_p (stmt, &r); > } > > > /* Walk the virtual use-def chain of VUSE until hitting the virtual operand > TARGET or a statement clobbering the memory reference REF in which > case false is returned. The walk starts with VUSE, one argument of PHI. 
> */ > > static bool > maybe_skip_until (gimple phi, tree target, ao_ref *ref, > tree vuse, unsigned int *cnt, bitmap *visited, > Index: gcc/tree-ssa-alias.h > =================================================================== > --- gcc/tree-ssa-alias.h (revision 214210) > +++ gcc/tree-ssa-alias.h (working copy) > @@ -94,31 +94,34 @@ struct ao_ref > > > /* In tree-ssa-alias.c */ > extern void ao_ref_init (ao_ref *, tree); > extern void ao_ref_init_from_ptr_and_size (ao_ref *, tree, tree); > extern tree ao_ref_base (ao_ref *); > extern alias_set_type ao_ref_alias_set (ao_ref *); > extern bool ptr_deref_may_alias_global_p (tree); > extern bool ptr_derefs_may_alias_p (tree, tree); > extern bool ref_may_alias_global_p (tree); > +extern bool ref_may_alias_global_p (ao_ref *); > extern bool refs_may_alias_p (tree, tree); > extern bool refs_may_alias_p_1 (ao_ref *, ao_ref *, bool); > extern bool refs_anti_dependent_p (tree, tree); > extern bool refs_output_dependent_p (tree, tree); > extern bool ref_maybe_used_by_stmt_p (gimple, tree); > +extern bool ref_maybe_used_by_stmt_p (gimple, ao_ref *); > extern bool stmt_may_clobber_global_p (gimple); > extern bool stmt_may_clobber_ref_p (gimple, tree); > extern bool stmt_may_clobber_ref_p_1 (gimple, ao_ref *); > extern bool call_may_clobber_ref_p (gimple, tree); > extern bool call_may_clobber_ref_p_1 (gimple, ao_ref *); > extern bool stmt_kills_ref_p (gimple, tree); > +extern bool stmt_kills_ref_p (gimple, ao_ref *); > extern tree get_continuation_for_phi (gimple, ao_ref *, > unsigned int *, bitmap *, bool, > void *(*)(ao_ref *, tree, void *, > bool), > void *); > extern void *walk_non_aliased_vuses (ao_ref *, tree, > void *(*)(ao_ref *, tree, > unsigned int, void *), > void *(*)(ao_ref *, tree, void *, > bool), > void *); > extern unsigned int walk_aliased_vdefs (ao_ref *, tree, > Index: gcc/tree-ssa-dse.c > =================================================================== > --- gcc/tree-ssa-dse.c (revision 214210) > +++ 
gcc/tree-ssa-dse.c (working copy) > @@ -75,39 +75,32 @@ along with GCC; see the file COPYING3. > fact, they are the same transformation applied to different views of > the CFG. */ > > > /* Bitmap of blocks that have had EH statements cleaned. We should > remove their dead edges eventually. */ > static bitmap need_eh_cleanup; > > > /* A helper of dse_optimize_stmt. > - Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that > - may prove STMT to be dead. > + Given a GIMPLE_ASSIGN in STMT that writes to REF, find a candidate > + statement *USE_STMT that may prove STMT to be dead. > Return TRUE if the above conditions are met, otherwise FALSE. */ > > static bool > -dse_possible_dead_store_p (gimple stmt, gimple *use_stmt) > +dse_possible_dead_store_p (ao_ref *ref, gimple stmt, gimple *use_stmt) > { > gimple temp; > unsigned cnt = 0; > > *use_stmt = NULL; > > - /* Self-assignments are zombies. */ > - if (operand_equal_p (gimple_assign_rhs1 (stmt), gimple_assign_lhs (stmt), > 0)) > - { > - *use_stmt = stmt; > - return true; > - } > - > /* Find the first dominated statement that clobbers (part of) the > memory stmt stores to with no intermediate statement that may use > part of the memory stmt stores. That is, find a store that may > prove stmt to be a dead store. */ > temp = stmt; > do > { > gimple use_stmt, defvar_def; > imm_use_iterator ui; > bool fail = false; > @@ -157,22 +150,21 @@ dse_possible_dead_store_p (gimple stmt, > /* Do not consider the PHI as use if it dominates the > stmt defining the virtual operand we are processing, > we have processed it already in this case. */ > if (gimple_bb (defvar_def) != gimple_bb (use_stmt) > && !dominated_by_p (CDI_DOMINATORS, > gimple_bb (defvar_def), > gimple_bb (use_stmt))) > temp = use_stmt; > } > /* If the statement is a use the store is not dead. 
*/ > - else if (ref_maybe_used_by_stmt_p (use_stmt, > - gimple_assign_lhs (stmt))) > + else if (ref_maybe_used_by_stmt_p (use_stmt, ref)) > { > fail = true; > BREAK_FROM_IMM_USE_STMT (ui); > } > /* If this is a store, remember it or bail out if we have > multiple ones (the will be in different CFG parts then). */ > else if (gimple_vdef (use_stmt)) > { > if (temp) > { > @@ -184,29 +176,29 @@ dse_possible_dead_store_p (gimple stmt, > } > > if (fail) > return false; > > /* If we didn't find any definition this means the store is dead > if it isn't a store to global reachable memory. In this case > just pretend the stmt makes itself dead. Otherwise fail. */ > if (!temp) > { > - if (stmt_may_clobber_global_p (stmt)) > + if (ref_may_alias_global_p (ref)) > return false; > > temp = stmt; > break; > } > } > /* Continue walking until we reach a kill. */ > - while (!stmt_kills_ref_p (temp, gimple_assign_lhs (stmt))); > + while (!stmt_kills_ref_p (temp, ref)); > > *use_stmt = temp; > > return true; > } > > > /* Attempt to eliminate dead stores in the statement referenced by BSI. > > A dead store is a store into a memory location which will later be > @@ -221,75 +213,113 @@ dse_possible_dead_store_p (gimple stmt, > static void > dse_optimize_stmt (gimple_stmt_iterator *gsi) > { > gimple stmt = gsi_stmt (*gsi); > > /* If this statement has no virtual defs, then there is nothing > to do. */ > if (!gimple_vdef (stmt)) > return; > > - /* We know we have virtual definitions. If this is a GIMPLE_ASSIGN > - that's not also a function call, then record it into our table. */ > - if (is_gimple_call (stmt) && gimple_call_fndecl (stmt)) > - return; > - > /* Don't return early on *this_2(D) ={v} {CLOBBER}. */ > if (gimple_has_volatile_ops (stmt) > && (!gimple_clobber_p (stmt) > || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF)) > return; > > + /* We know we have virtual definitions. We can handle assignments and > + some builtin calls. 
*/ > + if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)) > + { > + switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))) > + { > + case BUILT_IN_MEMCPY: > + case BUILT_IN_MEMMOVE: > + case BUILT_IN_MEMSET: > + { > + gimple use_stmt; > + ao_ref ref; > + tree size = NULL_TREE; > + if (gimple_call_num_args (stmt) == 3) > + size = gimple_call_arg (stmt, 2); > + tree ptr = gimple_call_arg (stmt, 0); > + ao_ref_init_from_ptr_and_size (&ref, ptr, size); > + if (!dse_possible_dead_store_p (&ref, stmt, &use_stmt)) > + return; > + > + if (dump_file && (dump_flags & TDF_DETAILS)) > + { > + fprintf (dump_file, " Deleted dead call '"); > + print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, > 0); > + fprintf (dump_file, "'\n"); > + } > + > + tree lhs = gimple_call_lhs (stmt); > + if (lhs) > + { > + gimple new_stmt = gimple_build_assign (lhs, ptr); > + unlink_stmt_vdef (stmt); > + if (gsi_replace (gsi, new_stmt, true)) > + bitmap_set_bit (need_eh_cleanup, gimple_bb > (stmt)->index); > + } > + else > + { > + /* Then we need to fix the operand of the consuming stmt. > */ > + unlink_stmt_vdef (stmt); > + > + /* Remove the dead store. */ > + if (gsi_remove (gsi, true)) > + bitmap_set_bit (need_eh_cleanup, gimple_bb > (stmt)->index); > + } > + break; > + } > + default: > + return; > + } > + } > + > if (is_gimple_assign (stmt)) > { > gimple use_stmt; > > - if (!dse_possible_dead_store_p (stmt, &use_stmt)) > - return; > + /* Self-assignments are zombies. */ > + if (operand_equal_p (gimple_assign_rhs1 (stmt), > + gimple_assign_lhs (stmt), 0)) > + use_stmt = stmt; > + else > + { > + ao_ref ref; > + ao_ref_init (&ref, gimple_assign_lhs (stmt)); > + if (!dse_possible_dead_store_p (&ref, stmt, &use_stmt)) > + return; > + } > > /* Now we know that use_stmt kills the LHS of stmt. */ > > /* But only remove *this_2(D) ={v} {CLOBBER} if killed by > another clobber stmt. 
*/ > if (gimple_clobber_p (stmt) > && !gimple_clobber_p (use_stmt)) > return; > > - basic_block bb; > - > - /* If use_stmt is or might be a nop assignment, e.g. for > - struct { ... } S a, b, *p; ... > - b = a; b = b; > - or > - b = a; b = *p; where p might be &b, > - or > - *p = a; *p = b; where p might be &b, > - or > - *p = *u; *p = *v; where p might be v, then USE_STMT > - acts as a use as well as definition, so store in STMT > - is not dead. */ > - if (stmt != use_stmt > - && ref_maybe_used_by_stmt_p (use_stmt, gimple_assign_lhs (stmt))) > - return; > - > if (dump_file && (dump_flags & TDF_DETAILS)) > { > fprintf (dump_file, " Deleted dead store '"); > print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0); > fprintf (dump_file, "'\n"); > } > > /* Then we need to fix the operand of the consuming stmt. */ > unlink_stmt_vdef (stmt); > > /* Remove the dead store. */ > - bb = gimple_bb (stmt); > + basic_block bb = gimple_bb (stmt); > if (gsi_remove (gsi, true)) > bitmap_set_bit (need_eh_cleanup, bb->index); > > /* And release any SSA_NAMEs set in this statement back to the > SSA_NAME manager. */ > release_defs (stmt); > } > } > > class dse_dom_walker : public dom_walker >
On Wed, 20 Aug 2014, Richard Biener wrote: > On Wed, Aug 20, 2014 at 9:14 AM, Marc Glisse <marc.glisse@inria.fr> wrote: >> Hello, >> >> here is a new version of the patch which passed bootstrap+testsuite. I am >> still removing the ref_maybe_used_by_stmt_p test, see the previous message >> for a discussion. > > Looks good to me. I'm still nervous about removing that check (though > the description looks odd - if the stmt kills the lhs and it also uses > it then if this is not an exact overlap it's undefined I hadn't realized that. Note that this looks true if the reference is the lhs of an assignment, but it can be wrong if the reference was computed from a call: memset(p,x,5); memmove(p+2,p,42); memmove would kill memset, except that... > and if it is then the stmt is still dead...). use_stmt would be dead in that case, not stmt. stmt: a = 42; use_stmt: a = a; > So - ok if it bootstraps/tests ok.
On Wed, 20 Aug 2014, Marc Glisse wrote: > On Wed, 20 Aug 2014, Richard Biener wrote: > >> On Wed, Aug 20, 2014 at 9:14 AM, Marc Glisse <marc.glisse@inria.fr> wrote: >>> Hello, >>> >>> here is a new version of the patch which passed bootstrap+testsuite. I am >>> still removing the ref_maybe_used_by_stmt_p test, see the previous message >>> for a discussion. >> >> Looks good to me. I'm still nervous about removing that check (though >> the description looks odd - if the stmt kills the lhs and it also uses >> it then if this is not an exact overlap it's undefined > > I hadn't realized that. Note that this looks true if the reference is the lhs > of an assignment, but it can be wrong if the reference was computed from a > call: > > memset(p,x,5); > memmove(p+2,p,42); Obviously I swapped p and p+2... > memmove would kill memset, except that... > >> and if it is then the stmt is still dead...). > > use_stmt would be dead in that case, not stmt. > > stmt: a = 42; > use_stmt: a = a; > >> So - ok if it bootstraps/tests ok.
On Wed, Aug 20, 2014 at 4:31 PM, Marc Glisse <marc.glisse@inria.fr> wrote: > On Wed, 20 Aug 2014, Richard Biener wrote: > >> On Wed, Aug 20, 2014 at 9:14 AM, Marc Glisse <marc.glisse@inria.fr> wrote: >>> >>> Hello, >>> >>> here is a new version of the patch which passed bootstrap+testsuite. I am >>> still removing the ref_maybe_used_by_stmt_p test, see the previous >>> message >>> for a discussion. >> >> >> Looks good to me. I'm still nervous about removing that check (though >> the description looks odd - if the stmt kills the lhs and it also uses >> it then if this is not an exact overlap it's undefined > > > I hadn't realized that. Note that this looks true if the reference is the > lhs of an assignment, but it can be wrong if the reference was computed from > a call: > > memset(p,x,5); > memmove(p+2,p,42); > > memmove would kill memset, except that... > > >> and if it is then the stmt is still dead...). > > > use_stmt would be dead in that case, not stmt. Yeah, but we don't handle call self-assignments. Richard. > stmt: a = 42; > use_stmt: a = a; > >> So - ok if it bootstraps/tests ok. > > > -- > Marc Glisse
Index: gcc/gimple-iterator.c =================================================================== --- gcc/gimple-iterator.c (revision 214210) +++ gcc/gimple-iterator.c (working copy) @@ -422,51 +422,54 @@ gsi_split_seq_before (gimple_stmt_iterat /* Cut OLD_SEQ before I. */ gimple_seq_set_last (&old_seq, prev); if (prev->next) prev->next = NULL; } /* Replace the statement pointed-to by GSI to STMT. If UPDATE_EH_INFO is true, the exception handling information of the original statement is moved to the new statement. Assignments must only be - replaced with assignments to the same LHS. */ + replaced with assignments to the same LHS. Returns whether EH edge + cleanup is required. */ -void +bool gsi_replace (gimple_stmt_iterator *gsi, gimple stmt, bool update_eh_info) { gimple orig_stmt = gsi_stmt (*gsi); + bool require_eh_edge_purge = false; if (stmt == orig_stmt) - return; + return false; gcc_assert (!gimple_has_lhs (orig_stmt) || !gimple_has_lhs (stmt) || gimple_get_lhs (orig_stmt) == gimple_get_lhs (stmt)); gimple_set_location (stmt, gimple_location (orig_stmt)); gimple_set_bb (stmt, gsi_bb (*gsi)); /* Preserve EH region information from the original statement, if requested by the caller. */ if (update_eh_info) - maybe_clean_or_replace_eh_stmt (orig_stmt, stmt); + require_eh_edge_purge = maybe_clean_or_replace_eh_stmt (orig_stmt, stmt); gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt); /* Free all the data flow information for ORIG_STMT. */ gimple_set_bb (orig_stmt, NULL); gimple_remove_stmt_histograms (cfun, orig_stmt); delink_stmt_imm_use (orig_stmt); gsi_set_stmt (gsi, stmt); gimple_set_modified (stmt, true); update_modified_stmt (stmt); + return require_eh_edge_purge; } /* Replace the statement pointed-to by GSI with the sequence SEQ. If UPDATE_EH_INFO is true, the exception handling information of the original statement is moved to the last statement of the new sequence. 
If the old statement is an assignment, then so must be the last statement of the new sequence, and they must have the same LHS. */ Index: gcc/gimple-iterator.h =================================================================== --- gcc/gimple-iterator.h (revision 214210) +++ gcc/gimple-iterator.h (working copy) @@ -51,21 +51,21 @@ extern void gsi_insert_seq_before_withou extern void gsi_insert_seq_before (gimple_stmt_iterator *, gimple_seq, enum gsi_iterator_update); extern void gsi_insert_seq_after_without_update (gimple_stmt_iterator *, gimple_seq, enum gsi_iterator_update); extern void gsi_insert_seq_after (gimple_stmt_iterator *, gimple_seq, enum gsi_iterator_update); extern gimple_seq gsi_split_seq_after (gimple_stmt_iterator); extern void gsi_set_stmt (gimple_stmt_iterator *, gimple); extern void gsi_split_seq_before (gimple_stmt_iterator *, gimple_seq *); -extern void gsi_replace (gimple_stmt_iterator *, gimple, bool); +extern bool gsi_replace (gimple_stmt_iterator *, gimple, bool); extern void gsi_replace_with_seq (gimple_stmt_iterator *, gimple_seq, bool); extern void gsi_insert_before_without_update (gimple_stmt_iterator *, gimple, enum gsi_iterator_update); extern void gsi_insert_before (gimple_stmt_iterator *, gimple, enum gsi_iterator_update); extern void gsi_insert_after_without_update (gimple_stmt_iterator *, gimple, enum gsi_iterator_update); extern void gsi_insert_after (gimple_stmt_iterator *, gimple, enum gsi_iterator_update); extern bool gsi_remove (gimple_stmt_iterator *, bool); Index: gcc/testsuite/gcc.dg/tree-ssa/pr62112-1.c =================================================================== --- gcc/testsuite/gcc.dg/tree-ssa/pr62112-1.c (revision 0) +++ gcc/testsuite/gcc.dg/tree-ssa/pr62112-1.c (working copy) @@ -0,0 +1,23 @@ +/* { dg-do compile } */ +/* { dg-options "-O1 -fdump-tree-dse1-details" } */ + +void f(){ + char*p=__builtin_malloc(42); + __builtin_memset(p,3,10); + __builtin_memset(p,7,33); +} +char*g; +void h(){ + 
char*p=__builtin_malloc(42); + g=__builtin_memset(p,3,10); + __builtin_free(p); +} +char*i(){ + char*p=__builtin_malloc(42); + __builtin_memset(p,3,10); + __builtin_memset(p,7,33); + return p; +} + +/* { dg-final { scan-tree-dump-times "Deleted dead call" 4 "dse1" } } */ +/* { dg-final { cleanup-tree-dump "dse1" } } */ Index: gcc/testsuite/gcc.dg/tree-ssa/pr62112-2.c =================================================================== --- gcc/testsuite/gcc.dg/tree-ssa/pr62112-2.c (revision 0) +++ gcc/testsuite/gcc.dg/tree-ssa/pr62112-2.c (working copy) @@ -0,0 +1,17 @@ +/* { dg-do compile } */ +/* { dg-options "-O1 -fdump-tree-dse1-details" } */ + +char*g; +char* f(){ + char*p=__builtin_malloc(42); + __builtin_memset(p,3,33); + __builtin_memset(p,7,10); + return p; +} +void h(){ + char*p=__builtin_malloc(42); + g=__builtin_memset(p,3,10); +} + +/* { dg-final { scan-tree-dump-not "Deleted dead" "dse1" } } */ +/* { dg-final { cleanup-tree-dump "dse1" } } */ Index: gcc/tree-ssa-alias.c =================================================================== --- gcc/tree-ssa-alias.c (revision 214210) +++ gcc/tree-ssa-alias.c (working copy) @@ -323,34 +323,47 @@ ptr_deref_may_alias_ref_p_1 (tree ptr, a if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF) return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0)); else if (DECL_P (base)) return ptr_deref_may_alias_decl_p (ptr, base); return true; } -/* Return true whether REF may refer to global memory. */ +/* Returns whether reference REF to BASE may refer to global memory. 
*/ -bool -ref_may_alias_global_p (tree ref) +static bool +ref_may_alias_global_p_1 (tree base) { - tree base = get_base_address (ref); if (DECL_P (base)) return is_global_var (base); else if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF) return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0)); return true; } +bool +ref_may_alias_global_p (ao_ref *ref) +{ + tree base = ao_ref_base (ref); + return ref_may_alias_global_p_1 (base); +} + +bool +ref_may_alias_global_p (tree ref) +{ + tree base = get_base_address (ref); + return ref_may_alias_global_p_1 (base); +} + /* Return true whether STMT may clobber global memory. */ bool stmt_may_clobber_global_p (gimple stmt) { tree lhs; if (!gimple_vdef (stmt)) return false; @@ -1406,20 +1419,28 @@ refs_may_alias_p_1 (ao_ref *ref1, ao_ref tbaa_p); /* We really do not want to end up here, but returning true is safe. */ #ifdef ENABLE_CHECKING gcc_unreachable (); #else return true; #endif } +static bool +refs_may_alias_p (tree ref1, ao_ref *ref2) +{ + ao_ref r1; + ao_ref_init (&r1, ref1); + return refs_may_alias_p_1 (&r1, ref2, true); +} + bool refs_may_alias_p (tree ref1, tree ref2) { ao_ref r1, r2; bool res; ao_ref_init (&r1, ref1); ao_ref_init (&r2, ref2); res = refs_may_alias_p_1 (&r1, &r2, true); if (res) ++alias_stats.refs_may_alias_p_may_alias; @@ -1762,39 +1783,37 @@ process_args: ao_ref_init (&r, op); if (refs_may_alias_p_1 (&r, ref, true)) return true; } } return false; } static bool -ref_maybe_used_by_call_p (gimple call, tree ref) +ref_maybe_used_by_call_p (gimple call, ao_ref *ref) { - ao_ref r; bool res; - ao_ref_init (&r, ref); - res = ref_maybe_used_by_call_p_1 (call, &r); + res = ref_maybe_used_by_call_p_1 (call, ref); if (res) ++alias_stats.ref_maybe_used_by_call_p_may_alias; else ++alias_stats.ref_maybe_used_by_call_p_no_alias; return res; } /* If the statement STMT may use the memory reference REF return true, otherwise return false. 
*/ bool -ref_maybe_used_by_stmt_p (gimple stmt, tree ref) +ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref) { if (is_gimple_assign (stmt)) { tree rhs; /* All memory assign statements are single. */ if (!gimple_assign_single_p (stmt)) return false; rhs = gimple_assign_rhs1 (stmt); @@ -1803,41 +1822,48 @@ ref_maybe_used_by_stmt_p (gimple stmt, t || gimple_assign_rhs_code (stmt) == CONSTRUCTOR) return false; return refs_may_alias_p (rhs, ref); } else if (is_gimple_call (stmt)) return ref_maybe_used_by_call_p (stmt, ref); else if (gimple_code (stmt) == GIMPLE_RETURN) { tree retval = gimple_return_retval (stmt); - tree base; if (retval && TREE_CODE (retval) != SSA_NAME && !is_gimple_min_invariant (retval) && refs_may_alias_p (retval, ref)) return true; /* If ref escapes the function then the return acts as a use. */ - base = get_base_address (ref); + tree base = ao_ref_base (ref); if (!base) ; else if (DECL_P (base)) return is_global_var (base); else if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF) return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0)); return false; } return true; } +bool +ref_maybe_used_by_stmt_p (gimple stmt, tree ref) +{ + ao_ref r; + ao_ref_init (&r, ref); + return ref_maybe_used_by_stmt_p (stmt, &r); +} + /* If the call in statement CALL may clobber the memory reference REF return true, otherwise return false. */ bool call_may_clobber_ref_p_1 (gimple call, ao_ref *ref) { tree base; tree callee; /* If the call is pure or const it cannot clobber anything. */ @@ -2162,22 +2188,22 @@ bool stmt_may_clobber_ref_p (gimple stmt, tree ref) { ao_ref r; ao_ref_init (&r, ref); return stmt_may_clobber_ref_p_1 (stmt, &r); } /* If STMT kills the memory reference REF return true, otherwise return false. 
*/ -static bool -stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref) +bool +stmt_kills_ref_p (gimple stmt, ao_ref *ref) { if (!ao_ref_base (ref)) return false; if (gimple_has_lhs (stmt) && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME /* The assignment is not necessarily carried out if it can throw and we can catch it in the current function where we could inspect the previous value. ??? We only need to care about the RHS throwing. For aggregate @@ -2350,21 +2376,21 @@ stmt_kills_ref_p_1 (gimple stmt, ao_ref } } return false; } bool stmt_kills_ref_p (gimple stmt, tree ref) { ao_ref r; ao_ref_init (&r, ref); - return stmt_kills_ref_p_1 (stmt, &r); + return stmt_kills_ref_p (stmt, &r); } /* Walk the virtual use-def chain of VUSE until hitting the virtual operand TARGET or a statement clobbering the memory reference REF in which case false is returned. The walk starts with VUSE, one argument of PHI. */ static bool maybe_skip_until (gimple phi, tree target, ao_ref *ref, tree vuse, unsigned int *cnt, bitmap *visited, Index: gcc/tree-ssa-alias.h =================================================================== --- gcc/tree-ssa-alias.h (revision 214210) +++ gcc/tree-ssa-alias.h (working copy) @@ -94,31 +94,34 @@ struct ao_ref /* In tree-ssa-alias.c */ extern void ao_ref_init (ao_ref *, tree); extern void ao_ref_init_from_ptr_and_size (ao_ref *, tree, tree); extern tree ao_ref_base (ao_ref *); extern alias_set_type ao_ref_alias_set (ao_ref *); extern bool ptr_deref_may_alias_global_p (tree); extern bool ptr_derefs_may_alias_p (tree, tree); extern bool ref_may_alias_global_p (tree); +extern bool ref_may_alias_global_p (ao_ref *); extern bool refs_may_alias_p (tree, tree); extern bool refs_may_alias_p_1 (ao_ref *, ao_ref *, bool); extern bool refs_anti_dependent_p (tree, tree); extern bool refs_output_dependent_p (tree, tree); extern bool ref_maybe_used_by_stmt_p (gimple, tree); +extern bool ref_maybe_used_by_stmt_p (gimple, ao_ref *); extern bool stmt_may_clobber_global_p 
(gimple); extern bool stmt_may_clobber_ref_p (gimple, tree); extern bool stmt_may_clobber_ref_p_1 (gimple, ao_ref *); extern bool call_may_clobber_ref_p (gimple, tree); extern bool call_may_clobber_ref_p_1 (gimple, ao_ref *); extern bool stmt_kills_ref_p (gimple, tree); +extern bool stmt_kills_ref_p (gimple, ao_ref *); extern tree get_continuation_for_phi (gimple, ao_ref *, unsigned int *, bitmap *, bool, void *(*)(ao_ref *, tree, void *, bool), void *); extern void *walk_non_aliased_vuses (ao_ref *, tree, void *(*)(ao_ref *, tree, unsigned int, void *), void *(*)(ao_ref *, tree, void *, bool), void *); extern unsigned int walk_aliased_vdefs (ao_ref *, tree, Index: gcc/tree-ssa-dse.c =================================================================== --- gcc/tree-ssa-dse.c (revision 214210) +++ gcc/tree-ssa-dse.c (working copy) @@ -75,39 +75,32 @@ along with GCC; see the file COPYING3. fact, they are the same transformation applied to different views of the CFG. */ /* Bitmap of blocks that have had EH statements cleaned. We should remove their dead edges eventually. */ static bitmap need_eh_cleanup; /* A helper of dse_optimize_stmt. - Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that - may prove STMT to be dead. + Given a GIMPLE_ASSIGN in STMT that writes to REF, find a candidate + statement *USE_STMT that may prove STMT to be dead. Return TRUE if the above conditions are met, otherwise FALSE. */ static bool -dse_possible_dead_store_p (gimple stmt, gimple *use_stmt) +dse_possible_dead_store_p (ao_ref *ref, gimple stmt, gimple *use_stmt) { gimple temp; unsigned cnt = 0; *use_stmt = NULL; - /* Self-assignments are zombies. */ - if (operand_equal_p (gimple_assign_rhs1 (stmt), gimple_assign_lhs (stmt), 0)) - { - *use_stmt = stmt; - return true; - } - /* Find the first dominated statement that clobbers (part of) the memory stmt stores to with no intermediate statement that may use part of the memory stmt stores. 
That is, find a store that may prove stmt to be a dead store. */ temp = stmt; do { gimple use_stmt, defvar_def; imm_use_iterator ui; bool fail = false; @@ -157,22 +150,21 @@ dse_possible_dead_store_p (gimple stmt, /* Do not consider the PHI as use if it dominates the stmt defining the virtual operand we are processing, we have processed it already in this case. */ if (gimple_bb (defvar_def) != gimple_bb (use_stmt) && !dominated_by_p (CDI_DOMINATORS, gimple_bb (defvar_def), gimple_bb (use_stmt))) temp = use_stmt; } /* If the statement is a use the store is not dead. */ - else if (ref_maybe_used_by_stmt_p (use_stmt, - gimple_assign_lhs (stmt))) + else if (ref_maybe_used_by_stmt_p (use_stmt, ref)) { fail = true; BREAK_FROM_IMM_USE_STMT (ui); } /* If this is a store, remember it or bail out if we have multiple ones (the will be in different CFG parts then). */ else if (gimple_vdef (use_stmt)) { if (temp) { @@ -184,29 +176,29 @@ dse_possible_dead_store_p (gimple stmt, } if (fail) return false; /* If we didn't find any definition this means the store is dead if it isn't a store to global reachable memory. In this case just pretend the stmt makes itself dead. Otherwise fail. */ if (!temp) { - if (stmt_may_clobber_global_p (stmt)) + if (ref_may_alias_global_p (ref)) return false; temp = stmt; break; } } /* Continue walking until we reach a kill. */ - while (!stmt_kills_ref_p (temp, gimple_assign_lhs (stmt))); + while (!stmt_kills_ref_p (temp, ref)); *use_stmt = temp; return true; } /* Attempt to eliminate dead stores in the statement referenced by BSI. A dead store is a store into a memory location which will later be @@ -221,75 +213,113 @@ dse_possible_dead_store_p (gimple stmt, static void dse_optimize_stmt (gimple_stmt_iterator *gsi) { gimple stmt = gsi_stmt (*gsi); /* If this statement has no virtual defs, then there is nothing to do. */ if (!gimple_vdef (stmt)) return; - /* We know we have virtual definitions. 
If this is a GIMPLE_ASSIGN - that's not also a function call, then record it into our table. */ - if (is_gimple_call (stmt) && gimple_call_fndecl (stmt)) - return; - /* Don't return early on *this_2(D) ={v} {CLOBBER}. */ if (gimple_has_volatile_ops (stmt) && (!gimple_clobber_p (stmt) || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF)) return; + /* We know we have virtual definitions. We can handle assignments and + some builtin calls. */ + if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)) + { + switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))) + { + case BUILT_IN_MEMCPY: + case BUILT_IN_MEMMOVE: + case BUILT_IN_MEMSET: + { + gimple use_stmt; + ao_ref ref; + tree size = NULL_TREE; + if (gimple_call_num_args (stmt) == 3) + size = gimple_call_arg (stmt, 2); + tree ptr = gimple_call_arg (stmt, 0); + ao_ref_init_from_ptr_and_size (&ref, ptr, size); + if (!dse_possible_dead_store_p (&ref, stmt, &use_stmt)) + return; + + if (dump_file && (dump_flags & TDF_DETAILS)) + { + fprintf (dump_file, " Deleted dead call '"); + print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0); + fprintf (dump_file, "'\n"); + } + + tree lhs = gimple_call_lhs (stmt); + if (lhs) + { + gimple new_stmt = gimple_build_assign (lhs, ptr); + unlink_stmt_vdef (stmt); + if (gsi_replace (gsi, new_stmt, true)) + bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index); + } + else + { + /* Then we need to fix the operand of the consuming stmt. */ + unlink_stmt_vdef (stmt); + + /* Remove the dead store. */ + if (gsi_remove (gsi, true)) + bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index); + } + break; + } + default: + return; + } + } + if (is_gimple_assign (stmt)) { gimple use_stmt; - if (!dse_possible_dead_store_p (stmt, &use_stmt)) - return; + /* Self-assignments are zombies. 
*/ + if (operand_equal_p (gimple_assign_rhs1 (stmt), + gimple_assign_lhs (stmt), 0)) + use_stmt = stmt; + else + { + ao_ref ref; + ao_ref_init (&ref, gimple_assign_lhs (stmt)); + if (!dse_possible_dead_store_p (&ref, stmt, &use_stmt)) + return; + } /* Now we know that use_stmt kills the LHS of stmt. */ /* But only remove *this_2(D) ={v} {CLOBBER} if killed by another clobber stmt. */ if (gimple_clobber_p (stmt) && !gimple_clobber_p (use_stmt)) return; - basic_block bb; - - /* If use_stmt is or might be a nop assignment, e.g. for - struct { ... } S a, b, *p; ... - b = a; b = b; - or - b = a; b = *p; where p might be &b, - or - *p = a; *p = b; where p might be &b, - or - *p = *u; *p = *v; where p might be v, then USE_STMT - acts as a use as well as definition, so store in STMT - is not dead. */ - if (stmt != use_stmt - && ref_maybe_used_by_stmt_p (use_stmt, gimple_assign_lhs (stmt))) - return; - if (dump_file && (dump_flags & TDF_DETAILS)) { fprintf (dump_file, " Deleted dead store '"); print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0); fprintf (dump_file, "'\n"); } /* Then we need to fix the operand of the consuming stmt. */ unlink_stmt_vdef (stmt); /* Remove the dead store. */ - bb = gimple_bb (stmt); + basic_block bb = gimple_bb (stmt); if (gsi_remove (gsi, true)) bitmap_set_bit (need_eh_cleanup, bb->index); /* And release any SSA_NAMEs set in this statement back to the SSA_NAME manager. */ release_defs (stmt); } } class dse_dom_walker : public dom_walker