* tree-into-ssa.c (enum need_phi_state): Relocate from tree-flow.h.
(dump_decl_set): Move to gimple.c.
* gimple.h: Don't include tree-ssa-operands.h.
(dump_decl_set): Add prototype.
(gimple_vuse_op, gimple_vdef_op, update_stmt, update_stmt_if_modified):
Move to gimple-ssa.h.
* gimple.c (get_base_loadstore, walk_stmt_load_store_addr_ops,
gimple_ior_addresses_taken_1, gimple_ior_addresses_taken): Move to
gimple-ssa.c.
(walk_stmt_load_store_ops): Move to gimple-ssa.h.
(dump_decl_set): Relocate here.
* gimple-ssa.h: New file.
(walk_stmt_load_store_ops): Relocate here from gimple.c.
(gimple_vuse_op, gimple_vdef_op, update_stmt, update_stmt_if_modified):
Relocate from gimple.h.
* gimple-ssa.c: New file.
(get_base_loadstore, walk_stmt_load_store_addr_ops,
gimple_ior_addresses_taken_1, gimple_ior_addresses_taken): Relocate from
gimple.c.
* tree-cfg.c (has_zero_uses_1, single_imm_use_1): Move to...
* tree-ssa-operands.c (swap_ssa_operands): Rename from
swap_tree_operands and remove non-ssa path.
(has_zero_uses_1, single_imm_use_1): Relocate from tree-cfg.c.
* tree-ssa-reassoc.c (linearize_expr_tree, repropagate_negates): Use
swap_ssa_operands.
* tree-vect-loop.c (destroy_loop_vec_info, vect_is_slp_reduction,
vect_is_simple_reduction_1): Use swap_ssa_operands.
* tree-flow.h: Move various prototypes to tree-phinodes.h.
(enum need_phi_state): Move to tree-into-ssa.c.
(struct immediate_use_iterator_d, FOR_EACH_IMM_*,
BREAK_FROM_IMM_USE_STMT): Move to ssa-iterators.h.
(swap_tree_operands): Rename and move prototype to tree-ssa-operands.h.
* tree-flow-inline.h (delink_imm_use, link_imm_use_to_list,
link_imm_use, set_ssa_use_from_ptr, link_imm_use_stmt, relink_imm_use,
relink_imm_use_stmt, end_readonly_imm_use_p, first_readonly_imm_use,
next_readonly_imm_use, has_zero_uses, has_single_use, single_imm_use,
num_imm_uses): Move to ssa-iterators.h.
(get_use_from_ptr, get_def_from_ptr): Move to tree-ssa-operands.h.
(gimple_phi_arg_imm_use_ptr, phi_arg_index_from_use): Move to
tree-phinodes.h.
(op_iter_done, op_iter_next_def, op_iter_next_tree,
clear_and_done_ssa_iter, op_iter_init, op_iter_init_use,
op_iter_init_def, op_iter_init_tree, single_ssa_tree_operand,
single_ssa_use_operand, single_ssa_def_operand, zero_ssa_operands,
num_ssa_operands, delink_stmt_imm_use, single_phi_def,
op_iter_init_phiuse, op_iter_init_phidef, end_imm_use_stmt_p,
end_imm_use_stmt_traverse, move_use_after_head, link_use_stmts_after,
first_imm_use_stmt, next_imm_use_stmt, first_imm_use_on_stmt,
end_imm_use_on_stmt_p, next_imm_use_on_stmt): Move to ssa-iterators.h.
(gimple_phi_arg_def, gimple_phi_arg_def_ptr, gimple_phi_arg_edge,
gimple_phi_arg_location, gimple_phi_arg_location_from_edge,
gimple_phi_arg_set_location, gimple_phi_arg_has_location, phi_nodes,
phi_nodes_ptr, set_phi_nodes, phi_ssa_name_p): Move to tree-phinodes.h.
* tree-ssa-operands.h (enum ssa_op_iter_type,
struct ssa_operand_iterator_d, SSA_OP*, FOR_EACH_SSA*, SINGLE_SSA*,
ZERO_SSA_OPERANDS, NUM_SSA_OPERANDS): Move to ssa-iterators.h.
(dump_decl_set): Remove prototype.
(get_use_from_ptr, get_def_from_ptr): Relocate from tree-flow-inline.h.
* tree-phinodes.h: New file. Move some prototypes from tree-flow.h.
(phi_ssa_name_p, phi_nodes, phi_nodes_ptr, set_phi_nodes,
gimple_phi_arg_def, gimple_phi_arg_def_ptr, gimple_phi_arg_edge,
gimple_phi_arg_location, gimple_phi_arg_location_from_edge,
gimple_phi_arg_set_location, gimple_phi_arg_has_location,
gimple_phi_arg_imm_use_ptr, phi_arg_index_from_use): Relocate from
tree-flow-inline.h.
* tree-ssa.h: Add tree-phinodes.h, gimple-ssa.h, ssa-iterators.h to
include list. Temporarily add gimple.h to include list.
* ssa-iterators.h: New file.
(struct immediate_use_iterator_d, FOR_EACH_IMM_*,
BREAK_FROM_IMM_USE_STMT): Relocate from tree-flow.h.
(enum ssa_op_iter_type, struct ssa_operand_iterator_d, SSA_OP*,
FOR_EACH_SSA*, SINGLE_SSA*, ZERO_SSA_OPERANDS, NUM_SSA_OPERANDS):
Relocate from tree-ssa-operands.h.
(delink_imm_use, link_imm_use_to_list, link_imm_use,
set_ssa_use_from_ptr, link_imm_use_stmt, relink_imm_use,
relink_imm_use_stmt, end_readonly_imm_use_p, first_readonly_imm_use,
next_readonly_imm_use, has_zero_uses, has_single_use, single_imm_use,
num_imm_uses, get_use_from_ptr, get_def_from_ptr,
phi_arg_index_from_use, op_iter_done, op_iter_next_def,
op_iter_next_tree, clear_and_done_ssa_iter, op_iter_init,
op_iter_init_use, op_iter_init_def, op_iter_init_tree,
single_ssa_tree_operand, single_ssa_use_operand, single_ssa_def_operand,
zero_ssa_operands, num_ssa_operands, delink_stmt_imm_use,
single_phi_def, op_iter_init_phiuse, op_iter_init_phidef,
end_imm_use_stmt_p, end_imm_use_stmt_traverse, move_use_after_head,
link_use_stmts_after, first_imm_use_stmt, next_imm_use_stmt,
first_imm_use_on_stmt, end_imm_use_on_stmt_p, next_imm_use_on_stmt):
Relocate from tree-flow-inline.h.
* tree-outof-ssa.h: Change _SSAEXPAND_H macro to GCC_TREE_OUTOF_SSA_H.
* Makefile.in: Add gimple-ssa.o to OBJS.
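
With this split, a client picks up the SSA-level gimple helpers through
tree-ssa.h rather than through gimple.h alone.  As a rough sketch of a
hypothetical client translation unit under that assumption (the include
order mirrors the one used by the new gimple-ssa.c below):

  #include "config.h"
  #include "system.h"
  #include "coretypes.h"
  #include "tree.h"
  #include "gimple.h"
  #include "tree-ssa.h"   /* Pulls in tree-phinodes.h, gimple-ssa.h and
                             ssa-iterators.h per this patch.  */
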
===================================================================
*************** struct mark_def_sites_global_data
bitmap kills;
};
+ /* It is advantageous to avoid things like life analysis for variables which
+ do not need PHI nodes. This enum describes whether or not a particular
+ variable may need a PHI node. */
+
+ enum need_phi_state {
+ /* This is the default. If we are still in this state after finding
+ all the definition and use sites, then we will assume the variable
+ needs PHI nodes. This is probably an overly conservative assumption. */
+ NEED_PHI_STATE_UNKNOWN,
+
+ /* This state indicates that we have seen one or more sets of the
+ variable in a single basic block and that the sets dominate all
+ uses seen so far. If after finding all definition and use sites
+ we are still in this state, then the variable does not need any
+ PHI nodes. */
+ NEED_PHI_STATE_NO,
+
+ /* This state indicates that we have either seen multiple definitions of
+ the variable in multiple blocks, or that we encountered a use in a
+ block that was not dominated by the block containing the set(s) of
+ this variable. This variable is assumed to need PHI nodes. */
+ NEED_PHI_STATE_MAYBE
+ };
+
/* Information stored for both SSA names and decls. */
struct common_info_d
{
*************** rewrite_dom_walker::after_dom_children (
/* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
- void
- dump_decl_set (FILE *file, bitmap set)
- {
- if (set)
- {
- bitmap_iterator bi;
- unsigned i;
-
- fprintf (file, "{ ");
-
- EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
- {
- fprintf (file, "D.%u", i);
- fprintf (file, " ");
- }
-
- fprintf (file, "}");
- }
- else
- fprintf (file, "NIL");
- }
-
-
- /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
-
DEBUG_FUNCTION void
debug_decl_set (bitmap set)
{
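
To illustrate the state machine described by the relocated enum, here is a
hypothetical transition for the case where a use of a variable shows up in a
block not dominated by the block holding its definitions (the INFO, BB and
DEF_BB names are illustrative, not the actual tree-into-ssa.c code):

  if (info->need_phi_state == NEED_PHI_STATE_NO
      && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    info->need_phi_state = NEED_PHI_STATE_MAYBE;
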
===================================================================
*************** along with GCC; see the file COPYING3.
#include "ggc.h"
#include "basic-block.h"
#include "tree.h"
- #include "tree-ssa-operands.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
*************** extern void free_gimple_type_tables (voi
extern tree gimple_unsigned_type (tree);
extern tree gimple_signed_type (tree);
extern alias_set_type gimple_get_alias_set (tree);
- extern bool walk_stmt_load_store_addr_ops (gimple, void *,
- bool (*)(gimple, tree, void *),
- bool (*)(gimple, tree, void *),
- bool (*)(gimple, tree, void *));
- extern bool walk_stmt_load_store_ops (gimple, void *,
- bool (*)(gimple, tree, void *),
- bool (*)(gimple, tree, void *));
- extern bool gimple_ior_addresses_taken (bitmap, gimple);
extern bool gimple_call_builtin_p (gimple, enum built_in_class);
extern bool gimple_call_builtin_p (gimple, enum built_in_function);
extern bool gimple_asm_clobbers_memory_p (const_gimple);
*************** extern void omp_firstprivatize_variable
extern tree gimple_boolify (tree);
extern gimple_predicate rhs_predicate_for (tree);
extern tree canonicalize_cond_expr_cond (tree);
+ extern void dump_decl_set (FILE *, bitmap);
/* In omp-low.c. */
extern tree omp_reduction_init (tree, tree);
*************** gimple_set_use_ops (gimple g, struct use
}
- /* Return the set of VUSE operand for statement G. */
-
- static inline use_operand_p
- gimple_vuse_op (const_gimple g)
- {
- struct use_optype_d *ops;
- if (!gimple_has_mem_ops (g))
- return NULL_USE_OPERAND_P;
- ops = g->gsops.opbase.use_ops;
- if (ops
- && USE_OP_PTR (ops)->use == &g->gsmembase.vuse)
- return USE_OP_PTR (ops);
- return NULL_USE_OPERAND_P;
- }
-
- /* Return the set of VDEF operand for statement G. */
-
- static inline def_operand_p
- gimple_vdef_op (gimple g)
- {
- if (!gimple_has_mem_ops (g))
- return NULL_DEF_OPERAND_P;
- if (g->gsmembase.vdef)
- return &g->gsmembase.vdef;
- return NULL_DEF_OPERAND_P;
- }
-
-
/* Return the single VUSE operand of the statement G. */
static inline tree
*************** gimple_expr_code (const_gimple stmt)
}
- /* Mark statement S as modified, and update it. */
-
- static inline void
- update_stmt (gimple s)
- {
- if (gimple_has_ops (s))
- {
- gimple_set_modified (s, true);
- update_stmt_operands (s);
- }
- }
-
- /* Update statement S if it has been optimized. */
-
- static inline void
- update_stmt_if_modified (gimple s)
- {
- if (gimple_modified_p (s))
- update_stmt_operands (s);
- }
-
/* Return true if statement STMT contains volatile operands. */
static inline bool
===================================================================
*************** gimple_get_alias_set (tree t)
}
- /* From a tree operand OP return the base of a load or store operation
- or NULL_TREE if OP is not a load or a store. */
-
- static tree
- get_base_loadstore (tree op)
- {
- while (handled_component_p (op))
- op = TREE_OPERAND (op, 0);
- if (DECL_P (op)
- || INDIRECT_REF_P (op)
- || TREE_CODE (op) == MEM_REF
- || TREE_CODE (op) == TARGET_MEM_REF)
- return op;
- return NULL_TREE;
- }
-
- /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
- VISIT_ADDR if non-NULL on loads, store and address-taken operands
- passing the STMT, the base of the operand and DATA to it. The base
- will be either a decl, an indirect reference (including TARGET_MEM_REF)
- or the argument of an address expression.
- Returns the results of these callbacks or'ed. */
-
- bool
- walk_stmt_load_store_addr_ops (gimple stmt, void *data,
- bool (*visit_load)(gimple, tree, void *),
- bool (*visit_store)(gimple, tree, void *),
- bool (*visit_addr)(gimple, tree, void *))
- {
- bool ret = false;
- unsigned i;
- if (gimple_assign_single_p (stmt))
- {
- tree lhs, rhs;
- if (visit_store)
- {
- lhs = get_base_loadstore (gimple_assign_lhs (stmt));
- if (lhs)
- ret |= visit_store (stmt, lhs, data);
- }
- rhs = gimple_assign_rhs1 (stmt);
- while (handled_component_p (rhs))
- rhs = TREE_OPERAND (rhs, 0);
- if (visit_addr)
- {
- if (TREE_CODE (rhs) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
- else if (TREE_CODE (rhs) == TARGET_MEM_REF
- && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
- else if (TREE_CODE (rhs) == OBJ_TYPE_REF
- && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
- 0), data);
- else if (TREE_CODE (rhs) == CONSTRUCTOR)
- {
- unsigned int ix;
- tree val;
-
- FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
- if (TREE_CODE (val) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
- else if (TREE_CODE (val) == OBJ_TYPE_REF
- && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
- ret |= visit_addr (stmt,
- TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
- 0), data);
- }
- lhs = gimple_assign_lhs (stmt);
- if (TREE_CODE (lhs) == TARGET_MEM_REF
- && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
- }
- if (visit_load)
- {
- rhs = get_base_loadstore (rhs);
- if (rhs)
- ret |= visit_load (stmt, rhs, data);
- }
- }
- else if (visit_addr
- && (is_gimple_assign (stmt)
- || gimple_code (stmt) == GIMPLE_COND))
- {
- for (i = 0; i < gimple_num_ops (stmt); ++i)
- {
- tree op = gimple_op (stmt, i);
- if (op == NULL_TREE)
- ;
- else if (TREE_CODE (op) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
- /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
- tree with two operands. */
- else if (i == 1 && COMPARISON_CLASS_P (op))
- {
- if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
- 0), data);
- if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
- 0), data);
- }
- }
- }
- else if (is_gimple_call (stmt))
- {
- if (visit_store)
- {
- tree lhs = gimple_call_lhs (stmt);
- if (lhs)
- {
- lhs = get_base_loadstore (lhs);
- if (lhs)
- ret |= visit_store (stmt, lhs, data);
- }
- }
- if (visit_load || visit_addr)
- for (i = 0; i < gimple_call_num_args (stmt); ++i)
- {
- tree rhs = gimple_call_arg (stmt, i);
- if (visit_addr
- && TREE_CODE (rhs) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
- else if (visit_load)
- {
- rhs = get_base_loadstore (rhs);
- if (rhs)
- ret |= visit_load (stmt, rhs, data);
- }
- }
- if (visit_addr
- && gimple_call_chain (stmt)
- && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
- data);
- if (visit_addr
- && gimple_call_return_slot_opt_p (stmt)
- && gimple_call_lhs (stmt) != NULL_TREE
- && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
- ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
- }
- else if (gimple_code (stmt) == GIMPLE_ASM)
- {
- unsigned noutputs;
- const char *constraint;
- const char **oconstraints;
- bool allows_mem, allows_reg, is_inout;
- noutputs = gimple_asm_noutputs (stmt);
- oconstraints = XALLOCAVEC (const char *, noutputs);
- if (visit_store || visit_addr)
- for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
- {
- tree link = gimple_asm_output_op (stmt, i);
- tree op = get_base_loadstore (TREE_VALUE (link));
- if (op && visit_store)
- ret |= visit_store (stmt, op, data);
- if (visit_addr)
- {
- constraint = TREE_STRING_POINTER
- (TREE_VALUE (TREE_PURPOSE (link)));
- oconstraints[i] = constraint;
- parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
- &allows_reg, &is_inout);
- if (op && !allows_reg && allows_mem)
- ret |= visit_addr (stmt, op, data);
- }
- }
- if (visit_load || visit_addr)
- for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
- {
- tree link = gimple_asm_input_op (stmt, i);
- tree op = TREE_VALUE (link);
- if (visit_addr
- && TREE_CODE (op) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
- else if (visit_load || visit_addr)
- {
- op = get_base_loadstore (op);
- if (op)
- {
- if (visit_load)
- ret |= visit_load (stmt, op, data);
- if (visit_addr)
- {
- constraint = TREE_STRING_POINTER
- (TREE_VALUE (TREE_PURPOSE (link)));
- parse_input_constraint (&constraint, 0, 0, noutputs,
- 0, oconstraints,
- &allows_mem, &allows_reg);
- if (!allows_reg && allows_mem)
- ret |= visit_addr (stmt, op, data);
- }
- }
- }
- }
- }
- else if (gimple_code (stmt) == GIMPLE_RETURN)
- {
- tree op = gimple_return_retval (stmt);
- if (op)
- {
- if (visit_addr
- && TREE_CODE (op) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
- else if (visit_load)
- {
- op = get_base_loadstore (op);
- if (op)
- ret |= visit_load (stmt, op, data);
- }
- }
- }
- else if (visit_addr
- && gimple_code (stmt) == GIMPLE_PHI)
- {
- for (i = 0; i < gimple_phi_num_args (stmt); ++i)
- {
- tree op = PHI_ARG_DEF (stmt, i);
- if (TREE_CODE (op) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
- }
- }
- else if (visit_addr
- && gimple_code (stmt) == GIMPLE_GOTO)
- {
- tree op = gimple_goto_dest (stmt);
- if (TREE_CODE (op) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
- }
-
- return ret;
- }
-
- /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
- should make a faster clone for this case. */
-
- bool
- walk_stmt_load_store_ops (gimple stmt, void *data,
- bool (*visit_load)(gimple, tree, void *),
- bool (*visit_store)(gimple, tree, void *))
- {
- return walk_stmt_load_store_addr_ops (stmt, data,
- visit_load, visit_store, NULL);
- }
-
- /* Helper for gimple_ior_addresses_taken_1. */
-
- static bool
- gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
- tree addr, void *data)
- {
- bitmap addresses_taken = (bitmap)data;
- addr = get_base_address (addr);
- if (addr
- && DECL_P (addr))
- {
- bitmap_set_bit (addresses_taken, DECL_UID (addr));
- return true;
- }
- return false;
- }
-
- /* Set the bit for the uid of all decls that have their address taken
- in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
- were any in this stmt. */
-
- bool
- gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
- {
- return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
- gimple_ior_addresses_taken_1);
- }
-
-
/* Return a printable name for symbol DECL. */
const char *
*************** types_compatible_p (tree type1, tree typ
&& useless_type_conversion_p (type2, type1)));
}
+ /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
+
+ void
+ dump_decl_set (FILE *file, bitmap set)
+ {
+ if (set)
+ {
+ bitmap_iterator bi;
+ unsigned i;
+
+ fprintf (file, "{ ");
+
+ EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
+ {
+ fprintf (file, "D.%u", i);
+ fprintf (file, " ");
+ }
+
+ fprintf (file, "}");
+ }
+ else
+ fprintf (file, "NIL");
+ }
#include "gt-gimple.h"
===================================================================
***************
+ /* Header file for routines that straddle the border between GIMPLE and
+ SSA in gimple.
+ Copyright (C) 2009-2013 Free Software Foundation, Inc.
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3, or (at your option)
+ any later version.
+
+ GCC is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3. If not see
+ <http://www.gnu.org/licenses/>. */
+
+ #ifndef GCC_GIMPLE_SSA_H
+ #define GCC_GIMPLE_SSA_H
+
+ extern bool walk_stmt_load_store_addr_ops (gimple, void *,
+ bool (*)(gimple, tree, void *),
+ bool (*)(gimple, tree, void *),
+ bool (*)(gimple, tree, void *));
+ extern bool gimple_ior_addresses_taken (bitmap, gimple);
+
+ /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
+ should make a faster clone for this case. */
+
+ static inline bool
+ walk_stmt_load_store_ops (gimple stmt, void *data,
+ bool (*visit_load)(gimple, tree, void *),
+ bool (*visit_store)(gimple, tree, void *))
+ {
+ return walk_stmt_load_store_addr_ops (stmt, data,
+ visit_load, visit_store, NULL);
+ }
+
+ /* Return the set of VUSE operand for statement G. */
+ 
+ static inline use_operand_p
+ gimple_vuse_op (const_gimple g)
+ {
+ struct use_optype_d *ops;
+ if (!gimple_has_mem_ops (g))
+ return NULL_USE_OPERAND_P;
+ ops = g->gsops.opbase.use_ops;
+ if (ops
+ && USE_OP_PTR (ops)->use == &g->gsmembase.vuse)
+ return USE_OP_PTR (ops);
+ return NULL_USE_OPERAND_P;
+ }
+
+ /* Return the set of VDEF operand for statement G. */
+
+ static inline def_operand_p
+ gimple_vdef_op (gimple g)
+ {
+ if (!gimple_has_mem_ops (g))
+ return NULL_DEF_OPERAND_P;
+ if (g->gsmembase.vdef)
+ return &g->gsmembase.vdef;
+ return NULL_DEF_OPERAND_P;
+ }
+
+ /* Mark statement S as modified, and update it. */
+
+ static inline void
+ update_stmt (gimple s)
+ {
+ if (gimple_has_ops (s))
+ {
+ gimple_set_modified (s, true);
+ update_stmt_operands (s);
+ }
+ }
+
+ /* Update statement S if it has been optimized. */
+
+ static inline void
+ update_stmt_if_modified (gimple s)
+ {
+ if (gimple_modified_p (s))
+ update_stmt_operands (s);
+ }
+
+
+ #endif /* GCC_GIMPLE_SSA_H */
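
As a usage sketch for the inline wrapper above, a hypothetical visit_load
callback that records every decl a statement reads from; the callback name
and the bitmap payload are assumptions, but the signatures match the
prototypes in this header:

  /* Record the DECL_UID of any decl that STMT loads from.  The boolean
     results of all callbacks are OR'd into the walker's return value.  */
  static bool
  record_load (gimple stmt ATTRIBUTE_UNUSED, tree base, void *data)
  {
    bitmap loaded = (bitmap) data;
    if (DECL_P (base))
      bitmap_set_bit (loaded, DECL_UID (base));
    return false;
  }

  /* ...and later, for some statement STMT and bitmap LOADED:  */
  walk_stmt_load_store_ops (stmt, loaded, record_load, NULL);
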
===================================================================
***************
+ /* Routines that straddle the border between GIMPLE and SSA in gimple.
+ Copyright (C) 2004-2013 Free Software Foundation, Inc.
+ Contributed by Andrew Macleod <amacleod@redhat.com>
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3, or (at your option)
+ any later version.
+
+ GCC is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3. If not see
+ <http://www.gnu.org/licenses/>. */
+
+ #include "config.h"
+ #include "system.h"
+ #include "coretypes.h"
+ #include "tree.h"
+ #include "gimple.h"
+ #include "tree-ssa.h"
+ #include "gimple-ssa.h"
+
+ /* From a tree operand OP return the base of a load or store operation
+ or NULL_TREE if OP is not a load or a store. */
+
+ static tree
+ get_base_loadstore (tree op)
+ {
+ while (handled_component_p (op))
+ op = TREE_OPERAND (op, 0);
+ if (DECL_P (op)
+ || INDIRECT_REF_P (op)
+ || TREE_CODE (op) == MEM_REF
+ || TREE_CODE (op) == TARGET_MEM_REF)
+ return op;
+ return NULL_TREE;
+ }
+
+
+ /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
+ VISIT_ADDR if non-NULL on loads, store and address-taken operands
+ passing the STMT, the base of the operand and DATA to it. The base
+ will be either a decl, an indirect reference (including TARGET_MEM_REF)
+ or the argument of an address expression.
+ Returns the results of these callbacks or'ed. */
+
+ bool
+ walk_stmt_load_store_addr_ops (gimple stmt, void *data,
+ bool (*visit_load)(gimple, tree, void *),
+ bool (*visit_store)(gimple, tree, void *),
+ bool (*visit_addr)(gimple, tree, void *))
+ {
+ bool ret = false;
+ unsigned i;
+ if (gimple_assign_single_p (stmt))
+ {
+ tree lhs, rhs;
+ if (visit_store)
+ {
+ lhs = get_base_loadstore (gimple_assign_lhs (stmt));
+ if (lhs)
+ ret |= visit_store (stmt, lhs, data);
+ }
+ rhs = gimple_assign_rhs1 (stmt);
+ while (handled_component_p (rhs))
+ rhs = TREE_OPERAND (rhs, 0);
+ if (visit_addr)
+ {
+ if (TREE_CODE (rhs) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
+ else if (TREE_CODE (rhs) == TARGET_MEM_REF
+ && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
+ else if (TREE_CODE (rhs) == OBJ_TYPE_REF
+ && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
+ 0), data);
+ else if (TREE_CODE (rhs) == CONSTRUCTOR)
+ {
+ unsigned int ix;
+ tree val;
+
+ FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
+ if (TREE_CODE (val) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
+ else if (TREE_CODE (val) == OBJ_TYPE_REF
+ && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
+ ret |= visit_addr (stmt,
+ TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
+ 0), data);
+ }
+ lhs = gimple_assign_lhs (stmt);
+ if (TREE_CODE (lhs) == TARGET_MEM_REF
+ && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
+ }
+ if (visit_load)
+ {
+ rhs = get_base_loadstore (rhs);
+ if (rhs)
+ ret |= visit_load (stmt, rhs, data);
+ }
+ }
+ else if (visit_addr
+ && (is_gimple_assign (stmt)
+ || gimple_code (stmt) == GIMPLE_COND))
+ {
+ for (i = 0; i < gimple_num_ops (stmt); ++i)
+ {
+ tree op = gimple_op (stmt, i);
+ if (op == NULL_TREE)
+ ;
+ else if (TREE_CODE (op) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
+ /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
+ tree with two operands. */
+ else if (i == 1 && COMPARISON_CLASS_P (op))
+ {
+ if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
+ 0), data);
+ if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
+ 0), data);
+ }
+ }
+ }
+ else if (is_gimple_call (stmt))
+ {
+ if (visit_store)
+ {
+ tree lhs = gimple_call_lhs (stmt);
+ if (lhs)
+ {
+ lhs = get_base_loadstore (lhs);
+ if (lhs)
+ ret |= visit_store (stmt, lhs, data);
+ }
+ }
+ if (visit_load || visit_addr)
+ for (i = 0; i < gimple_call_num_args (stmt); ++i)
+ {
+ tree rhs = gimple_call_arg (stmt, i);
+ if (visit_addr
+ && TREE_CODE (rhs) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
+ else if (visit_load)
+ {
+ rhs = get_base_loadstore (rhs);
+ if (rhs)
+ ret |= visit_load (stmt, rhs, data);
+ }
+ }
+ if (visit_addr
+ && gimple_call_chain (stmt)
+ && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
+ data);
+ if (visit_addr
+ && gimple_call_return_slot_opt_p (stmt)
+ && gimple_call_lhs (stmt) != NULL_TREE
+ && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
+ ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
+ }
+ else if (gimple_code (stmt) == GIMPLE_ASM)
+ {
+ unsigned noutputs;
+ const char *constraint;
+ const char **oconstraints;
+ bool allows_mem, allows_reg, is_inout;
+ noutputs = gimple_asm_noutputs (stmt);
+ oconstraints = XALLOCAVEC (const char *, noutputs);
+ if (visit_store || visit_addr)
+ for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
+ {
+ tree link = gimple_asm_output_op (stmt, i);
+ tree op = get_base_loadstore (TREE_VALUE (link));
+ if (op && visit_store)
+ ret |= visit_store (stmt, op, data);
+ if (visit_addr)
+ {
+ constraint = TREE_STRING_POINTER
+ (TREE_VALUE (TREE_PURPOSE (link)));
+ oconstraints[i] = constraint;
+ parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
+ &allows_reg, &is_inout);
+ if (op && !allows_reg && allows_mem)
+ ret |= visit_addr (stmt, op, data);
+ }
+ }
+ if (visit_load || visit_addr)
+ for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
+ {
+ tree link = gimple_asm_input_op (stmt, i);
+ tree op = TREE_VALUE (link);
+ if (visit_addr
+ && TREE_CODE (op) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
+ else if (visit_load || visit_addr)
+ {
+ op = get_base_loadstore (op);
+ if (op)
+ {
+ if (visit_load)
+ ret |= visit_load (stmt, op, data);
+ if (visit_addr)
+ {
+ constraint = TREE_STRING_POINTER
+ (TREE_VALUE (TREE_PURPOSE (link)));
+ parse_input_constraint (&constraint, 0, 0, noutputs,
+ 0, oconstraints,
+ &allows_mem, &allows_reg);
+ if (!allows_reg && allows_mem)
+ ret |= visit_addr (stmt, op, data);
+ }
+ }
+ }
+ }
+ }
+ else if (gimple_code (stmt) == GIMPLE_RETURN)
+ {
+ tree op = gimple_return_retval (stmt);
+ if (op)
+ {
+ if (visit_addr
+ && TREE_CODE (op) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
+ else if (visit_load)
+ {
+ op = get_base_loadstore (op);
+ if (op)
+ ret |= visit_load (stmt, op, data);
+ }
+ }
+ }
+ else if (visit_addr
+ && gimple_code (stmt) == GIMPLE_PHI)
+ {
+ for (i = 0; i < gimple_phi_num_args (stmt); ++i)
+ {
+ tree op = PHI_ARG_DEF (stmt, i);
+ if (TREE_CODE (op) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
+ }
+ }
+ else if (visit_addr
+ && gimple_code (stmt) == GIMPLE_GOTO)
+ {
+ tree op = gimple_goto_dest (stmt);
+ if (TREE_CODE (op) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
+ }
+
+ return ret;
+ }
+
+ /* Helper for gimple_ior_addresses_taken_1. */
+
+ static bool
+ gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
+ tree addr, void *data)
+ {
+ bitmap addresses_taken = (bitmap)data;
+ addr = get_base_address (addr);
+ if (addr
+ && DECL_P (addr))
+ {
+ bitmap_set_bit (addresses_taken, DECL_UID (addr));
+ return true;
+ }
+ return false;
+ }
+
+ /* Set the bit for the uid of all decls that have their address taken
+ in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
+ were any in this stmt. */
+
+ bool
+ gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
+ {
+ return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
+ gimple_ior_addresses_taken_1);
+ }
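
A minimal caller sketch for gimple_ior_addresses_taken as defined above; the
basic block walk and the TAKEN bitmap are assumed for illustration:

  bitmap taken = BITMAP_ALLOC (NULL);
  gimple_stmt_iterator gsi;

  /* Collect the DECL_UIDs of every decl whose address is taken in BB.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    gimple_ior_addresses_taken (taken, gsi_stmt (gsi));
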
===================================================================
*************** gimple_can_merge_blocks_p (basic_block a
return true;
}
- /* Return true if the var whose chain of uses starts at PTR has no
- nondebug uses. */
- bool
- has_zero_uses_1 (const ssa_use_operand_t *head)
- {
- const ssa_use_operand_t *ptr;
-
- for (ptr = head->next; ptr != head; ptr = ptr->next)
- if (!is_gimple_debug (USE_STMT (ptr)))
- return false;
-
- return true;
- }
-
- /* Return true if the var whose chain of uses starts at PTR has a
- single nondebug use. Set USE_P and STMT to that single nondebug
- use, if so, or to NULL otherwise. */
- bool
- single_imm_use_1 (const ssa_use_operand_t *head,
- use_operand_p *use_p, gimple *stmt)
- {
- ssa_use_operand_t *ptr, *single_use = 0;
-
- for (ptr = head->next; ptr != head; ptr = ptr->next)
- if (!is_gimple_debug (USE_STMT (ptr)))
- {
- if (single_use)
- {
- single_use = NULL;
- break;
- }
- single_use = ptr;
- }
-
- if (use_p)
- *use_p = single_use;
-
- if (stmt)
- *stmt = single_use ? single_use->loc.stmt : NULL;
-
- return !!single_use;
- }
-
/* Replaces all uses of NAME by VAL. */
void
===================================================================
*************** update_stmt_operands (gimple stmt)
to test the validity of the swap operation. */
void
! swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
tree op0, op1;
op0 = *exp0;
op1 = *exp1;
! /* If the operand cache is active, attempt to preserve the relative
! positions of these two operands in their respective immediate use
! lists by adjusting their use pointer to point to the new
! operand position. */
! if (ssa_operands_active (cfun) && op0 != op1)
{
use_optype_p use0, use1, ptr;
use0 = use1 = NULL;
to test the validity of the swap operation. */
void
! swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1)
{
tree op0, op1;
op0 = *exp0;
op1 = *exp1;
! gcc_checking_assert (ssa_operands_active (cfun));
!
! if (op0 != op1)
{
+ 	  /* Attempt to preserve the relative positions of these two operands in
+ 	     their respective immediate use lists by adjusting their use pointer
+ 	     to point to the new operand position. */
use_optype_p use0, use1, ptr;
use0 = use1 = NULL;
*************** swap_tree_operands (gimple stmt, tree *e
USE_OP_PTR (use0)->use = exp1;
if (use1)
USE_OP_PTR (use1)->use = exp0;
- }
! /* Now swap the data. */
! *exp0 = op1;
! *exp1 = op0;
}
USE_OP_PTR (use0)->use = exp1;
if (use1)
USE_OP_PTR (use1)->use = exp0;
! /* Now swap the data. */
! *exp0 = op1;
! *exp1 = op0;
! }
}
*************** unlink_stmt_vdef (gimple stmt)
SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
+
+ /* Return true if the var whose chain of uses starts at PTR has no
+ nondebug uses. */
+ bool
+ has_zero_uses_1 (const ssa_use_operand_t *head)
+ {
+ const ssa_use_operand_t *ptr;
+
+ for (ptr = head->next; ptr != head; ptr = ptr->next)
+ if (!is_gimple_debug (USE_STMT (ptr)))
+ return false;
+
+ return true;
+ }
+
+
+ /* Return true if the var whose chain of uses starts at PTR has a
+ single nondebug use. Set USE_P and STMT to that single nondebug
+ use, if so, or to NULL otherwise. */
+ bool
+ single_imm_use_1 (const ssa_use_operand_t *head,
+ use_operand_p *use_p, gimple *stmt)
+ {
+ ssa_use_operand_t *ptr, *single_use = 0;
+
+ for (ptr = head->next; ptr != head; ptr = ptr->next)
+ if (!is_gimple_debug (USE_STMT (ptr)))
+ {
+ if (single_use)
+ {
+ single_use = NULL;
+ break;
+ }
+ single_use = ptr;
+ }
+
+ if (use_p)
+ *use_p = single_use;
+
+ if (stmt)
+ *stmt = single_use ? single_use->loc.stmt : NULL;
+
+ return single_use;
+ }
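
has_zero_uses_1 and single_imm_use_1 are the out-of-line slow paths behind
the has_zero_uses and single_imm_use inline wrappers this patch moves to
ssa-iterators.h.  A minimal caller sketch, with LHS and the follow-up logic
assumed:

  use_operand_p use_p;
  gimple use_stmt;

  /* If LHS has exactly one nondebug use, USE_STMT is its only real
     consumer and USE_P points at the operand slot within it.  */
  if (single_imm_use (lhs, &use_p, &use_stmt))
    {
      /* ... rewrite or fold the single use here ...  */
    }
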
===================================================================
*************** linearize_expr_tree (vec<operand_entry_t
print_gimple_stmt (dump_file, stmt, 0, 0);
}
! swap_tree_operands (stmt,
! gimple_assign_rhs1_ptr (stmt),
! gimple_assign_rhs2_ptr (stmt));
update_stmt (stmt);
if (dump_file && (dump_flags & TDF_DETAILS))
print_gimple_stmt (dump_file, stmt, 0, 0);
}
! swap_ssa_operands (stmt,
! gimple_assign_rhs1_ptr (stmt),
! gimple_assign_rhs2_ptr (stmt));
update_stmt (stmt);
if (dump_file && (dump_flags & TDF_DETAILS))
*************** repropagate_negates (void)
to force the negated operand to the RHS of the PLUS_EXPR. */
if (gimple_assign_rhs1 (user) == negate)
{
! swap_tree_operands (user,
! gimple_assign_rhs1_ptr (user),
! gimple_assign_rhs2_ptr (user));
}
/* Now transform the PLUS_EXPR into a MINUS_EXPR and replace
to force the negated operand to the RHS of the PLUS_EXPR. */
if (gimple_assign_rhs1 (user) == negate)
{
! swap_ssa_operands (user,
! gimple_assign_rhs1_ptr (user),
! gimple_assign_rhs2_ptr (user));
}
/* Now transform the PLUS_EXPR into a MINUS_EXPR and replace
===================================================================
*************** destroy_loop_vec_info (loop_vec_info loo
|| code == POINTER_PLUS_EXPR
|| code == MULT_EXPR)
&& CONSTANT_CLASS_P (gimple_assign_rhs1 (stmt)))
! swap_tree_operands (stmt,
! gimple_assign_rhs1_ptr (stmt),
! gimple_assign_rhs2_ptr (stmt));
}
/* Free stmt_vec_info. */
|| code == POINTER_PLUS_EXPR
|| code == MULT_EXPR)
&& CONSTANT_CLASS_P (gimple_assign_rhs1 (stmt)))
! swap_ssa_operands (stmt,
! gimple_assign_rhs1_ptr (stmt),
! gimple_assign_rhs2_ptr (stmt));
}
/* Free stmt_vec_info. */
*************** vect_is_slp_reduction (loop_vec_info loo
dump_printf (MSG_NOTE, "\n");
}
! swap_tree_operands (next_stmt,
! gimple_assign_rhs1_ptr (next_stmt),
! gimple_assign_rhs2_ptr (next_stmt));
update_stmt (next_stmt);
if (CONSTANT_CLASS_P (gimple_assign_rhs1 (next_stmt)))
dump_printf (MSG_NOTE, "\n");
}
! swap_ssa_operands (next_stmt,
! gimple_assign_rhs1_ptr (next_stmt),
! gimple_assign_rhs2_ptr (next_stmt));
update_stmt (next_stmt);
if (CONSTANT_CLASS_P (gimple_assign_rhs1 (next_stmt)))
*************** vect_is_simple_reduction_1 (loop_vec_inf
report_vect_op (MSG_NOTE, def_stmt,
"detected reduction: need to swap operands: ");
! swap_tree_operands (def_stmt, gimple_assign_rhs1_ptr (def_stmt),
! gimple_assign_rhs2_ptr (def_stmt));
if (CONSTANT_CLASS_P (gimple_assign_rhs1 (def_stmt)))
LOOP_VINFO_OPERANDS_SWAPPED (loop_info) = true;
report_vect_op (MSG_NOTE, def_stmt,
"detected reduction: need to swap operands: ");
! swap_ssa_operands (def_stmt, gimple_assign_rhs1_ptr (def_stmt),
! gimple_assign_rhs2_ptr (def_stmt));
if (CONSTANT_CLASS_P (gimple_assign_rhs1 (def_stmt)))
LOOP_VINFO_OPERANDS_SWAPPED (loop_info) = true;
===================================================================
*************** typedef struct
!end_htab_p (&(ITER)); \
RESULT = (TYPE) next_htab_element (&(ITER)))
- /* It is advantageous to avoid things like life analysis for variables which
- do not need PHI nodes. This enum describes whether or not a particular
- variable may need a PHI node. */
-
- enum need_phi_state {
- /* This is the default. If we are still in this state after finding
- all the definition and use sites, then we will assume the variable
- needs PHI nodes. This is probably an overly conservative assumption. */
- NEED_PHI_STATE_UNKNOWN,
-
- /* This state indicates that we have seen one or more sets of the
- variable in a single basic block and that the sets dominate all
- uses seen so far. If after finding all definition and use sites
- we are still in this state, then the variable does not need any
- PHI nodes. */
- NEED_PHI_STATE_NO,
-
- /* This state indicates that we have either seen multiple definitions of
- the variable in multiple blocks, or that we encountered a use in a
- block that was not dominated by the block containing the set(s) of
- this variable. This variable is assumed to need PHI nodes. */
- NEED_PHI_STATE_MAYBE
- };
-
-
- /* Immediate use lists are used to directly access all uses for an SSA
- name and get pointers to the statement for each use.
-
- The structure ssa_use_operand_d consists of PREV and NEXT pointers
- to maintain the list. A USE pointer, which points to address where
- the use is located and a LOC pointer which can point to the
- statement where the use is located, or, in the case of the root
- node, it points to the SSA name itself.
-
- The list is anchored by an occurrence of ssa_operand_d *in* the
- ssa_name node itself (named 'imm_uses'). This node is uniquely
- identified by having a NULL USE pointer. and the LOC pointer
- pointing back to the ssa_name node itself. This node forms the
- base for a circular list, and initially this is the only node in
- the list.
-
- Fast iteration allows each use to be examined, but does not allow
- any modifications to the uses or stmts.
-
- Normal iteration allows insertion, deletion, and modification. the
- iterator manages this by inserting a marker node into the list
- immediately before the node currently being examined in the list.
- this marker node is uniquely identified by having null stmt *and* a
- null use pointer.
-
- When iterating to the next use, the iteration routines check to see
- if the node after the marker has changed. if it has, then the node
- following the marker is now the next one to be visited. if not, the
- marker node is moved past that node in the list (visualize it as
- bumping the marker node through the list). this continues until
- the marker node is moved to the original anchor position. the
- marker node is then removed from the list.
-
- If iteration is halted early, the marker node must be removed from
- the list before continuing. */
- typedef struct immediate_use_iterator_d
- {
- /* This is the current use the iterator is processing. */
- ssa_use_operand_t *imm_use;
- /* This marks the last use in the list (use node from SSA_NAME) */
- ssa_use_operand_t *end_p;
- /* This node is inserted and used to mark the end of the uses for a stmt. */
- ssa_use_operand_t iter_node;
- /* This is the next ssa_name to visit. IMM_USE may get removed before
- the next one is traversed to, so it must be cached early. */
- ssa_use_operand_t *next_imm_name;
- } imm_use_iterator;
-
-
- /* Use this iterator when simply looking at stmts. Adding, deleting or
- modifying stmts will cause this iterator to malfunction. */
-
- #define FOR_EACH_IMM_USE_FAST(DEST, ITER, SSAVAR) \
- for ((DEST) = first_readonly_imm_use (&(ITER), (SSAVAR)); \
- !end_readonly_imm_use_p (&(ITER)); \
- (void) ((DEST) = next_readonly_imm_use (&(ITER))))
-
- /* Use this iterator to visit each stmt which has a use of SSAVAR. */
-
- #define FOR_EACH_IMM_USE_STMT(STMT, ITER, SSAVAR) \
- for ((STMT) = first_imm_use_stmt (&(ITER), (SSAVAR)); \
- !end_imm_use_stmt_p (&(ITER)); \
- (void) ((STMT) = next_imm_use_stmt (&(ITER))))
-
- /* Use this to terminate the FOR_EACH_IMM_USE_STMT loop early. Failure to
- do so will result in leaving a iterator marker node in the immediate
- use list, and nothing good will come from that. */
- #define BREAK_FROM_IMM_USE_STMT(ITER) \
- { \
- end_imm_use_stmt_traverse (&(ITER)); \
- break; \
- }
-
-
- /* Use this iterator in combination with FOR_EACH_IMM_USE_STMT to
- get access to each occurrence of ssavar on the stmt returned by
- that iterator.. for instance:
-
- FOR_EACH_IMM_USE_STMT (stmt, iter, var)
- {
- FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
- {
- SET_USE (use_p, blah);
- }
- update_stmt (stmt);
- } */
-
- #define FOR_EACH_IMM_USE_ON_STMT(DEST, ITER) \
- for ((DEST) = first_imm_use_on_stmt (&(ITER)); \
- !end_imm_use_on_stmt_p (&(ITER)); \
- (void) ((DEST) = next_imm_use_on_stmt (&(ITER))))
-
-
-
- static inline void update_stmt (gimple);
static inline int get_lineno (const_gimple);
- /* Accessors for basic block annotations. */
- static inline gimple_seq phi_nodes (const_basic_block);
- static inline void set_phi_nodes (basic_block, gimple_seq);
-
/*---------------------------------------------------------------------------
Global declarations
---------------------------------------------------------------------------*/
*************** extern bool stmt_references_abnormal_ssa
extern tree get_addr_base_and_unit_offset (tree, HOST_WIDE_INT *);
extern void dump_enumerated_decls (FILE *, int);
- /* In tree-phinodes.c */
- extern void reserve_phi_args_for_new_edge (basic_block);
- extern void add_phi_node_to_bb (gimple phi, basic_block bb);
- extern gimple create_phi_node (tree, basic_block);
- extern void add_phi_arg (gimple, tree, edge, source_location);
- extern void remove_phi_args (edge);
- extern void remove_phi_node (gimple_stmt_iterator *, bool);
- extern void remove_phi_nodes (basic_block);
- extern void release_phi_node (gimple);
- extern void phinodes_print_statistics (void);
-
/* In gimple-low.c */
extern void record_vars_into (tree, tree);
extern void record_vars (tree);
*************** bool parallelized_function_p (tree);
#include "tree-flow-inline.h"
- void swap_tree_operands (gimple, tree *, tree *);
-
#endif /* _TREE_FLOW_H */
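
The immediate-use iterators removed above (and relocated to ssa-iterators.h
per the ChangeLog) are typically driven as in the following sketch; NAME and
REPLACEMENT are placeholders:

  gimple use_stmt;
  imm_use_iterator iter;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      use_operand_p use_p;

      /* Rewrite every occurrence of NAME in this statement, then refresh
         its operand cache.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, replacement);
      update_stmt (use_stmt);
    }
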
===================================================================
*************** get_lineno (const_gimple stmt)
return LOCATION_LINE (loc);
}
- /* Delink an immediate_uses node from its chain. */
- static inline void
- delink_imm_use (ssa_use_operand_t *linknode)
- {
- /* Return if this node is not in a list. */
- if (linknode->prev == NULL)
- return;
-
- linknode->prev->next = linknode->next;
- linknode->next->prev = linknode->prev;
- linknode->prev = NULL;
- linknode->next = NULL;
- }
-
- /* Link ssa_imm_use node LINKNODE into the chain for LIST. */
- static inline void
- link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list)
- {
- /* Link the new node at the head of the list. If we are in the process of
- traversing the list, we won't visit any new nodes added to it. */
- linknode->prev = list;
- linknode->next = list->next;
- list->next->prev = linknode;
- list->next = linknode;
- }
-
- /* Link ssa_imm_use node LINKNODE into the chain for DEF. */
- static inline void
- link_imm_use (ssa_use_operand_t *linknode, tree def)
- {
- ssa_use_operand_t *root;
-
- if (!def || TREE_CODE (def) != SSA_NAME)
- linknode->prev = NULL;
- else
- {
- root = &(SSA_NAME_IMM_USE_NODE (def));
- if (linknode->use)
- gcc_checking_assert (*(linknode->use) == def);
- link_imm_use_to_list (linknode, root);
- }
- }
-
- /* Set the value of a use pointed to by USE to VAL. */
- static inline void
- set_ssa_use_from_ptr (use_operand_p use, tree val)
- {
- delink_imm_use (use);
- *(use->use) = val;
- link_imm_use (use, val);
- }
-
- /* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
- in STMT. */
- static inline void
- link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple stmt)
- {
- if (stmt)
- link_imm_use (linknode, def);
- else
- link_imm_use (linknode, NULL);
- linknode->loc.stmt = stmt;
- }
-
- /* Relink a new node in place of an old node in the list. */
- static inline void
- relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
- {
- /* The node one had better be in the same list. */
- gcc_checking_assert (*(old->use) == *(node->use));
- node->prev = old->prev;
- node->next = old->next;
- if (old->prev)
- {
- old->prev->next = node;
- old->next->prev = node;
- /* Remove the old node from the list. */
- old->prev = NULL;
- }
- }
-
- /* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring
- in STMT. */
- static inline void
- relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
- gimple stmt)
- {
- if (stmt)
- relink_imm_use (linknode, old);
- else
- link_imm_use (linknode, NULL);
- linknode->loc.stmt = stmt;
- }
-
-
- /* Return true is IMM has reached the end of the immediate use list. */
- static inline bool
- end_readonly_imm_use_p (const imm_use_iterator *imm)
- {
- return (imm->imm_use == imm->end_p);
- }
-
- /* Initialize iterator IMM to process the list for VAR. */
- static inline use_operand_p
- first_readonly_imm_use (imm_use_iterator *imm, tree var)
- {
- imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
- imm->imm_use = imm->end_p->next;
- #ifdef ENABLE_CHECKING
- imm->iter_node.next = imm->imm_use->next;
- #endif
- if (end_readonly_imm_use_p (imm))
- return NULL_USE_OPERAND_P;
- return imm->imm_use;
- }
-
- /* Bump IMM to the next use in the list. */
- static inline use_operand_p
- next_readonly_imm_use (imm_use_iterator *imm)
- {
- use_operand_p old = imm->imm_use;
-
- #ifdef ENABLE_CHECKING
- /* If this assertion fails, it indicates the 'next' pointer has changed
- since the last bump. This indicates that the list is being modified
- via stmt changes, or SET_USE, or somesuch thing, and you need to be
- using the SAFE version of the iterator. */
- gcc_assert (imm->iter_node.next == old->next);
- imm->iter_node.next = old->next->next;
- #endif
-
- imm->imm_use = old->next;
- if (end_readonly_imm_use_p (imm))
- return NULL_USE_OPERAND_P;
- return imm->imm_use;
- }
-
- /* tree-cfg.c */
- extern bool has_zero_uses_1 (const ssa_use_operand_t *head);
- extern bool single_imm_use_1 (const ssa_use_operand_t *head,
- use_operand_p *use_p, gimple *stmt);
-
- /* Return true if VAR has no nondebug uses. */
- static inline bool
- has_zero_uses (const_tree var)
- {
- const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
-
- /* A single use_operand means there is no items in the list. */
- if (ptr == ptr->next)
- return true;
-
- /* If there are debug stmts, we have to look at each use and see
- whether there are any nondebug uses. */
- if (!MAY_HAVE_DEBUG_STMTS)
- return false;
-
- return has_zero_uses_1 (ptr);
- }
-
- /* Return true if VAR has a single nondebug use. */
- static inline bool
- has_single_use (const_tree var)
- {
- const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
-
- /* If there aren't any uses whatsoever, we're done. */
- if (ptr == ptr->next)
- return false;
-
- /* If there's a single use, check that it's not a debug stmt. */
- if (ptr == ptr->next->next)
- return !is_gimple_debug (USE_STMT (ptr->next));
-
- /* If there are debug stmts, we have to look at each of them. */
- if (!MAY_HAVE_DEBUG_STMTS)
- return false;
-
- return single_imm_use_1 (ptr, NULL, NULL);
- }
-
-
- /* If VAR has only a single immediate nondebug use, return true, and
- set USE_P and STMT to the use pointer and stmt of occurrence. */
- static inline bool
- single_imm_use (const_tree var, use_operand_p *use_p, gimple *stmt)
- {
- const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
-
- /* If there aren't any uses whatsoever, we're done. */
- if (ptr == ptr->next)
- {
- return_false:
- *use_p = NULL_USE_OPERAND_P;
- *stmt = NULL;
- return false;
- }
-
- /* If there's a single use, check that it's not a debug stmt. */
- if (ptr == ptr->next->next)
- {
- if (!is_gimple_debug (USE_STMT (ptr->next)))
- {
- *use_p = ptr->next;
- *stmt = ptr->next->loc.stmt;
- return true;
- }
- else
- goto return_false;
- }
-
- /* If there are debug stmts, we have to look at each of them. */
- if (!MAY_HAVE_DEBUG_STMTS)
- goto return_false;
-
- return single_imm_use_1 (ptr, use_p, stmt);
- }
-
- /* Return the number of nondebug immediate uses of VAR. */
- static inline unsigned int
- num_imm_uses (const_tree var)
- {
- const ssa_use_operand_t *const start = &(SSA_NAME_IMM_USE_NODE (var));
- const ssa_use_operand_t *ptr;
- unsigned int num = 0;
-
- if (!MAY_HAVE_DEBUG_STMTS)
- for (ptr = start->next; ptr != start; ptr = ptr->next)
- num++;
- else
- for (ptr = start->next; ptr != start; ptr = ptr->next)
- if (!is_gimple_debug (USE_STMT (ptr)))
- num++;
-
- return num;
- }
-
- /* Return the tree pointed-to by USE. */
- static inline tree
- get_use_from_ptr (use_operand_p use)
- {
- return *(use->use);
- }
-
- /* Return the tree pointed-to by DEF. */
- static inline tree
- get_def_from_ptr (def_operand_p def)
- {
- return *def;
- }
-
- /* Return a use_operand_p pointer for argument I of PHI node GS. */
-
- static inline use_operand_p
- gimple_phi_arg_imm_use_ptr (gimple gs, int i)
- {
- return &gimple_phi_arg (gs, i)->imm_use;
- }
-
- /* Return the tree operand for argument I of PHI node GS. */
-
- static inline tree
- gimple_phi_arg_def (gimple gs, size_t index)
- {
- struct phi_arg_d *pd = gimple_phi_arg (gs, index);
- return get_use_from_ptr (&pd->imm_use);
- }
-
- /* Return a pointer to the tree operand for argument I of PHI node GS. */
-
- static inline tree *
- gimple_phi_arg_def_ptr (gimple gs, size_t index)
- {
- return &gimple_phi_arg (gs, index)->def;
- }
-
- /* Return the edge associated with argument I of phi node GS. */
-
- static inline edge
- gimple_phi_arg_edge (gimple gs, size_t i)
- {
- return EDGE_PRED (gimple_bb (gs), i);
- }
-
- /* Return the source location of gimple argument I of phi node GS. */
-
- static inline source_location
- gimple_phi_arg_location (gimple gs, size_t i)
- {
- return gimple_phi_arg (gs, i)->locus;
- }
-
- /* Return the source location of the argument on edge E of phi node GS. */
-
- static inline source_location
- gimple_phi_arg_location_from_edge (gimple gs, edge e)
- {
- return gimple_phi_arg (gs, e->dest_idx)->locus;
- }
-
- /* Set the source location of gimple argument I of phi node GS to LOC. */
-
- static inline void
- gimple_phi_arg_set_location (gimple gs, size_t i, source_location loc)
- {
- gimple_phi_arg (gs, i)->locus = loc;
- }
-
- /* Return TRUE if argument I of phi node GS has a location record. */
-
- static inline bool
- gimple_phi_arg_has_location (gimple gs, size_t i)
- {
- return gimple_phi_arg_location (gs, i) != UNKNOWN_LOCATION;
- }
-
-
- /* Return the PHI nodes for basic block BB, or NULL if there are no
- PHI nodes. */
- static inline gimple_seq
- phi_nodes (const_basic_block bb)
- {
- gcc_checking_assert (!(bb->flags & BB_RTL));
- return bb->il.gimple.phi_nodes;
- }
-
- static inline gimple_seq *
- phi_nodes_ptr (basic_block bb)
- {
- gcc_checking_assert (!(bb->flags & BB_RTL));
- return &bb->il.gimple.phi_nodes;
- }
-
- /* Set PHI nodes of a basic block BB to SEQ. */
-
- static inline void
- set_phi_nodes (basic_block bb, gimple_seq seq)
- {
- gimple_stmt_iterator i;
-
- gcc_checking_assert (!(bb->flags & BB_RTL));
- bb->il.gimple.phi_nodes = seq;
- if (seq)
- for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
- gimple_set_bb (gsi_stmt (i), bb);
- }
-
- /* Return the phi argument which contains the specified use. */
-
- static inline int
- phi_arg_index_from_use (use_operand_p use)
- {
- struct phi_arg_d *element, *root;
- size_t index;
- gimple phi;
-
- /* Since the use is the first thing in a PHI argument element, we can
- calculate its index based on casting it to an argument, and performing
- pointer arithmetic. */
-
- phi = USE_STMT (use);
-
- element = (struct phi_arg_d *)use;
- root = gimple_phi_arg (phi, 0);
- index = element - root;
-
- /* Make sure the calculation doesn't have any leftover bytes. If it does,
- then imm_use is likely not the first element in phi_arg_d. */
- gcc_checking_assert ((((char *)element - (char *)root)
- % sizeof (struct phi_arg_d)) == 0
- && index < gimple_phi_capacity (phi));
-
- return index;
- }
/* Return true if T (assumed to be a DECL) is a global variable.
A variable is considered global if its storage is not automatic. */
*************** may_be_aliased (const_tree var)
}
- /* PHI nodes should contain only ssa_names and invariants. A test
- for ssa_name is definitely simpler; don't let invalid contents
- slip in in the meantime. */
-
- static inline bool
- phi_ssa_name_p (const_tree t)
- {
- if (TREE_CODE (t) == SSA_NAME)
- return true;
- gcc_checking_assert (is_gimple_min_invariant (t));
- return false;
- }
-
-
/* Returns the loop of the statement STMT. */
static inline struct loop *
*************** loop_containing_stmt (gimple stmt)
}
- /* ----------------------------------------------------------------------- */
-
- /* The following set of routines are used to iterator over various type of
- SSA operands. */
-
- /* Return true if PTR is finished iterating. */
- static inline bool
- op_iter_done (const ssa_op_iter *ptr)
- {
- return ptr->done;
- }
-
- /* Get the next iterator use value for PTR. */
- static inline use_operand_p
- op_iter_next_use (ssa_op_iter *ptr)
- {
- use_operand_p use_p;
- gcc_checking_assert (ptr->iter_type == ssa_op_iter_use);
- if (ptr->uses)
- {
- use_p = USE_OP_PTR (ptr->uses);
- ptr->uses = ptr->uses->next;
- return use_p;
- }
- if (ptr->i < ptr->numops)
- {
- return PHI_ARG_DEF_PTR (ptr->stmt, (ptr->i)++);
- }
- ptr->done = true;
- return NULL_USE_OPERAND_P;
- }
-
- /* Get the next iterator def value for PTR. */
- static inline def_operand_p
- op_iter_next_def (ssa_op_iter *ptr)
- {
- gcc_checking_assert (ptr->iter_type == ssa_op_iter_def);
- if (ptr->flags & SSA_OP_VDEF)
- {
- tree *p;
- ptr->flags &= ~SSA_OP_VDEF;
- p = gimple_vdef_ptr (ptr->stmt);
- if (p && *p)
- return p;
- }
- if (ptr->flags & SSA_OP_DEF)
- {
- while (ptr->i < ptr->numops)
- {
- tree *val = gimple_op_ptr (ptr->stmt, ptr->i);
- ptr->i++;
- if (*val)
- {
- if (TREE_CODE (*val) == TREE_LIST)
- val = &TREE_VALUE (*val);
- if (TREE_CODE (*val) == SSA_NAME
- || is_gimple_reg (*val))
- return val;
- }
- }
- ptr->flags &= ~SSA_OP_DEF;
- }
-
- ptr->done = true;
- return NULL_DEF_OPERAND_P;
- }
-
- /* Get the next iterator tree value for PTR. */
- static inline tree
- op_iter_next_tree (ssa_op_iter *ptr)
- {
- tree val;
- gcc_checking_assert (ptr->iter_type == ssa_op_iter_tree);
- if (ptr->uses)
- {
- val = USE_OP (ptr->uses);
- ptr->uses = ptr->uses->next;
- return val;
- }
- if (ptr->flags & SSA_OP_VDEF)
- {
- ptr->flags &= ~SSA_OP_VDEF;
- if ((val = gimple_vdef (ptr->stmt)))
- return val;
- }
- if (ptr->flags & SSA_OP_DEF)
- {
- while (ptr->i < ptr->numops)
- {
- val = gimple_op (ptr->stmt, ptr->i);
- ptr->i++;
- if (val)
- {
- if (TREE_CODE (val) == TREE_LIST)
- val = TREE_VALUE (val);
- if (TREE_CODE (val) == SSA_NAME
- || is_gimple_reg (val))
- return val;
- }
- }
- ptr->flags &= ~SSA_OP_DEF;
- }
-
- ptr->done = true;
- return NULL_TREE;
- }
-
-
- /* This functions clears the iterator PTR, and marks it done. This is normally
- used to prevent warnings in the compile about might be uninitialized
- components. */
-
- static inline void
- clear_and_done_ssa_iter (ssa_op_iter *ptr)
- {
- ptr->i = 0;
- ptr->numops = 0;
- ptr->uses = NULL;
- ptr->iter_type = ssa_op_iter_none;
- ptr->stmt = NULL;
- ptr->done = true;
- ptr->flags = 0;
- }
-
- /* Initialize the iterator PTR to the virtual defs in STMT. */
- static inline void
- op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
- {
- /* PHI nodes require a different iterator initialization path. We
- do not support iterating over virtual defs or uses without
- iterating over defs or uses at the same time. */
- gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI
- && (!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF))
- && (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE)));
- ptr->numops = 0;
- if (flags & (SSA_OP_DEF | SSA_OP_VDEF))
- {
- switch (gimple_code (stmt))
- {
- case GIMPLE_ASSIGN:
- case GIMPLE_CALL:
- ptr->numops = 1;
- break;
- case GIMPLE_ASM:
- ptr->numops = gimple_asm_noutputs (stmt);
- break;
- default:
- ptr->numops = 0;
- flags &= ~(SSA_OP_DEF | SSA_OP_VDEF);
- break;
- }
- }
- ptr->uses = (flags & (SSA_OP_USE|SSA_OP_VUSE)) ? gimple_use_ops (stmt) : NULL;
- if (!(flags & SSA_OP_VUSE)
- && ptr->uses
- && gimple_vuse (stmt) != NULL_TREE)
- ptr->uses = ptr->uses->next;
- ptr->done = false;
- ptr->i = 0;
-
- ptr->stmt = stmt;
- ptr->flags = flags;
- }
-
- /* Initialize iterator PTR to the use operands in STMT based on FLAGS. Return
- the first use. */
- static inline use_operand_p
- op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
- {
- gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
- && (flags & SSA_OP_USE));
- op_iter_init (ptr, stmt, flags);
- ptr->iter_type = ssa_op_iter_use;
- return op_iter_next_use (ptr);
- }
-
- /* Initialize iterator PTR to the def operands in STMT based on FLAGS. Return
- the first def. */
- static inline def_operand_p
- op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
- {
- gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
- && (flags & SSA_OP_DEF));
- op_iter_init (ptr, stmt, flags);
- ptr->iter_type = ssa_op_iter_def;
- return op_iter_next_def (ptr);
- }
-
- /* Initialize iterator PTR to the operands in STMT based on FLAGS. Return
- the first operand as a tree. */
- static inline tree
- op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
- {
- op_iter_init (ptr, stmt, flags);
- ptr->iter_type = ssa_op_iter_tree;
- return op_iter_next_tree (ptr);
- }
-
-
- /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
- return NULL. */
- static inline tree
- single_ssa_tree_operand (gimple stmt, int flags)
- {
- tree var;
- ssa_op_iter iter;
-
- var = op_iter_init_tree (&iter, stmt, flags);
- if (op_iter_done (&iter))
- return NULL_TREE;
- op_iter_next_tree (&iter);
- if (op_iter_done (&iter))
- return var;
- return NULL_TREE;
- }
-
-
- /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
- return NULL. */
- static inline use_operand_p
- single_ssa_use_operand (gimple stmt, int flags)
- {
- use_operand_p var;
- ssa_op_iter iter;
-
- var = op_iter_init_use (&iter, stmt, flags);
- if (op_iter_done (&iter))
- return NULL_USE_OPERAND_P;
- op_iter_next_use (&iter);
- if (op_iter_done (&iter))
- return var;
- return NULL_USE_OPERAND_P;
- }
-
-
-
- /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
- return NULL. */
- static inline def_operand_p
- single_ssa_def_operand (gimple stmt, int flags)
- {
- def_operand_p var;
- ssa_op_iter iter;
-
- var = op_iter_init_def (&iter, stmt, flags);
- if (op_iter_done (&iter))
- return NULL_DEF_OPERAND_P;
- op_iter_next_def (&iter);
- if (op_iter_done (&iter))
- return var;
- return NULL_DEF_OPERAND_P;
- }
-
-
- /* Return true if there are zero operands in STMT matching the type
- given in FLAGS. */
- static inline bool
- zero_ssa_operands (gimple stmt, int flags)
- {
- ssa_op_iter iter;
-
- op_iter_init_tree (&iter, stmt, flags);
- return op_iter_done (&iter);
- }
-
-
- /* Return the number of operands matching FLAGS in STMT. */
- static inline int
- num_ssa_operands (gimple stmt, int flags)
- {
- ssa_op_iter iter;
- tree t;
- int num = 0;
-
- gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI);
- FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
- num++;
- return num;
- }
-
- static inline use_operand_p
- op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags);
-
- /* Delink all immediate_use information for STMT. */
- static inline void
- delink_stmt_imm_use (gimple stmt)
- {
- ssa_op_iter iter;
- use_operand_p use_p;
-
- if (ssa_operands_active (cfun))
- FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_ALL_USES)
- delink_imm_use (use_p);
- }
-
-
- /* If there is a single DEF in the PHI node which matches FLAG, return it.
- Otherwise return NULL_DEF_OPERAND_P. */
- static inline tree
- single_phi_def (gimple stmt, int flags)
- {
- tree def = PHI_RESULT (stmt);
- if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
- return def;
- if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
- return def;
- return NULL_TREE;
- }
-
- /* Initialize the iterator PTR for uses matching FLAGS in PHI. FLAGS should
- be either SSA_OP_USES or SSA_OP_VIRTUAL_USES. */
- static inline use_operand_p
- op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
- {
- tree phi_def = gimple_phi_result (phi);
- int comp;
-
- clear_and_done_ssa_iter (ptr);
- ptr->done = false;
-
- gcc_checking_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);
-
- comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);
-
- /* If the PHI node doesn't the operand type we care about, we're done. */
- if ((flags & comp) == 0)
- {
- ptr->done = true;
- return NULL_USE_OPERAND_P;
- }
-
- ptr->stmt = phi;
- ptr->numops = gimple_phi_num_args (phi);
- ptr->iter_type = ssa_op_iter_use;
- ptr->flags = flags;
- return op_iter_next_use (ptr);
- }
-
-
- /* Start an iterator for a PHI definition. */
-
- static inline def_operand_p
- op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
- {
- tree phi_def = PHI_RESULT (phi);
- int comp;
-
- clear_and_done_ssa_iter (ptr);
- ptr->done = false;
-
- gcc_checking_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);
-
- comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);
-
- /* If the PHI node doesn't have the operand type we care about,
- we're done. */
- if ((flags & comp) == 0)
- {
- ptr->done = true;
- return NULL_DEF_OPERAND_P;
- }
-
- ptr->iter_type = ssa_op_iter_def;
- /* The first call to op_iter_next_def will terminate the iterator since
- all the fields are NULL. Simply return the result here as the first and
- therefore only result. */
- return PHI_RESULT_PTR (phi);
- }
-
- /* Return true is IMM has reached the end of the immediate use stmt list. */
-
- static inline bool
- end_imm_use_stmt_p (const imm_use_iterator *imm)
- {
- return (imm->imm_use == imm->end_p);
- }
-
- /* Finished the traverse of an immediate use stmt list IMM by removing the
- placeholder node from the list. */
-
- static inline void
- end_imm_use_stmt_traverse (imm_use_iterator *imm)
- {
- delink_imm_use (&(imm->iter_node));
- }
-
- /* Immediate use traversal of uses within a stmt require that all the
- uses on a stmt be sequentially listed. This routine is used to build up
- this sequential list by adding USE_P to the end of the current list
- currently delimited by HEAD and LAST_P. The new LAST_P value is
- returned. */
-
- static inline use_operand_p
- move_use_after_head (use_operand_p use_p, use_operand_p head,
- use_operand_p last_p)
- {
- gcc_checking_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
- /* Skip head when we find it. */
- if (use_p != head)
- {
- /* If use_p is already linked in after last_p, continue. */
- if (last_p->next == use_p)
- last_p = use_p;
- else
- {
- /* Delink from current location, and link in at last_p. */
- delink_imm_use (use_p);
- link_imm_use_to_list (use_p, last_p);
- last_p = use_p;
- }
- }
- return last_p;
- }
-
-
- /* This routine will relink all uses with the same stmt as HEAD into the list
- immediately following HEAD for iterator IMM. */
-
- static inline void
- link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
- {
- use_operand_p use_p;
- use_operand_p last_p = head;
- gimple head_stmt = USE_STMT (head);
- tree use = USE_FROM_PTR (head);
- ssa_op_iter op_iter;
- int flag;
-
- /* Only look at virtual or real uses, depending on the type of HEAD. */
- flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);
-
- if (gimple_code (head_stmt) == GIMPLE_PHI)
- {
- FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
- if (USE_FROM_PTR (use_p) == use)
- last_p = move_use_after_head (use_p, head, last_p);
- }
- else
- {
- if (flag == SSA_OP_USE)
- {
- FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
- if (USE_FROM_PTR (use_p) == use)
- last_p = move_use_after_head (use_p, head, last_p);
- }
- else if ((use_p = gimple_vuse_op (head_stmt)) != NULL_USE_OPERAND_P)
- {
- if (USE_FROM_PTR (use_p) == use)
- last_p = move_use_after_head (use_p, head, last_p);
- }
- }
- /* Link iter node in after last_p. */
- if (imm->iter_node.prev != NULL)
- delink_imm_use (&imm->iter_node);
- link_imm_use_to_list (&(imm->iter_node), last_p);
- }
-
- /* Initialize IMM to traverse over uses of VAR. Return the first statement. */
- static inline gimple
- first_imm_use_stmt (imm_use_iterator *imm, tree var)
- {
- imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
- imm->imm_use = imm->end_p->next;
- imm->next_imm_name = NULL_USE_OPERAND_P;
-
- /* iter_node is used as a marker within the immediate use list to indicate
- where the end of the current stmt's uses are. Initialize it to NULL
- stmt and use, which indicates a marker node. */
- imm->iter_node.prev = NULL_USE_OPERAND_P;
- imm->iter_node.next = NULL_USE_OPERAND_P;
- imm->iter_node.loc.stmt = NULL;
- imm->iter_node.use = NULL;
-
- if (end_imm_use_stmt_p (imm))
- return NULL;
-
- link_use_stmts_after (imm->imm_use, imm);
-
- return USE_STMT (imm->imm_use);
- }
-
- /* Bump IMM to the next stmt which has a use of var. */
-
- static inline gimple
- next_imm_use_stmt (imm_use_iterator *imm)
- {
- imm->imm_use = imm->iter_node.next;
- if (end_imm_use_stmt_p (imm))
- {
- if (imm->iter_node.prev != NULL)
- delink_imm_use (&imm->iter_node);
- return NULL;
- }
-
- link_use_stmts_after (imm->imm_use, imm);
- return USE_STMT (imm->imm_use);
- }
-
- /* This routine will return the first use on the stmt IMM currently refers
- to. */
-
- static inline use_operand_p
- first_imm_use_on_stmt (imm_use_iterator *imm)
- {
- imm->next_imm_name = imm->imm_use->next;
- return imm->imm_use;
- }
-
- /* Return TRUE if the last use on the stmt IMM refers to has been visited. */
-
- static inline bool
- end_imm_use_on_stmt_p (const imm_use_iterator *imm)
- {
- return (imm->imm_use == &(imm->iter_node));
- }
-
- /* Bump to the next use on the stmt IMM refers to, return NULL if done. */
-
- static inline use_operand_p
- next_imm_use_on_stmt (imm_use_iterator *imm)
- {
- imm->imm_use = imm->next_imm_name;
- if (end_imm_use_on_stmt_p (imm))
- return NULL_USE_OPERAND_P;
- else
- {
- imm->next_imm_name = imm->imm_use->next;
- return imm->imm_use;
- }
- }
/* Return true if VAR cannot be modified by the program. */
===================================================================
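For context, a minimal sketch (illustrative only, not part of this patch) of how a pass consumes the operand iterators shown above once they are relocated to ssa-iterators.h.  count_real_operands is a hypothetical helper; the usual config.h/system.h/coretypes.h prelude and "tree-ssa.h" include are assumed.

    /* Count the real (non-virtual) uses and defs of STMT.  */
    static unsigned
    count_real_operands (gimple stmt)
    {
      ssa_op_iter iter;
      tree op;
      unsigned n = 0;

      /* SSA_OP_USE | SSA_OP_DEF visits only real operands; the virtual
         VUSE/VDEF operands are skipped.  */
      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE | SSA_OP_DEF)
        n++;
      return n;
    }
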
*************** struct GTY(()) ssa_operands {
#define PHI_ARG_INDEX_FROM_USE(USE) phi_arg_index_from_use (USE)
extern void init_ssa_operands (struct function *fn);
extern void fini_ssa_operands (void);
! extern void update_stmt_operands (gimple);
extern void free_stmt_operands (gimple);
extern bool verify_imm_links (FILE *f, tree var);
- extern bool verify_ssa_operands (gimple stmt);
- extern void dump_immediate_uses (FILE *file);
extern void dump_immediate_uses_for (FILE *file, tree var);
extern void debug_immediate_uses (void);
extern void debug_immediate_uses_for (tree var);
- extern void dump_decl_set (FILE *, bitmap);
- extern void debug_decl_set (bitmap);
-
- extern bool ssa_operands_active (struct function *);
extern bool virtual_operand_p (tree);
extern void unlink_stmt_vdef (gimple);
! enum ssa_op_iter_type {
! ssa_op_iter_none = 0,
! ssa_op_iter_tree,
! ssa_op_iter_use,
! ssa_op_iter_def
! };
!
! /* This structure is used in the operand iterator loops. It contains the
! items required to determine which operand is retrieved next. During
! optimization, this structure is scalarized, and any unused fields are
! optimized away, resulting in little overhead. */
!
! typedef struct ssa_operand_iterator_d
{
! enum ssa_op_iter_type iter_type;
! bool done;
! int flags;
! unsigned i;
! unsigned numops;
! use_optype_p uses;
! gimple stmt;
! } ssa_op_iter;
!
! /* These flags are used to determine which operands are returned during
! execution of the loop. */
! #define SSA_OP_USE 0x01 /* Real USE operands. */
! #define SSA_OP_DEF 0x02 /* Real DEF operands. */
! #define SSA_OP_VUSE 0x04 /* VUSE operands. */
! #define SSA_OP_VDEF 0x08 /* VDEF operands. */
!
! /* These are commonly grouped operand flags. */
! #define SSA_OP_VIRTUAL_USES (SSA_OP_VUSE)
! #define SSA_OP_VIRTUAL_DEFS (SSA_OP_VDEF)
! #define SSA_OP_ALL_VIRTUALS (SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_DEFS)
! #define SSA_OP_ALL_USES (SSA_OP_VIRTUAL_USES | SSA_OP_USE)
! #define SSA_OP_ALL_DEFS (SSA_OP_VIRTUAL_DEFS | SSA_OP_DEF)
! #define SSA_OP_ALL_OPERANDS (SSA_OP_ALL_USES | SSA_OP_ALL_DEFS)
!
! /* This macro executes a loop over the operands of STMT specified in FLAG,
! returning each operand as a 'tree' in the variable TREEVAR. ITER is an
! ssa_op_iter structure used to control the loop. */
! #define FOR_EACH_SSA_TREE_OPERAND(TREEVAR, STMT, ITER, FLAGS) \
! for (TREEVAR = op_iter_init_tree (&(ITER), STMT, FLAGS); \
! !op_iter_done (&(ITER)); \
! (void) (TREEVAR = op_iter_next_tree (&(ITER))))
!
! /* This macro executes a loop over the operands of STMT specified in FLAG,
! returning each operand as a 'use_operand_p' in the variable USEVAR.
! ITER is an ssa_op_iter structure used to control the loop. */
! #define FOR_EACH_SSA_USE_OPERAND(USEVAR, STMT, ITER, FLAGS) \
! for (USEVAR = op_iter_init_use (&(ITER), STMT, FLAGS); \
! !op_iter_done (&(ITER)); \
! USEVAR = op_iter_next_use (&(ITER)))
!
! /* This macro executes a loop over the operands of STMT specified in FLAG,
! returning each operand as a 'def_operand_p' in the variable DEFVAR.
! ITER is an ssa_op_iter structure used to control the loop. */
! #define FOR_EACH_SSA_DEF_OPERAND(DEFVAR, STMT, ITER, FLAGS) \
! for (DEFVAR = op_iter_init_def (&(ITER), STMT, FLAGS); \
! !op_iter_done (&(ITER)); \
! DEFVAR = op_iter_next_def (&(ITER)))
!
! /* This macro will execute a loop over all the arguments of a PHI which
! match FLAGS. A use_operand_p is always returned via USEVAR. FLAGS
! can be either SSA_OP_USE or SSA_OP_VIRTUAL_USES or SSA_OP_ALL_USES. */
! #define FOR_EACH_PHI_ARG(USEVAR, STMT, ITER, FLAGS) \
! for ((USEVAR) = op_iter_init_phiuse (&(ITER), STMT, FLAGS); \
! !op_iter_done (&(ITER)); \
! (USEVAR) = op_iter_next_use (&(ITER)))
!
!
! /* This macro will execute a loop over a stmt, regardless of whether it is
! a real stmt or a PHI node, looking at the USE nodes matching FLAGS. */
! #define FOR_EACH_PHI_OR_STMT_USE(USEVAR, STMT, ITER, FLAGS) \
! for ((USEVAR) = (gimple_code (STMT) == GIMPLE_PHI \
! ? op_iter_init_phiuse (&(ITER), STMT, FLAGS) \
! : op_iter_init_use (&(ITER), STMT, FLAGS)); \
! !op_iter_done (&(ITER)); \
! (USEVAR) = op_iter_next_use (&(ITER)))
!
! /* This macro will execute a loop over a stmt, regardless of whether it is
! a real stmt or a PHI node, looking at the DEF nodes matching FLAGS. */
! #define FOR_EACH_PHI_OR_STMT_DEF(DEFVAR, STMT, ITER, FLAGS) \
! for ((DEFVAR) = (gimple_code (STMT) == GIMPLE_PHI \
! ? op_iter_init_phidef (&(ITER), STMT, FLAGS) \
! : op_iter_init_def (&(ITER), STMT, FLAGS)); \
! !op_iter_done (&(ITER)); \
! (DEFVAR) = op_iter_next_def (&(ITER)))
!
! /* This macro returns an operand in STMT as a tree if it is the ONLY
! operand matching FLAGS. If there are 0 or more than 1 operand matching
! FLAGS, then NULL_TREE is returned. */
! #define SINGLE_SSA_TREE_OPERAND(STMT, FLAGS) \
! single_ssa_tree_operand (STMT, FLAGS)
!
! /* This macro returns an operand in STMT as a use_operand_p if it is the ONLY
! operand matching FLAGS. If there are 0 or more than 1 operand matching
! FLAGS, then NULL_USE_OPERAND_P is returned. */
! #define SINGLE_SSA_USE_OPERAND(STMT, FLAGS) \
! single_ssa_use_operand (STMT, FLAGS)
!
! /* This macro returns an operand in STMT as a def_operand_p if it is the ONLY
! operand matching FLAGS. If there are 0 or more than 1 operand matching
! FLAGS, then NULL_DEF_OPERAND_P is returned. */
! #define SINGLE_SSA_DEF_OPERAND(STMT, FLAGS) \
! single_ssa_def_operand (STMT, FLAGS)
!
! /* This macro returns TRUE if there are no operands matching FLAGS in STMT. */
! #define ZERO_SSA_OPERANDS(STMT, FLAGS) zero_ssa_operands (STMT, FLAGS)
! /* This macro counts the number of operands in STMT matching FLAGS. */
! #define NUM_SSA_OPERANDS(STMT, FLAGS) num_ssa_operands (STMT, FLAGS)
#endif /* GCC_TREE_SSA_OPERANDS_H */
#define PHI_ARG_INDEX_FROM_USE(USE) phi_arg_index_from_use (USE)
+ extern bool ssa_operands_active (struct function *);
extern void init_ssa_operands (struct function *fn);
extern void fini_ssa_operands (void);
! extern bool verify_ssa_operands (gimple stmt);
extern void free_stmt_operands (gimple);
+ extern void update_stmt_operands (gimple);
+ extern void swap_ssa_operands (gimple, tree *, tree *);
extern bool verify_imm_links (FILE *f, tree var);
extern void dump_immediate_uses_for (FILE *file, tree var);
+ extern void dump_immediate_uses (FILE *file);
extern void debug_immediate_uses (void);
extern void debug_immediate_uses_for (tree var);
extern bool virtual_operand_p (tree);
extern void unlink_stmt_vdef (gimple);
! /* Return the tree pointed-to by USE. */
! static inline tree
! get_use_from_ptr (use_operand_p use)
{
! return *(use->use);
! }
! /* Return the tree pointed-to by DEF. */
! static inline tree
! get_def_from_ptr (def_operand_p def)
! {
! return *def;
! }
#endif /* GCC_TREE_SSA_OPERANDS_H */
===================================================================
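The hunk above exports swap_ssa_operands, which is meant for exchanging two SSA operand slots of a statement without a full operand rescan.  A minimal sketch of a caller (illustrative only, not part of this patch; commute_assign_operands is a hypothetical name, while is_gimple_assign, commutative_tree_code and the gimple_assign_rhs*_ptr accessors are the existing tree.h/gimple.h interfaces):

    /* Commute the two operands of a commutative GIMPLE_ASSIGN in place.  */
    static void
    commute_assign_operands (gimple stmt)
    {
      if (is_gimple_assign (stmt)
          && commutative_tree_code (gimple_assign_rhs_code (stmt)))
        swap_ssa_operands (stmt,
                           gimple_assign_rhs1_ptr (stmt),
                           gimple_assign_rhs2_ptr (stmt));
    }
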
***************
+ /* Header file for PHI node routines
+ Copyright (C) 2013 Free Software Foundation, Inc.
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify it under
+ the terms of the GNU General Public License as published by the Free
+ Software Foundation; either version 3, or (at your option) any later
+ version.
+
+ GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3. If not see
+ <http://www.gnu.org/licenses/>. */
+
+ #ifndef GCC_TREE_PHINODES_H
+ #define GCC_TREE_PHINODES_H
+
+ extern void phinodes_print_statistics (void);
+ extern void release_phi_node (gimple);
+ extern void reserve_phi_args_for_new_edge (basic_block);
+ extern void add_phi_node_to_bb (gimple phi, basic_block bb);
+ extern gimple create_phi_node (tree, basic_block);
+ extern void add_phi_arg (gimple, tree, edge, source_location);
+ extern void remove_phi_args (edge);
+ extern void remove_phi_node (gimple_stmt_iterator *, bool);
+ extern void remove_phi_nodes (basic_block);
+
+
+ /* PHI nodes should contain only ssa_names and invariants. A test
+ for ssa_name is definitely simpler; don't let invalid contents
+ slip in in the meantime. */
+
+ static inline bool
+ phi_ssa_name_p (const_tree t)
+ {
+ if (TREE_CODE (t) == SSA_NAME)
+ return true;
+ gcc_checking_assert (is_gimple_min_invariant (t));
+ return false;
+ }
+
+ /* Return the PHI nodes for basic block BB, or NULL if there are no
+ PHI nodes. */
+
+ static inline gimple_seq
+ phi_nodes (const_basic_block bb)
+ {
+ gcc_checking_assert (!(bb->flags & BB_RTL));
+ return bb->il.gimple.phi_nodes;
+ }
+
+ /* Return a pointer to the PHI nodes for basic block BB. */
+
+ static inline gimple_seq *
+ phi_nodes_ptr (basic_block bb)
+ {
+ gcc_checking_assert (!(bb->flags & BB_RTL));
+ return &bb->il.gimple.phi_nodes;
+ }
+
+ /* Set PHI nodes of a basic block BB to SEQ. */
+
+ static inline void
+ set_phi_nodes (basic_block bb, gimple_seq seq)
+ {
+ gimple_stmt_iterator i;
+
+ gcc_checking_assert (!(bb->flags & BB_RTL));
+ bb->il.gimple.phi_nodes = seq;
+ if (seq)
+ for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
+ gimple_set_bb (gsi_stmt (i), bb);
+ }
+
+
+ /* Return the tree operand for argument I of PHI node GS. */
+
+ static inline tree
+ gimple_phi_arg_def (gimple gs, size_t index)
+ {
+ return gimple_phi_arg (gs, index)->def;
+ }
+
+
+ /* Return a pointer to the tree operand for argument I of PHI node GS. */
+
+ static inline tree *
+ gimple_phi_arg_def_ptr (gimple gs, size_t index)
+ {
+ return &gimple_phi_arg (gs, index)->def;
+ }
+
+ /* Return the edge associated with argument I of phi node GS. */
+
+ static inline edge
+ gimple_phi_arg_edge (gimple gs, size_t i)
+ {
+ return EDGE_PRED (gimple_bb (gs), i);
+ }
+
+ /* Return the source location of gimple argument I of phi node GS. */
+
+ static inline source_location
+ gimple_phi_arg_location (gimple gs, size_t i)
+ {
+ return gimple_phi_arg (gs, i)->locus;
+ }
+
+ /* Return the source location of the argument on edge E of phi node GS. */
+
+ static inline source_location
+ gimple_phi_arg_location_from_edge (gimple gs, edge e)
+ {
+ return gimple_phi_arg (gs, e->dest_idx)->locus;
+ }
+
+ /* Set the source location of gimple argument I of phi node GS to LOC. */
+
+ static inline void
+ gimple_phi_arg_set_location (gimple gs, size_t i, source_location loc)
+ {
+ gimple_phi_arg (gs, i)->locus = loc;
+ }
+
+ /* Return TRUE if argument I of phi node GS has a location record. */
+
+ static inline bool
+ gimple_phi_arg_has_location (gimple gs, size_t i)
+ {
+ return gimple_phi_arg_location (gs, i) != UNKNOWN_LOCATION;
+ }
+
+ /* Return a use_operand_p pointer for argument I of PHI node GS. */
+
+ static inline use_operand_p
+ gimple_phi_arg_imm_use_ptr (gimple gs, int i)
+ {
+ return &gimple_phi_arg (gs, i)->imm_use;
+ }
+
+ /* Return the phi argument which contains the specified use. */
+
+ static inline int
+ phi_arg_index_from_use (use_operand_p use)
+ {
+ struct phi_arg_d *element, *root;
+ size_t index;
+ gimple phi;
+
+ /* Since the use is the first thing in a PHI argument element, we can
+ calculate its index based on casting it to an argument, and performing
+ pointer arithmetic. */
+
+ phi = USE_STMT (use);
+
+ element = (struct phi_arg_d *)use;
+ root = gimple_phi_arg (phi, 0);
+ index = element - root;
+
+ /* Make sure the calculation doesn't have any leftover bytes. If it does,
+ then imm_use is likely not the first element in phi_arg_d. */
+ gcc_checking_assert ((((char *)element - (char *)root)
+ % sizeof (struct phi_arg_d)) == 0
+ && index < gimple_phi_capacity (phi));
+
+ return index;
+ }
+
+
+ #endif /* GCC_TREE_PHINODES_H */
===================================================================
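A short sketch (illustrative only, not part of this patch) of how the relocated PHI argument accessors fit together; phi_arg_for_edge is a hypothetical helper:

    /* Return the value flowing into PHI along incoming edge E.  */
    static tree
    phi_arg_for_edge (gimple phi, edge e)
    {
      size_t i = e->dest_idx;

      /* PHI argument indices mirror the indices of the incoming edges.  */
      gcc_checking_assert (gimple_phi_arg_edge (phi, i) == e);
      return gimple_phi_arg_def (phi, i);
    }
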
*************** along with GCC; see the file COPYING3.
#ifndef GCC_TREE_SSA_H
#define GCC_TREE_SSA_H
! #include "tree-flow.h"
#include "tree-ssanames.h"
/* Mapping for redirected edges. */
struct _edge_var_map {
#ifndef GCC_TREE_SSA_H
#define GCC_TREE_SSA_H
! #include "gimple.h"
! #include "tree-ssa-operands.h"
! #include "tree-phinodes.h"
! #include "gimple-ssa.h"
! #include "ssa-iterators.h"
#include "tree-ssanames.h"
+ #include "tree-flow.h"
/* Mapping for redirected edges. */
struct _edge_var_map {
===================================================================
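With the include ordering above, a pass that needs the operand, PHI, and iterator interfaces can rely on the single aggregating header, for example:

    #include "tree-ssa.h"   /* Pulls in tree-ssa-operands.h, tree-phinodes.h,
                               gimple-ssa.h and ssa-iterators.h per the hunk above.  */
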
***************
+ /* Header file for SSA iterators.
+ Copyright (C) 2013 Free Software Foundation, Inc.
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify it under
+ the terms of the GNU General Public License as published by the Free
+ Software Foundation; either version 3, or (at your option) any later
+ version.
+
+ GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3. If not see
+ <http://www.gnu.org/licenses/>. */
+
+ #ifndef GCC_SSA_ITERATORS_H
+ #define GCC_SSA_ITERATORS_H
+
+ /* Immediate use lists are used to directly access all uses for an SSA
+    name and get pointers to the statement for each use.
+
+    The structure ssa_use_operand_d consists of PREV and NEXT pointers
+    that maintain the list, a USE pointer, which points to the address
+    where the use is located, and a LOC pointer which can point to the
+    statement where the use is located, or, in the case of the root
+    node, to the SSA name itself.
+
+    The list is anchored by an occurrence of ssa_use_operand_d *in* the
+    ssa_name node itself (named 'imm_uses').  This node is uniquely
+    identified by having a NULL USE pointer and the LOC pointer
+    pointing back to the ssa_name node itself.  This node forms the
+    base for a circular list, and initially this is the only node in
+    the list.
+
+    Fast iteration allows each use to be examined, but does not allow
+    any modifications to the uses or stmts.
+
+    Normal iteration allows insertion, deletion, and modification.  The
+    iterator manages this by inserting a marker node into the list
+    immediately before the node currently being examined in the list.
+    This marker node is uniquely identified by having a NULL stmt *and* a
+    NULL use pointer.
+
+    When iterating to the next use, the iteration routines check to see
+    if the node after the marker has changed.  If it has, then the node
+    following the marker is now the next one to be visited.  If not, the
+    marker node is moved past that node in the list (visualize it as
+    bumping the marker node through the list).  This continues until
+    the marker node is moved to the original anchor position.  The
+    marker node is then removed from the list.
+
+ If iteration is halted early, the marker node must be removed from
+ the list before continuing. */
+ typedef struct immediate_use_iterator_d
+ {
+ /* This is the current use the iterator is processing. */
+ ssa_use_operand_t *imm_use;
+ /* This marks the last use in the list (use node from SSA_NAME) */
+ ssa_use_operand_t *end_p;
+ /* This node is inserted and used to mark the end of the uses for a stmt. */
+ ssa_use_operand_t iter_node;
+ /* This is the next ssa_name to visit. IMM_USE may get removed before
+ the next one is traversed to, so it must be cached early. */
+ ssa_use_operand_t *next_imm_name;
+ } imm_use_iterator;
+
+
+ /* Use this iterator when simply looking at stmts. Adding, deleting or
+ modifying stmts will cause this iterator to malfunction. */
+
+ #define FOR_EACH_IMM_USE_FAST(DEST, ITER, SSAVAR) \
+ for ((DEST) = first_readonly_imm_use (&(ITER), (SSAVAR)); \
+ !end_readonly_imm_use_p (&(ITER)); \
+ (void) ((DEST) = next_readonly_imm_use (&(ITER))))
+
+ /* Use this iterator to visit each stmt which has a use of SSAVAR. */
+
+ #define FOR_EACH_IMM_USE_STMT(STMT, ITER, SSAVAR) \
+ for ((STMT) = first_imm_use_stmt (&(ITER), (SSAVAR)); \
+ !end_imm_use_stmt_p (&(ITER)); \
+ (void) ((STMT) = next_imm_use_stmt (&(ITER))))
+
+ /* Use this to terminate the FOR_EACH_IMM_USE_STMT loop early.  Failure to
+    do so will result in leaving an iterator marker node in the immediate
+ use list, and nothing good will come from that. */
+ #define BREAK_FROM_IMM_USE_STMT(ITER) \
+ { \
+ end_imm_use_stmt_traverse (&(ITER)); \
+ break; \
+ }
+
+
+ /* Use this iterator in combination with FOR_EACH_IMM_USE_STMT to
+    get access to each occurrence of ssavar on the stmt returned by
+    that iterator.  For instance:
+
+ FOR_EACH_IMM_USE_STMT (stmt, iter, var)
+ {
+ FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+ {
+ SET_USE (use_p, blah);
+ }
+ update_stmt (stmt);
+ } */
+
+ #define FOR_EACH_IMM_USE_ON_STMT(DEST, ITER) \
+ for ((DEST) = first_imm_use_on_stmt (&(ITER)); \
+ !end_imm_use_on_stmt_p (&(ITER)); \
+ (void) ((DEST) = next_imm_use_on_stmt (&(ITER))))
+
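The canonical consumer of the three macros above is a replace-all-uses loop.  A minimal sketch (illustrative only, not part of this patch; propagate_value_sketch is a hypothetical name, SET_USE is the existing tree-ssa-operands.h macro and update_stmt now lives in gimple-ssa.h):

    /* Replace every use of OLD_NAME (debug uses included) with NEW_VAL and
       rescan the affected statements.  */
    static void
    propagate_value_sketch (tree old_name, tree new_val)
    {
      imm_use_iterator iter;
      use_operand_p use_p;
      gimple stmt;

      FOR_EACH_IMM_USE_STMT (stmt, iter, old_name)
        {
          FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
            SET_USE (use_p, new_val);
          update_stmt (stmt);
          /* Leaving this loop early would require BREAK_FROM_IMM_USE_STMT
             so the marker node is unlinked.  */
        }
    }
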
+
+
+ extern bool has_zero_uses_1 (const ssa_use_operand_t *head);
+ extern bool single_imm_use_1 (const ssa_use_operand_t *head,
+ use_operand_p *use_p, gimple *stmt);
+
+
+ enum ssa_op_iter_type {
+ ssa_op_iter_none = 0,
+ ssa_op_iter_tree,
+ ssa_op_iter_use,
+ ssa_op_iter_def
+ };
+
+ /* This structure is used in the operand iterator loops. It contains the
+ items required to determine which operand is retrieved next. During
+ optimization, this structure is scalarized, and any unused fields are
+ optimized away, resulting in little overhead. */
+
+ typedef struct ssa_operand_iterator_d
+ {
+ enum ssa_op_iter_type iter_type;
+ bool done;
+ int flags;
+ unsigned i;
+ unsigned numops;
+ use_optype_p uses;
+ gimple stmt;
+ } ssa_op_iter;
+
+ /* These flags are used to determine which operands are returned during
+ execution of the loop. */
+ #define SSA_OP_USE 0x01 /* Real USE operands. */
+ #define SSA_OP_DEF 0x02 /* Real DEF operands. */
+ #define SSA_OP_VUSE 0x04 /* VUSE operands. */
+ #define SSA_OP_VDEF 0x08 /* VDEF operands. */
+
+ /* These are commonly grouped operand flags. */
+ #define SSA_OP_VIRTUAL_USES (SSA_OP_VUSE)
+ #define SSA_OP_VIRTUAL_DEFS (SSA_OP_VDEF)
+ #define SSA_OP_ALL_VIRTUALS (SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_DEFS)
+ #define SSA_OP_ALL_USES (SSA_OP_VIRTUAL_USES | SSA_OP_USE)
+ #define SSA_OP_ALL_DEFS (SSA_OP_VIRTUAL_DEFS | SSA_OP_DEF)
+ #define SSA_OP_ALL_OPERANDS (SSA_OP_ALL_USES | SSA_OP_ALL_DEFS)
+
+ /* This macro executes a loop over the operands of STMT specified in FLAG,
+ returning each operand as a 'tree' in the variable TREEVAR. ITER is an
+ ssa_op_iter structure used to control the loop. */
+ #define FOR_EACH_SSA_TREE_OPERAND(TREEVAR, STMT, ITER, FLAGS) \
+ for (TREEVAR = op_iter_init_tree (&(ITER), STMT, FLAGS); \
+ !op_iter_done (&(ITER)); \
+ (void) (TREEVAR = op_iter_next_tree (&(ITER))))
+
+ /* This macro executes a loop over the operands of STMT specified in FLAG,
+ returning each operand as a 'use_operand_p' in the variable USEVAR.
+ ITER is an ssa_op_iter structure used to control the loop. */
+ #define FOR_EACH_SSA_USE_OPERAND(USEVAR, STMT, ITER, FLAGS) \
+ for (USEVAR = op_iter_init_use (&(ITER), STMT, FLAGS); \
+ !op_iter_done (&(ITER)); \
+ USEVAR = op_iter_next_use (&(ITER)))
+
+ /* This macro executes a loop over the operands of STMT specified in FLAG,
+ returning each operand as a 'def_operand_p' in the variable DEFVAR.
+ ITER is an ssa_op_iter structure used to control the loop. */
+ #define FOR_EACH_SSA_DEF_OPERAND(DEFVAR, STMT, ITER, FLAGS) \
+ for (DEFVAR = op_iter_init_def (&(ITER), STMT, FLAGS); \
+ !op_iter_done (&(ITER)); \
+ DEFVAR = op_iter_next_def (&(ITER)))
+
+ /* This macro will execute a loop over all the arguments of a PHI which
+ match FLAGS. A use_operand_p is always returned via USEVAR. FLAGS
+ can be either SSA_OP_USE or SSA_OP_VIRTUAL_USES or SSA_OP_ALL_USES. */
+ #define FOR_EACH_PHI_ARG(USEVAR, STMT, ITER, FLAGS) \
+ for ((USEVAR) = op_iter_init_phiuse (&(ITER), STMT, FLAGS); \
+ !op_iter_done (&(ITER)); \
+ (USEVAR) = op_iter_next_use (&(ITER)))
+
+
+ /* This macro will execute a loop over a stmt, regardless of whether it is
+ a real stmt or a PHI node, looking at the USE nodes matching FLAGS. */
+ #define FOR_EACH_PHI_OR_STMT_USE(USEVAR, STMT, ITER, FLAGS) \
+ for ((USEVAR) = (gimple_code (STMT) == GIMPLE_PHI \
+ ? op_iter_init_phiuse (&(ITER), STMT, FLAGS) \
+ : op_iter_init_use (&(ITER), STMT, FLAGS)); \
+ !op_iter_done (&(ITER)); \
+ (USEVAR) = op_iter_next_use (&(ITER)))
+
+ /* This macro will execute a loop over a stmt, regardless of whether it is
+ a real stmt or a PHI node, looking at the DEF nodes matching FLAGS. */
+ #define FOR_EACH_PHI_OR_STMT_DEF(DEFVAR, STMT, ITER, FLAGS) \
+ for ((DEFVAR) = (gimple_code (STMT) == GIMPLE_PHI \
+ ? op_iter_init_phidef (&(ITER), STMT, FLAGS) \
+ : op_iter_init_def (&(ITER), STMT, FLAGS)); \
+ !op_iter_done (&(ITER)); \
+ (DEFVAR) = op_iter_next_def (&(ITER)))
+
+ /* This macro returns an operand in STMT as a tree if it is the ONLY
+ operand matching FLAGS. If there are 0 or more than 1 operand matching
+ FLAGS, then NULL_TREE is returned. */
+ #define SINGLE_SSA_TREE_OPERAND(STMT, FLAGS) \
+ single_ssa_tree_operand (STMT, FLAGS)
+
+ /* This macro returns an operand in STMT as a use_operand_p if it is the ONLY
+ operand matching FLAGS. If there are 0 or more than 1 operand matching
+ FLAGS, then NULL_USE_OPERAND_P is returned. */
+ #define SINGLE_SSA_USE_OPERAND(STMT, FLAGS) \
+ single_ssa_use_operand (STMT, FLAGS)
+
+ /* This macro returns an operand in STMT as a def_operand_p if it is the ONLY
+ operand matching FLAGS. If there are 0 or more than 1 operand matching
+ FLAGS, then NULL_DEF_OPERAND_P is returned. */
+ #define SINGLE_SSA_DEF_OPERAND(STMT, FLAGS) \
+ single_ssa_def_operand (STMT, FLAGS)
+
+ /* This macro returns TRUE if there are no operands matching FLAGS in STMT. */
+ #define ZERO_SSA_OPERANDS(STMT, FLAGS) zero_ssa_operands (STMT, FLAGS)
+
+ /* This macro counts the number of operands in STMT matching FLAGS. */
+ #define NUM_SSA_OPERANDS(STMT, FLAGS) num_ssa_operands (STMT, FLAGS)
+
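The PHI-or-stmt variants above let generic code ignore the PHI/non-PHI distinction.  A minimal sketch (illustrative only, not part of this patch; note_defs is a hypothetical helper, assumes the function is in SSA form, and uses the existing bitmap_set_bit and SSA_NAME_VERSION interfaces):

    /* Record the version of every name, real or virtual, defined by STMT,
       whether STMT is a PHI node or an ordinary statement.  */
    static void
    note_defs (gimple stmt, bitmap defs)
    {
      ssa_op_iter iter;
      def_operand_p def_p;

      FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, iter, SSA_OP_ALL_DEFS)
        bitmap_set_bit (defs, SSA_NAME_VERSION (get_def_from_ptr (def_p)));
    }
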
+
+ /* Delink an immediate_uses node from its chain. */
+ static inline void
+ delink_imm_use (ssa_use_operand_t *linknode)
+ {
+ /* Return if this node is not in a list. */
+ if (linknode->prev == NULL)
+ return;
+
+ linknode->prev->next = linknode->next;
+ linknode->next->prev = linknode->prev;
+ linknode->prev = NULL;
+ linknode->next = NULL;
+ }
+
+ /* Link ssa_imm_use node LINKNODE into the chain for LIST. */
+ static inline void
+ link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list)
+ {
+ /* Link the new node at the head of the list. If we are in the process of
+ traversing the list, we won't visit any new nodes added to it. */
+ linknode->prev = list;
+ linknode->next = list->next;
+ list->next->prev = linknode;
+ list->next = linknode;
+ }
+
+ /* Link ssa_imm_use node LINKNODE into the chain for DEF. */
+ static inline void
+ link_imm_use (ssa_use_operand_t *linknode, tree def)
+ {
+ ssa_use_operand_t *root;
+
+ if (!def || TREE_CODE (def) != SSA_NAME)
+ linknode->prev = NULL;
+ else
+ {
+ root = &(SSA_NAME_IMM_USE_NODE (def));
+ if (linknode->use)
+ gcc_checking_assert (*(linknode->use) == def);
+ link_imm_use_to_list (linknode, root);
+ }
+ }
+
+ /* Set the value of a use pointed to by USE to VAL. */
+ static inline void
+ set_ssa_use_from_ptr (use_operand_p use, tree val)
+ {
+ delink_imm_use (use);
+ *(use->use) = val;
+ link_imm_use (use, val);
+ }
+
+ /* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
+ in STMT. */
+ static inline void
+ link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple stmt)
+ {
+ if (stmt)
+ link_imm_use (linknode, def);
+ else
+ link_imm_use (linknode, NULL);
+ linknode->loc.stmt = stmt;
+ }
+
+ /* Relink a new node in place of an old node in the list. */
+ static inline void
+ relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
+ {
+ /* The node one had better be in the same list. */
+ gcc_checking_assert (*(old->use) == *(node->use));
+ node->prev = old->prev;
+ node->next = old->next;
+ if (old->prev)
+ {
+ old->prev->next = node;
+ old->next->prev = node;
+ /* Remove the old node from the list. */
+ old->prev = NULL;
+ }
+ }
+
+ /* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring
+ in STMT. */
+ static inline void
+ relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
+ gimple stmt)
+ {
+ if (stmt)
+ relink_imm_use (linknode, old);
+ else
+ link_imm_use (linknode, NULL);
+ linknode->loc.stmt = stmt;
+ }
+
+
+ /* Return true if IMM has reached the end of the immediate use list.  */
+ static inline bool
+ end_readonly_imm_use_p (const imm_use_iterator *imm)
+ {
+ return (imm->imm_use == imm->end_p);
+ }
+
+ /* Initialize iterator IMM to process the list for VAR. */
+ static inline use_operand_p
+ first_readonly_imm_use (imm_use_iterator *imm, tree var)
+ {
+ imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
+ imm->imm_use = imm->end_p->next;
+ #ifdef ENABLE_CHECKING
+ imm->iter_node.next = imm->imm_use->next;
+ #endif
+ if (end_readonly_imm_use_p (imm))
+ return NULL_USE_OPERAND_P;
+ return imm->imm_use;
+ }
+
+ /* Bump IMM to the next use in the list. */
+ static inline use_operand_p
+ next_readonly_imm_use (imm_use_iterator *imm)
+ {
+ use_operand_p old = imm->imm_use;
+
+ #ifdef ENABLE_CHECKING
+ /* If this assertion fails, it indicates the 'next' pointer has changed
+ since the last bump. This indicates that the list is being modified
+ via stmt changes, or SET_USE, or somesuch thing, and you need to be
+ using the SAFE version of the iterator. */
+ gcc_assert (imm->iter_node.next == old->next);
+ imm->iter_node.next = old->next->next;
+ #endif
+
+ imm->imm_use = old->next;
+ if (end_readonly_imm_use_p (imm))
+ return NULL_USE_OPERAND_P;
+ return imm->imm_use;
+ }
+
+
+ /* Return true if VAR has no nondebug uses. */
+ static inline bool
+ has_zero_uses (const_tree var)
+ {
+ const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
+
+   /* A single use_operand means there are no items in the list.  */
+ if (ptr == ptr->next)
+ return true;
+
+ /* If there are debug stmts, we have to look at each use and see
+ whether there are any nondebug uses. */
+ if (!MAY_HAVE_DEBUG_STMTS)
+ return false;
+
+ return has_zero_uses_1 (ptr);
+ }
+
+ /* Return true if VAR has a single nondebug use. */
+ static inline bool
+ has_single_use (const_tree var)
+ {
+ const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
+
+ /* If there aren't any uses whatsoever, we're done. */
+ if (ptr == ptr->next)
+ return false;
+
+ /* If there's a single use, check that it's not a debug stmt. */
+ if (ptr == ptr->next->next)
+ return !is_gimple_debug (USE_STMT (ptr->next));
+
+ /* If there are debug stmts, we have to look at each of them. */
+ if (!MAY_HAVE_DEBUG_STMTS)
+ return false;
+
+ return single_imm_use_1 (ptr, NULL, NULL);
+ }
+
+
+ /* If VAR has only a single immediate nondebug use, return true, and
+ set USE_P and STMT to the use pointer and stmt of occurrence. */
+ static inline bool
+ single_imm_use (const_tree var, use_operand_p *use_p, gimple *stmt)
+ {
+ const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
+
+ /* If there aren't any uses whatsoever, we're done. */
+ if (ptr == ptr->next)
+ {
+ return_false:
+ *use_p = NULL_USE_OPERAND_P;
+ *stmt = NULL;
+ return false;
+ }
+
+ /* If there's a single use, check that it's not a debug stmt. */
+ if (ptr == ptr->next->next)
+ {
+ if (!is_gimple_debug (USE_STMT (ptr->next)))
+ {
+ *use_p = ptr->next;
+ *stmt = ptr->next->loc.stmt;
+ return true;
+ }
+ else
+ goto return_false;
+ }
+
+ /* If there are debug stmts, we have to look at each of them. */
+ if (!MAY_HAVE_DEBUG_STMTS)
+ goto return_false;
+
+ return single_imm_use_1 (ptr, use_p, stmt);
+ }
+
+ /* Return the number of nondebug immediate uses of VAR. */
+ static inline unsigned int
+ num_imm_uses (const_tree var)
+ {
+ const ssa_use_operand_t *const start = &(SSA_NAME_IMM_USE_NODE (var));
+ const ssa_use_operand_t *ptr;
+ unsigned int num = 0;
+
+ if (!MAY_HAVE_DEBUG_STMTS)
+ for (ptr = start->next; ptr != start; ptr = ptr->next)
+ num++;
+ else
+ for (ptr = start->next; ptr != start; ptr = ptr->next)
+ if (!is_gimple_debug (USE_STMT (ptr)))
+ num++;
+
+ return num;
+ }
+
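A small sketch (illustrative only, not part of this patch) of how the debug-aware use-count helpers above are typically combined; cheap_to_propagate_p and its threshold are hypothetical:

    /* Return true if NAME is used at all and has at most two nondebug uses.  */
    static bool
    cheap_to_propagate_p (tree name)
    {
      if (has_zero_uses (name))
        return false;
      return has_single_use (name) || num_imm_uses (name) <= 2;
    }
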
+ /* ----------------------------------------------------------------------- */
+
+ /* The following set of routines are used to iterate over the various
+    types of SSA operands.  */
+
+ /* Return true if PTR is finished iterating. */
+ static inline bool
+ op_iter_done (const ssa_op_iter *ptr)
+ {
+ return ptr->done;
+ }
+
+ /* Get the next iterator use value for PTR. */
+ static inline use_operand_p
+ op_iter_next_use (ssa_op_iter *ptr)
+ {
+ use_operand_p use_p;
+ gcc_checking_assert (ptr->iter_type == ssa_op_iter_use);
+ if (ptr->uses)
+ {
+ use_p = USE_OP_PTR (ptr->uses);
+ ptr->uses = ptr->uses->next;
+ return use_p;
+ }
+ if (ptr->i < ptr->numops)
+ {
+ return PHI_ARG_DEF_PTR (ptr->stmt, (ptr->i)++);
+ }
+ ptr->done = true;
+ return NULL_USE_OPERAND_P;
+ }
+
+ /* Get the next iterator def value for PTR. */
+ static inline def_operand_p
+ op_iter_next_def (ssa_op_iter *ptr)
+ {
+ gcc_checking_assert (ptr->iter_type == ssa_op_iter_def);
+ if (ptr->flags & SSA_OP_VDEF)
+ {
+ tree *p;
+ ptr->flags &= ~SSA_OP_VDEF;
+ p = gimple_vdef_ptr (ptr->stmt);
+ if (p && *p)
+ return p;
+ }
+ if (ptr->flags & SSA_OP_DEF)
+ {
+ while (ptr->i < ptr->numops)
+ {
+ tree *val = gimple_op_ptr (ptr->stmt, ptr->i);
+ ptr->i++;
+ if (*val)
+ {
+ if (TREE_CODE (*val) == TREE_LIST)
+ val = &TREE_VALUE (*val);
+ if (TREE_CODE (*val) == SSA_NAME
+ || is_gimple_reg (*val))
+ return val;
+ }
+ }
+ ptr->flags &= ~SSA_OP_DEF;
+ }
+
+ ptr->done = true;
+ return NULL_DEF_OPERAND_P;
+ }
+
+ /* Get the next iterator tree value for PTR. */
+ static inline tree
+ op_iter_next_tree (ssa_op_iter *ptr)
+ {
+ tree val;
+ gcc_checking_assert (ptr->iter_type == ssa_op_iter_tree);
+ if (ptr->uses)
+ {
+ val = USE_OP (ptr->uses);
+ ptr->uses = ptr->uses->next;
+ return val;
+ }
+ if (ptr->flags & SSA_OP_VDEF)
+ {
+ ptr->flags &= ~SSA_OP_VDEF;
+ if ((val = gimple_vdef (ptr->stmt)))
+ return val;
+ }
+ if (ptr->flags & SSA_OP_DEF)
+ {
+ while (ptr->i < ptr->numops)
+ {
+ val = gimple_op (ptr->stmt, ptr->i);
+ ptr->i++;
+ if (val)
+ {
+ if (TREE_CODE (val) == TREE_LIST)
+ val = TREE_VALUE (val);
+ if (TREE_CODE (val) == SSA_NAME
+ || is_gimple_reg (val))
+ return val;
+ }
+ }
+ ptr->flags &= ~SSA_OP_DEF;
+ }
+
+ ptr->done = true;
+ return NULL_TREE;
+ }
+
+
+ /* This function clears the iterator PTR, and marks it done.  This is normally
+    used to prevent warnings in the compiler about possibly uninitialized
+    components.  */
+
+ static inline void
+ clear_and_done_ssa_iter (ssa_op_iter *ptr)
+ {
+ ptr->i = 0;
+ ptr->numops = 0;
+ ptr->uses = NULL;
+ ptr->iter_type = ssa_op_iter_none;
+ ptr->stmt = NULL;
+ ptr->done = true;
+ ptr->flags = 0;
+ }
+
+ /* Initialize the iterator PTR to the virtual defs in STMT. */
+ static inline void
+ op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
+ {
+ /* PHI nodes require a different iterator initialization path. We
+ do not support iterating over virtual defs or uses without
+ iterating over defs or uses at the same time. */
+ gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI
+ && (!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF))
+ && (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE)));
+ ptr->numops = 0;
+ if (flags & (SSA_OP_DEF | SSA_OP_VDEF))
+ {
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_ASSIGN:
+ case GIMPLE_CALL:
+ ptr->numops = 1;
+ break;
+ case GIMPLE_ASM:
+ ptr->numops = gimple_asm_noutputs (stmt);
+ break;
+ default:
+ ptr->numops = 0;
+ flags &= ~(SSA_OP_DEF | SSA_OP_VDEF);
+ break;
+ }
+ }
+ ptr->uses = (flags & (SSA_OP_USE|SSA_OP_VUSE)) ? gimple_use_ops (stmt) : NULL;
+ if (!(flags & SSA_OP_VUSE)
+ && ptr->uses
+ && gimple_vuse (stmt) != NULL_TREE)
+ ptr->uses = ptr->uses->next;
+ ptr->done = false;
+ ptr->i = 0;
+
+ ptr->stmt = stmt;
+ ptr->flags = flags;
+ }
+
+ /* Initialize iterator PTR to the use operands in STMT based on FLAGS. Return
+ the first use. */
+ static inline use_operand_p
+ op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
+ {
+ gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
+ && (flags & SSA_OP_USE));
+ op_iter_init (ptr, stmt, flags);
+ ptr->iter_type = ssa_op_iter_use;
+ return op_iter_next_use (ptr);
+ }
+
+ /* Initialize iterator PTR to the def operands in STMT based on FLAGS. Return
+ the first def. */
+ static inline def_operand_p
+ op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
+ {
+ gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
+ && (flags & SSA_OP_DEF));
+ op_iter_init (ptr, stmt, flags);
+ ptr->iter_type = ssa_op_iter_def;
+ return op_iter_next_def (ptr);
+ }
+
+ /* Initialize iterator PTR to the operands in STMT based on FLAGS. Return
+ the first operand as a tree. */
+ static inline tree
+ op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
+ {
+ op_iter_init (ptr, stmt, flags);
+ ptr->iter_type = ssa_op_iter_tree;
+ return op_iter_next_tree (ptr);
+ }
+
+
+ /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
+ return NULL. */
+ static inline tree
+ single_ssa_tree_operand (gimple stmt, int flags)
+ {
+ tree var;
+ ssa_op_iter iter;
+
+ var = op_iter_init_tree (&iter, stmt, flags);
+ if (op_iter_done (&iter))
+ return NULL_TREE;
+ op_iter_next_tree (&iter);
+ if (op_iter_done (&iter))
+ return var;
+ return NULL_TREE;
+ }
+
+
+ /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
+ return NULL. */
+ static inline use_operand_p
+ single_ssa_use_operand (gimple stmt, int flags)
+ {
+ use_operand_p var;
+ ssa_op_iter iter;
+
+ var = op_iter_init_use (&iter, stmt, flags);
+ if (op_iter_done (&iter))
+ return NULL_USE_OPERAND_P;
+ op_iter_next_use (&iter);
+ if (op_iter_done (&iter))
+ return var;
+ return NULL_USE_OPERAND_P;
+ }
+
+
+
+ /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
+ return NULL. */
+ static inline def_operand_p
+ single_ssa_def_operand (gimple stmt, int flags)
+ {
+ def_operand_p var;
+ ssa_op_iter iter;
+
+ var = op_iter_init_def (&iter, stmt, flags);
+ if (op_iter_done (&iter))
+ return NULL_DEF_OPERAND_P;
+ op_iter_next_def (&iter);
+ if (op_iter_done (&iter))
+ return var;
+ return NULL_DEF_OPERAND_P;
+ }
+
+
+ /* Return true if there are zero operands in STMT matching the type
+ given in FLAGS. */
+ static inline bool
+ zero_ssa_operands (gimple stmt, int flags)
+ {
+ ssa_op_iter iter;
+
+ op_iter_init_tree (&iter, stmt, flags);
+ return op_iter_done (&iter);
+ }
+
+
+ /* Return the number of operands matching FLAGS in STMT. */
+ static inline int
+ num_ssa_operands (gimple stmt, int flags)
+ {
+ ssa_op_iter iter;
+ tree t;
+ int num = 0;
+
+ gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI);
+ FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
+ num++;
+ return num;
+ }
+
+ /* If there is a single DEF in the PHI node which matches FLAG, return it.
+ Otherwise return NULL_DEF_OPERAND_P. */
+ static inline tree
+ single_phi_def (gimple stmt, int flags)
+ {
+ tree def = PHI_RESULT (stmt);
+ if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
+ return def;
+ if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
+ return def;
+ return NULL_TREE;
+ }
+
+ /* Initialize the iterator PTR for uses matching FLAGS in PHI.  FLAGS should
+    be either SSA_OP_USE or SSA_OP_VIRTUAL_USES.  */
+ static inline use_operand_p
+ op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
+ {
+ tree phi_def = gimple_phi_result (phi);
+ int comp;
+
+ clear_and_done_ssa_iter (ptr);
+ ptr->done = false;
+
+ gcc_checking_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);
+
+ comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);
+
+    /* If the PHI node doesn't have the operand type we care about,
+       we're done.  */
+ if ((flags & comp) == 0)
+ {
+ ptr->done = true;
+ return NULL_USE_OPERAND_P;
+ }
+
+ ptr->stmt = phi;
+ ptr->numops = gimple_phi_num_args (phi);
+ ptr->iter_type = ssa_op_iter_use;
+ ptr->flags = flags;
+ return op_iter_next_use (ptr);
+ }
+
+
+ /* Start an iterator for a PHI definition. */
+
+ static inline def_operand_p
+ op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
+ {
+ tree phi_def = PHI_RESULT (phi);
+ int comp;
+
+ clear_and_done_ssa_iter (ptr);
+ ptr->done = false;
+
+ gcc_checking_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);
+
+ comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);
+
+ /* If the PHI node doesn't have the operand type we care about,
+ we're done. */
+ if ((flags & comp) == 0)
+ {
+ ptr->done = true;
+ return NULL_DEF_OPERAND_P;
+ }
+
+ ptr->iter_type = ssa_op_iter_def;
+ /* The first call to op_iter_next_def will terminate the iterator since
+ all the fields are NULL. Simply return the result here as the first and
+ therefore only result. */
+ return PHI_RESULT_PTR (phi);
+ }
+
+ /* Return true if IMM has reached the end of the immediate use stmt list.  */
+
+ static inline bool
+ end_imm_use_stmt_p (const imm_use_iterator *imm)
+ {
+ return (imm->imm_use == imm->end_p);
+ }
+
+ /* Finish the traversal of an immediate use stmt list IMM by removing the
+    placeholder node from the list.  */
+
+ static inline void
+ end_imm_use_stmt_traverse (imm_use_iterator *imm)
+ {
+ delink_imm_use (&(imm->iter_node));
+ }
+
+ /* Immediate use traversal of uses within a stmt requires that all the
+    uses on a stmt be sequentially listed.  This routine is used to build up
+    this sequential list by adding USE_P to the end of the list currently
+    delimited by HEAD and LAST_P.  The new LAST_P value is returned.  */
+
+ static inline use_operand_p
+ move_use_after_head (use_operand_p use_p, use_operand_p head,
+ use_operand_p last_p)
+ {
+ gcc_checking_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
+ /* Skip head when we find it. */
+ if (use_p != head)
+ {
+ /* If use_p is already linked in after last_p, continue. */
+ if (last_p->next == use_p)
+ last_p = use_p;
+ else
+ {
+ /* Delink from current location, and link in at last_p. */
+ delink_imm_use (use_p);
+ link_imm_use_to_list (use_p, last_p);
+ last_p = use_p;
+ }
+ }
+ return last_p;
+ }
+
+
+ /* This routine will relink all uses with the same stmt as HEAD into the list
+ immediately following HEAD for iterator IMM. */
+
+ static inline void
+ link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
+ {
+ use_operand_p use_p;
+ use_operand_p last_p = head;
+ gimple head_stmt = USE_STMT (head);
+ tree use = USE_FROM_PTR (head);
+ ssa_op_iter op_iter;
+ int flag;
+
+ /* Only look at virtual or real uses, depending on the type of HEAD. */
+ flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);
+
+ if (gimple_code (head_stmt) == GIMPLE_PHI)
+ {
+ FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
+ if (USE_FROM_PTR (use_p) == use)
+ last_p = move_use_after_head (use_p, head, last_p);
+ }
+ else
+ {
+ if (flag == SSA_OP_USE)
+ {
+ FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
+ if (USE_FROM_PTR (use_p) == use)
+ last_p = move_use_after_head (use_p, head, last_p);
+ }
+ else if ((use_p = gimple_vuse_op (head_stmt)) != NULL_USE_OPERAND_P)
+ {
+ if (USE_FROM_PTR (use_p) == use)
+ last_p = move_use_after_head (use_p, head, last_p);
+ }
+ }
+ /* Link iter node in after last_p. */
+ if (imm->iter_node.prev != NULL)
+ delink_imm_use (&imm->iter_node);
+ link_imm_use_to_list (&(imm->iter_node), last_p);
+ }
+
+ /* Initialize IMM to traverse over uses of VAR. Return the first statement. */
+ static inline gimple
+ first_imm_use_stmt (imm_use_iterator *imm, tree var)
+ {
+ imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
+ imm->imm_use = imm->end_p->next;
+ imm->next_imm_name = NULL_USE_OPERAND_P;
+
+ /* iter_node is used as a marker within the immediate use list to indicate
+ where the end of the current stmt's uses are. Initialize it to NULL
+ stmt and use, which indicates a marker node. */
+ imm->iter_node.prev = NULL_USE_OPERAND_P;
+ imm->iter_node.next = NULL_USE_OPERAND_P;
+ imm->iter_node.loc.stmt = NULL;
+ imm->iter_node.use = NULL;
+
+ if (end_imm_use_stmt_p (imm))
+ return NULL;
+
+ link_use_stmts_after (imm->imm_use, imm);
+
+ return USE_STMT (imm->imm_use);
+ }
+
+ /* Bump IMM to the next stmt which has a use of var. */
+
+ static inline gimple
+ next_imm_use_stmt (imm_use_iterator *imm)
+ {
+ imm->imm_use = imm->iter_node.next;
+ if (end_imm_use_stmt_p (imm))
+ {
+ if (imm->iter_node.prev != NULL)
+ delink_imm_use (&imm->iter_node);
+ return NULL;
+ }
+
+ link_use_stmts_after (imm->imm_use, imm);
+ return USE_STMT (imm->imm_use);
+ }
+
+ /* This routine will return the first use on the stmt IMM currently refers
+ to. */
+
+ static inline use_operand_p
+ first_imm_use_on_stmt (imm_use_iterator *imm)
+ {
+ imm->next_imm_name = imm->imm_use->next;
+ return imm->imm_use;
+ }
+
+ /* Return TRUE if the last use on the stmt IMM refers to has been visited. */
+
+ static inline bool
+ end_imm_use_on_stmt_p (const imm_use_iterator *imm)
+ {
+ return (imm->imm_use == &(imm->iter_node));
+ }
+
+ /* Bump to the next use on the stmt IMM refers to, return NULL if done. */
+
+ static inline use_operand_p
+ next_imm_use_on_stmt (imm_use_iterator *imm)
+ {
+ imm->imm_use = imm->next_imm_name;
+ if (end_imm_use_on_stmt_p (imm))
+ return NULL_USE_OPERAND_P;
+ else
+ {
+ imm->next_imm_name = imm->imm_use->next;
+ return imm->imm_use;
+ }
+ }
+
+ /* Delink all immediate_use information for STMT. */
+ static inline void
+ delink_stmt_imm_use (gimple stmt)
+ {
+ ssa_op_iter iter;
+ use_operand_p use_p;
+
+ if (ssa_operands_active (cfun))
+ FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_ALL_USES)
+ delink_imm_use (use_p);
+ }
+
+ #endif /* GCC_SSA_ITERATORS_H */
===================================================================
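For readers new to the header above, the FOR_EACH_* macros are simply the op_iter_init_* / op_iter_next_* / op_iter_done protocol implemented by its inline functions, written out here by hand for one case (illustrative only, not part of this patch; walk_real_uses_by_hand is a hypothetical name and USE_FROM_PTR is the existing accessor macro):

    /* Equivalent of FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE).  */
    static void
    walk_real_uses_by_hand (gimple stmt)
    {
      ssa_op_iter iter;
      use_operand_p use_p;

      for (use_p = op_iter_init_use (&iter, stmt, SSA_OP_USE);
           !op_iter_done (&iter);
           use_p = op_iter_next_use (&iter))
        {
          tree name = USE_FROM_PTR (use_p);   /* The SSA name in this use slot.  */
          (void) name;
        }
    }
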
*************** along with GCC; see the file COPYING3.
<http://www.gnu.org/licenses/>. */
! #ifndef _SSAEXPAND_H
! #define _SSAEXPAND_H 1
#include "tree-ssa-live.h"
#include "tree-ssa-ter.h"
<http://www.gnu.org/licenses/>. */
! #ifndef GCC_TREE_OUTOF_SSA_H
! #define GCC_TREE_OUTOF_SSA_H
#include "tree-ssa-live.h"
#include "tree-ssa-ter.h"
*************** extern void finish_out_of_ssa (struct ss
extern unsigned int rewrite_out_of_ssa (struct ssaexpand *sa);
extern void expand_phi_nodes (struct ssaexpand *sa);
! #endif
extern unsigned int rewrite_out_of_ssa (struct ssaexpand *sa);
extern void expand_phi_nodes (struct ssaexpand *sa);
! #endif /* GCC_TREE_OUTOF_SSA_H */
===================================================================
*************** OBJS = \
gimple-fold.o \
gimple-low.o \
gimple-pretty-print.o \
+ gimple-ssa.o \
gimple-ssa-strength-reduction.o \
gimple-streamer-in.o \
gimple-streamer-out.o \