@@ -2910,7 +2910,7 @@ finalize_nrv_unc_r (tree *tp, int *walk_subtrees, void *data)
= VEC_index (constructor_elt,
CONSTRUCTOR_ELTS
(TREE_OPERAND (TREE_OPERAND (ret_val, 0), 1)),
- 1)->value;
+ 1).value;
else
ret_val = TREE_OPERAND (TREE_OPERAND (ret_val, 0), 1);
}
@@ -2969,7 +2969,7 @@ finalize_nrv_unc_r (tree *tp, int *walk_subtrees, void *data)
TREE_OPERAND (alloc, 0),
VEC_index (constructor_elt,
CONSTRUCTOR_ELTS (TREE_OPERAND (alloc, 1)),
- 0)->value);
+ 0).value);
/* Build a modified CONSTRUCTOR that references NEW_VAR. */
p_array = TYPE_FIELDS (TREE_TYPE (alloc));
@@ -2979,7 +2979,7 @@ finalize_nrv_unc_r (tree *tp, int *walk_subtrees, void *data)
VEC_index (constructor_elt,
CONSTRUCTOR_ELTS
(TREE_OPERAND (alloc, 1)),
- 1)->value);
+ 1).value);
new_ret = build_constructor (TREE_TYPE (alloc), v);
}
else
@@ -4487,10 +4487,10 @@ convert (tree type, tree expr)
inner expression. */
if (TREE_CODE (expr) == CONSTRUCTOR
&& !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
- && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
+ && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0).index
== TYPE_FIELDS (etype))
unpadded
- = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
+ = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0).value;
/* Otherwise, build an explicit component reference. */
else
@@ -5043,7 +5043,7 @@ remove_conversions (tree exp, bool true_address)
&& TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
return
remove_conversions (VEC_index (constructor_elt,
- CONSTRUCTOR_ELTS (exp), 0)->value,
+ CONSTRUCTOR_ELTS (exp), 0).value,
true);
break;
@@ -442,7 +442,7 @@ compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
/* The constant folder doesn't fold fat pointer types so we do it here. */
if (TREE_CODE (p1) == CONSTRUCTOR)
- p1_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 0)->value;
+ p1_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 0).value;
else
p1_array = build_component_ref (p1, NULL_TREE,
TYPE_FIELDS (TREE_TYPE (p1)), true);
@@ -453,7 +453,7 @@ compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
null_pointer_node));
if (TREE_CODE (p2) == CONSTRUCTOR)
- p2_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 0)->value;
+ p2_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 0).value;
else
p2_array = build_component_ref (p2, NULL_TREE,
TYPE_FIELDS (TREE_TYPE (p2)), true);
@@ -474,14 +474,14 @@ compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
= fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
if (TREE_CODE (p1) == CONSTRUCTOR)
- p1_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 1)->value;
+ p1_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 1).value;
else
p1_bounds
= build_component_ref (p1, NULL_TREE,
DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);
if (TREE_CODE (p2) == CONSTRUCTOR)
- p2_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 1)->value;
+ p2_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 1).value;
else
p2_bounds
= build_component_ref (p2, NULL_TREE,
@@ -1337,7 +1337,7 @@ build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
result = VEC_index (constructor_elt,
CONSTRUCTOR_ELTS (operand),
- 0)->value;
+ 0).value;
result = convert (build_pointer_type (TREE_TYPE (operand)),
build_unary_op (ADDR_EXPR, NULL_TREE, result));
break;
@@ -2678,9 +2678,9 @@ gnat_stabilize_reference (tree ref, bool force, bool *success)
&& VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ref)) == 1)
{
tree index
- = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->index;
+ = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0).index;
tree value
- = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->value;
+ = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0).value;
result
= build_constructor_single (type, index,
gnat_stabilize_reference_1 (value,
@@ -867,8 +867,8 @@ new_alias_set (void)
if (flag_strict_aliasing)
{
if (alias_sets == 0)
- VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
- VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
+ VEC_safe_push (alias_set_entry, gc, alias_sets, (alias_set_entry) 0);
+ VEC_safe_push (alias_set_entry, gc, alias_sets, (alias_set_entry) 0);
return VEC_length (alias_set_entry, alias_sets) - 1;
}
else
@@ -3933,7 +3933,7 @@ add_flexible_array_elts_to_size (tree decl, tree init)
if (VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (init)))
return;
- elt = VEC_last (constructor_elt, CONSTRUCTOR_ELTS (init))->value;
+ elt = VEC_last (constructor_elt, CONSTRUCTOR_ELTS (init)).value;
type = TREE_TYPE (elt);
if (TREE_CODE (type) == ARRAY_TYPE
&& TYPE_SIZE (type) == NULL_TREE
@@ -8279,7 +8279,7 @@ parse_optimize_options (tree args, bool attr_p)
/* Build up argv vector. Just in case the string is stored away, use garbage
collected strings. */
VEC_truncate (const_char_p, optimize_args, 0);
- VEC_safe_push (const_char_p, gc, optimize_args, NULL);
+ VEC_safe_push (const_char_p, gc, optimize_args, (const_char_p) NULL);
for (ap = args; ap != NULL_TREE; ap = TREE_CHAIN (ap))
{
@@ -9277,10 +9277,10 @@ complete_array_type (tree *ptype, tree initial_value, bool do_default)
constructor_elt *ce;
bool fold_p = false;
- if (VEC_index (constructor_elt, v, 0)->index)
+ if (VEC_index (constructor_elt, v, 0).index)
maxindex = fold_convert_loc (input_location, sizetype,
VEC_index (constructor_elt,
- v, 0)->index);
+ v, 0).index);
curindex = maxindex;
for (cnt = 1;
@@ -1210,9 +1210,9 @@ c_pp_lookup_pragma (unsigned int id, const char **space, const char **name)
+ VEC_length (pragma_ns_name, registered_pp_pragmas)))
{
*space = VEC_index (pragma_ns_name, registered_pp_pragmas,
- id - PRAGMA_FIRST_EXTERNAL)->space;
+ id - PRAGMA_FIRST_EXTERNAL).space;
*name = VEC_index (pragma_ns_name, registered_pp_pragmas,
- id - PRAGMA_FIRST_EXTERNAL)->name;
+ id - PRAGMA_FIRST_EXTERNAL).name;
return;
}
@@ -1335,7 +1335,7 @@ c_invoke_pragma_handler (unsigned int id)
pragma_handler_2arg handler_2arg;
id -= PRAGMA_FIRST_EXTERNAL;
- ihandler = VEC_index (internal_pragma_handler, registered_pragmas, id);
+ ihandler = &VEC_index (internal_pragma_handler, registered_pragmas, id);
if (ihandler->extra_data)
{
handler_2arg = ihandler->handler.handler_2arg;
@@ -6530,9 +6530,9 @@ c_parser_postfix_expression (c_parser *parser)
break;
}
- e1_p = VEC_index (c_expr_t, cexpr_list, 0);
- e2_p = VEC_index (c_expr_t, cexpr_list, 1);
- e3_p = VEC_index (c_expr_t, cexpr_list, 2);
+ e1_p = &VEC_index (c_expr_t, cexpr_list, 0);
+ e2_p = &VEC_index (c_expr_t, cexpr_list, 1);
+ e3_p = &VEC_index (c_expr_t, cexpr_list, 2);
c = e1_p->value;
mark_exp_read (e2_p->value);
@@ -6612,8 +6612,8 @@ c_parser_postfix_expression (c_parser *parser)
break;
}
- e1_p = VEC_index (c_expr_t, cexpr_list, 0);
- e2_p = VEC_index (c_expr_t, cexpr_list, 1);
+ e1_p = &VEC_index (c_expr_t, cexpr_list, 0);
+ e2_p = &VEC_index (c_expr_t, cexpr_list, 1);
mark_exp_read (e1_p->value);
if (TREE_CODE (e1_p->value) == EXCESS_PRECISION_EXPR)
@@ -6672,15 +6672,15 @@ c_parser_postfix_expression (c_parser *parser)
if (VEC_length (c_expr_t, cexpr_list) == 2)
expr.value =
c_build_vec_perm_expr
- (loc, VEC_index (c_expr_t, cexpr_list, 0)->value,
- NULL_TREE, VEC_index (c_expr_t, cexpr_list, 1)->value);
+ (loc, VEC_index (c_expr_t, cexpr_list, 0).value,
+ NULL_TREE, VEC_index (c_expr_t, cexpr_list, 1).value);
else if (VEC_length (c_expr_t, cexpr_list) == 3)
expr.value =
c_build_vec_perm_expr
- (loc, VEC_index (c_expr_t, cexpr_list, 0)->value,
- VEC_index (c_expr_t, cexpr_list, 1)->value,
- VEC_index (c_expr_t, cexpr_list, 2)->value);
+ (loc, VEC_index (c_expr_t, cexpr_list, 0).value,
+ VEC_index (c_expr_t, cexpr_list, 1).value,
+ VEC_index (c_expr_t, cexpr_list, 2).value);
else
{
error_at (loc, "wrong number of arguments to "
@@ -7026,7 +7026,7 @@ pop_init_level (int implicit, struct obstack * braced_init_obstack)
bool constructor_zeroinit =
(VEC_length (constructor_elt, constructor_elements) == 1
&& integer_zerop
- (VEC_index (constructor_elt, constructor_elements, 0)->value));
+ (VEC_index (constructor_elt, constructor_elements, 0).value));
/* Do not warn for flexible array members or zero-length arrays. */
while (constructor_unfilled_fields
@@ -7073,10 +7073,10 @@ pop_init_level (int implicit, struct obstack * braced_init_obstack)
else if (VEC_length (constructor_elt,constructor_elements) != 1)
{
error_init ("extra elements in scalar initializer");
- ret.value = VEC_index (constructor_elt,constructor_elements,0)->value;
+ ret.value = VEC_index (constructor_elt,constructor_elements,0).value;
}
else
- ret.value = VEC_index (constructor_elt,constructor_elements,0)->value;
+ ret.value = VEC_index (constructor_elt,constructor_elements,0).value;
}
else
{
@@ -7747,9 +7747,9 @@ find_init_member (tree field, struct obstack * braced_init_obstack)
else if (TREE_CODE (constructor_type) == UNION_TYPE)
{
if (!VEC_empty (constructor_elt, constructor_elements)
- && (VEC_last (constructor_elt, constructor_elements)->index
+ && (VEC_last (constructor_elt, constructor_elements).index
== field))
- return VEC_last (constructor_elt, constructor_elements)->value;
+ return VEC_last (constructor_elt, constructor_elements).value;
}
return 0;
}
@@ -7932,7 +7932,7 @@ output_init_element (tree value, tree origtype, bool strict_string, tree type,
if (!implicit)
{
if (TREE_SIDE_EFFECTS (VEC_last (constructor_elt,
- constructor_elements)->value))
+ constructor_elements).value))
warning_init (0,
"initialized field with side-effects overwritten");
else if (warn_override_init)
@@ -528,7 +528,7 @@ locator_location (int loc)
break;
}
}
- return *VEC_index (location_t, locations_locators_vals, min);
+ return VEC_index (location_t, locations_locators_vals, min);
}
/* Return source line of the statement that produced this insn. */
@@ -1590,7 +1590,7 @@ set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
(DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x))
&& HWI_COMPUTABLE_MODE_P (GET_MODE (x)))
{
- reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
+ reg_stat_type *rsp = &VEC_index (reg_stat_type, reg_stat, REGNO (x));
if (set == 0 || GET_CODE (set) == CLOBBER)
{
@@ -3793,21 +3793,21 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p,
&& ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
(REG_P (temp)
&& VEC_index (reg_stat_type, reg_stat,
- REGNO (temp))->nonzero_bits != 0
+ REGNO (temp)).nonzero_bits != 0
&& GET_MODE_PRECISION (GET_MODE (temp)) < BITS_PER_WORD
&& GET_MODE_PRECISION (GET_MODE (temp)) < HOST_BITS_PER_INT
&& (VEC_index (reg_stat_type, reg_stat,
- REGNO (temp))->nonzero_bits
+ REGNO (temp)).nonzero_bits
!= GET_MODE_MASK (word_mode))))
&& ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
&& (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
(REG_P (temp)
&& VEC_index (reg_stat_type, reg_stat,
- REGNO (temp))->nonzero_bits != 0
+ REGNO (temp)).nonzero_bits != 0
&& GET_MODE_PRECISION (GET_MODE (temp)) < BITS_PER_WORD
&& GET_MODE_PRECISION (GET_MODE (temp)) < HOST_BITS_PER_INT
&& (VEC_index (reg_stat_type, reg_stat,
- REGNO (temp))->nonzero_bits
+ REGNO (temp)).nonzero_bits
!= GET_MODE_MASK (word_mode)))))
&& ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
SET_SRC (XVECEXP (newpat, 0, 1)))
@@ -9577,7 +9577,7 @@ reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
value. Otherwise, use the previously-computed global nonzero bits
for this register. */
- rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
+ rsp = &VEC_index (reg_stat_type, reg_stat, REGNO (x));
if (rsp->last_set_value != 0
&& (rsp->last_set_mode == mode
|| (GET_MODE_CLASS (rsp->last_set_mode) == MODE_INT
@@ -9646,7 +9646,7 @@ reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode,
rtx tem;
reg_stat_type *rsp;
- rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
+ rsp = &VEC_index (reg_stat_type, reg_stat, REGNO (x));
if (rsp->last_set_value != 0
&& rsp->last_set_mode == mode
&& ((rsp->last_set_label >= label_tick_ebb_start
@@ -12195,7 +12195,7 @@ update_table_tick (rtx x)
for (r = regno; r < endregno; r++)
{
- reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, r);
+ reg_stat_type *rsp = &VEC_index (reg_stat_type, reg_stat, r);
rsp->last_set_table_tick = label_tick;
}
@@ -12297,7 +12297,7 @@ record_value_for_reg (rtx reg, rtx insn, rtx value)
register. */
for (i = regno; i < endregno; i++)
{
- rsp = VEC_index (reg_stat_type, reg_stat, i);
+ rsp = &VEC_index (reg_stat_type, reg_stat, i);
if (insn)
rsp->last_set = insn;
@@ -12323,7 +12323,7 @@ record_value_for_reg (rtx reg, rtx insn, rtx value)
for (i = regno; i < endregno; i++)
{
- rsp = VEC_index (reg_stat_type, reg_stat, i);
+ rsp = &VEC_index (reg_stat_type, reg_stat, i);
rsp->last_set_label = label_tick;
if (!insn
|| (value && rsp->last_set_table_tick >= label_tick_ebb_start))
@@ -12335,7 +12335,7 @@ record_value_for_reg (rtx reg, rtx insn, rtx value)
/* The value being assigned might refer to X (like in "x++;"). In that
case, we must replace it with (clobber (const_int 0)) to prevent
infinite loops. */
- rsp = VEC_index (reg_stat_type, reg_stat, regno);
+ rsp = &VEC_index (reg_stat_type, reg_stat, regno);
if (value && !get_last_value_validate (&value, insn, label_tick, 0))
{
value = copy_rtx (value);
@@ -12433,7 +12433,7 @@ record_dead_and_set_regs (rtx insn)
{
reg_stat_type *rsp;
- rsp = VEC_index (reg_stat_type, reg_stat, i);
+ rsp = &VEC_index (reg_stat_type, reg_stat, i);
rsp->last_death = insn;
}
}
@@ -12448,7 +12448,7 @@ record_dead_and_set_regs (rtx insn)
{
reg_stat_type *rsp;
- rsp = VEC_index (reg_stat_type, reg_stat, i);
+ rsp = &VEC_index (reg_stat_type, reg_stat, i);
rsp->last_set_invalid = 1;
rsp->last_set = insn;
rsp->last_set_value = 0;
@@ -12506,7 +12506,7 @@ record_promoted_value (rtx insn, rtx subreg)
continue;
}
- rsp = VEC_index (reg_stat_type, reg_stat, regno);
+ rsp = &VEC_index (reg_stat_type, reg_stat, regno);
if (rsp->last_set == insn)
{
if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
@@ -12531,7 +12531,7 @@ record_promoted_value (rtx insn, rtx subreg)
static bool
reg_truncated_to_mode (enum machine_mode mode, const_rtx x)
{
- reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
+ reg_stat_type *rsp = &VEC_index (reg_stat_type, reg_stat, REGNO (x));
enum machine_mode truncated = rsp->truncated_to_mode;
if (truncated == 0
@@ -12576,7 +12576,7 @@ record_truncated_value (rtx *p, void *data ATTRIBUTE_UNUSED)
else
return 0;
- rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
+ rsp = &VEC_index (reg_stat_type, reg_stat, REGNO (x));
if (rsp->truncated_to_mode == 0
|| rsp->truncation_label < label_tick_ebb_start
|| (GET_MODE_SIZE (truncated_mode)
@@ -12655,7 +12655,7 @@ get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
for (j = regno; j < endregno; j++)
{
- reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, j);
+ reg_stat_type *rsp = &VEC_index (reg_stat_type, reg_stat, j);
if (rsp->last_set_invalid
/* If this is a pseudo-register that was only set once and not
live at the beginning of the function, it is always valid. */
@@ -12759,7 +12759,7 @@ get_last_value (const_rtx x)
return 0;
regno = REGNO (x);
- rsp = VEC_index (reg_stat_type, reg_stat, regno);
+ rsp = &VEC_index (reg_stat_type, reg_stat, regno);
value = rsp->last_set_value;
/* If we don't have a value, or if it isn't for this basic block and
@@ -12823,7 +12823,7 @@ use_crosses_set_p (const_rtx x, int from_luid)
#endif
for (; regno < endreg; regno++)
{
- reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);
+ reg_stat_type *rsp = &VEC_index (reg_stat_type, reg_stat, regno);
if (rsp->last_set
&& rsp->last_set_label == label_tick
&& DF_INSN_LUID (rsp->last_set) > from_luid)
@@ -13071,7 +13071,7 @@ move_deaths (rtx x, rtx maybe_kill_insn, int from_luid, rtx to_insn,
if (code == REG)
{
unsigned int regno = REGNO (x);
- rtx where_dead = VEC_index (reg_stat_type, reg_stat, regno)->last_death;
+ rtx where_dead = VEC_index (reg_stat_type, reg_stat, regno).last_death;
/* Don't move the register if it gets killed in between from and to. */
if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
@@ -13686,7 +13686,7 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
if (place && REG_NOTE_KIND (note) == REG_DEAD)
{
unsigned int regno = REGNO (XEXP (note, 0));
- reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);
+ reg_stat_type *rsp = &VEC_index (reg_stat_type, reg_stat, regno);
if (dead_or_set_p (place, XEXP (note, 0))
|| reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
@@ -1920,7 +1920,8 @@ add_function_candidate (struct z_candidate **candidates,
for (i = 0; i < len; ++i)
{
- tree arg, argtype, to_type;
+ tree argtype, to_type;
+ tree arg;
conversion *t;
int is_this;
@@ -1930,8 +1931,9 @@ add_function_candidate (struct z_candidate **candidates,
if (i == 0 && first_arg != NULL_TREE)
arg = first_arg;
else
- arg = VEC_index (tree, args,
- i + skip - (first_arg != NULL_TREE ? 1 : 0));
+ arg = CONST_CAST_TREE (
+ VEC_index (tree, args,
+ i + skip - (first_arg != NULL_TREE ? 1 : 0)));
argtype = lvalue_type (arg);
is_this = (i == 0 && DECL_NONSTATIC_MEMBER_FUNCTION_P (fn)
@@ -8344,12 +8344,12 @@ build_vtbl_initializer (tree binfo,
int new_position = (TARGET_VTABLE_DATA_ENTRY_DISTANCE * ix
+ (TARGET_VTABLE_DATA_ENTRY_DISTANCE - 1));
- VEC_replace (constructor_elt, vid.inits, new_position, e);
+ VEC_replace (constructor_elt, vid.inits, new_position, *e);
for (j = 1; j < TARGET_VTABLE_DATA_ENTRY_DISTANCE; ++j)
{
- constructor_elt *f = VEC_index (constructor_elt, vid.inits,
- new_position - j);
+ constructor_elt *f = &VEC_index (constructor_elt, vid.inits,
+ new_position - j);
f->index = NULL_TREE;
f->value = build1 (NOP_EXPR, vtable_entry_type,
null_pointer_node);
@@ -8370,7 +8370,7 @@ build_vtbl_initializer (tree binfo,
for (ix = VEC_length (constructor_elt, vid.inits) - 1;
VEC_iterate (constructor_elt, vid.inits, ix, e);
ix--, jx++)
- VEC_replace (constructor_elt, *inits, jx, e);
+ VEC_replace (constructor_elt, *inits, jx, *e);
/* Go through all the ordinary virtual functions, building up
initializers. */
@@ -5292,7 +5292,7 @@ reshape_init_r (tree type, reshape_iter *d, bool first_initializer_p,
&& VEC_length (constructor_elt, CONSTRUCTOR_ELTS (str_init)) == 1)
{
str_init = VEC_index (constructor_elt,
- CONSTRUCTOR_ELTS (str_init), 0)->value;
+ CONSTRUCTOR_ELTS (str_init), 0).value;
}
/* If it's a string literal, then it's the initializer for the array
@@ -5382,7 +5382,7 @@ reshape_init (tree type, tree init, tsubst_flags_t complain)
return init;
/* Recurse on this CONSTRUCTOR. */
- d.cur = VEC_index (constructor_elt, v, 0);
+ d.cur = &VEC_index (constructor_elt, v, 0);
d.end = d.cur + VEC_length (constructor_elt, v);
new_init = reshape_init_r (type, &d, true, complain);
@@ -5927,7 +5927,7 @@ type_dependent_init_p (tree init)
nelts = VEC_length (constructor_elt, elts);
for (i = 0; i < nelts; ++i)
if (type_dependent_init_p (VEC_index (constructor_elt,
- elts, i)->value))
+ elts, i).value))
return true;
}
else
@@ -5957,7 +5957,7 @@ value_dependent_init_p (tree init)
nelts = VEC_length (constructor_elt, elts);
for (i = 0; i < nelts; ++i)
if (value_dependent_init_p (VEC_index (constructor_elt,
- elts, i)->value))
+ elts, i).value))
return true;
}
else
@@ -6905,7 +6905,7 @@ cp_complete_array_type (tree *ptype, tree initial_value, bool do_default)
&& !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (initial_value)))
{
VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (initial_value);
- tree value = VEC_index (constructor_elt, v, 0)->value;
+ tree value = VEC_index (constructor_elt, v, 0).value;
if (TREE_CODE (value) == STRING_CST
&& VEC_length (constructor_elt, v) == 1)
@@ -274,7 +274,7 @@ cp_lexer_dump_tokens (FILE *file, VEC(cp_token,gc) *buffer,
if (start_token > VEC_address (cp_token, buffer))
{
- cp_lexer_print_token (file, VEC_index (cp_token, buffer, 0));
+ cp_lexer_print_token (file, &VEC_index (cp_token, buffer, 0));
fprintf (file, " ... ");
}
@@ -314,8 +314,7 @@ cp_lexer_dump_tokens (FILE *file, VEC(cp_token,gc) *buffer,
if (i == num && i < VEC_length (cp_token, buffer))
{
fprintf (file, " ... ");
- cp_lexer_print_token (file, VEC_index (cp_token, buffer,
- VEC_length (cp_token, buffer) - 1));
+ cp_lexer_print_token (file, &VEC_last (cp_token, buffer));
}
fprintf (file, "\n");
@@ -1724,11 +1723,11 @@ cp_parser_context_new (cp_parser_context* next)
/* Managing the unparsed function queues. */
#define unparsed_funs_with_default_args \
- VEC_last (cp_unparsed_functions_entry, parser->unparsed_queues)->funs_with_default_args
+ VEC_last (cp_unparsed_functions_entry, parser->unparsed_queues).funs_with_default_args
#define unparsed_funs_with_definitions \
- VEC_last (cp_unparsed_functions_entry, parser->unparsed_queues)->funs_with_definitions
+ VEC_last (cp_unparsed_functions_entry, parser->unparsed_queues).funs_with_definitions
#define unparsed_nsdmis \
- VEC_last (cp_unparsed_functions_entry, parser->unparsed_queues)->nsdmis
+ VEC_last (cp_unparsed_functions_entry, parser->unparsed_queues).nsdmis
static void
push_unparsed_function_queues (cp_parser *parser)
@@ -7998,7 +7997,7 @@ record_lambda_scope (tree lambda)
static void
finish_lambda_scope (void)
{
- tree_int *p = VEC_last (tree_int, lambda_scope_stack);
+ tree_int *p = &VEC_last (tree_int, lambda_scope_stack);
if (lambda_scope != p->t)
{
lambda_scope = p->t;
@@ -296,7 +296,7 @@ typeid_ok_p (void)
}
pseudo_type_info
- = VEC_index (tinfo_s, tinfo_descs, TK_TYPE_INFO_TYPE)->type;
+ = VEC_index (tinfo_s, tinfo_descs, TK_TYPE_INFO_TYPE).type;
type_info_type = TYPE_MAIN_VARIANT (const_type_info_type_node);
/* Make sure abi::__type_info_pseudo has the same alias set
@@ -422,7 +422,7 @@ get_tinfo_decl (tree type)
if (!d)
{
int ix = get_pseudo_ti_index (type);
- tinfo_s *ti = VEC_index (tinfo_s, tinfo_descs, ix);
+ tinfo_s *ti = &VEC_index (tinfo_s, tinfo_descs, ix);
d = build_lang_decl (VAR_DECL, name, ti->type);
SET_DECL_ASSEMBLER_NAME (d, name);
@@ -1078,7 +1078,7 @@ typeinfo_in_lib_p (tree type)
static tree
get_pseudo_ti_init (tree type, unsigned tk_index)
{
- tinfo_s *ti = VEC_index (tinfo_s, tinfo_descs, tk_index);
+ tinfo_s *ti = &VEC_index (tinfo_s, tinfo_descs, tk_index);
gcc_assert (at_eof);
switch (tk_index)
@@ -1104,7 +1104,7 @@ get_pseudo_ti_init (tree type, unsigned tk_index)
tree tinfo = get_tinfo_ptr (BINFO_TYPE (base_binfo));
/* get_tinfo_ptr might have reallocated the tinfo_descs vector. */
- ti = VEC_index (tinfo_s, tinfo_descs, tk_index);
+ ti = &VEC_index (tinfo_s, tinfo_descs, tk_index);
return class_initializer (ti, type, 1, tinfo);
}
@@ -1159,14 +1159,14 @@ get_pseudo_ti_init (tree type, unsigned tk_index)
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, tinfo);
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, offset);
base_init = build_constructor (init_list_type_node, v);
- e = VEC_index (constructor_elt, init_vec, ix);
+ e = &VEC_index (constructor_elt, init_vec, ix);
e->index = NULL_TREE;
e->value = base_init;
}
base_inits = build_constructor (init_list_type_node, init_vec);
/* get_tinfo_ptr might have reallocated the tinfo_descs vector. */
- ti = VEC_index (tinfo_s, tinfo_descs, tk_index);
+ ti = &VEC_index (tinfo_s, tinfo_descs, tk_index);
return class_initializer (ti, type, 3,
build_int_cst (NULL_TREE, hint),
build_int_cst (NULL_TREE, nbases),
@@ -1213,7 +1213,7 @@ create_pseudo_type_info (int tk, const char *real_name, ...)
fields = build_decl (input_location,
FIELD_DECL, NULL_TREE,
VEC_index (tinfo_s, tinfo_descs,
- TK_TYPE_INFO_TYPE)->type);
+ TK_TYPE_INFO_TYPE).type);
/* Now add the derived fields. */
while ((field_decl = va_arg (ap, tree)))
@@ -1227,7 +1227,7 @@ create_pseudo_type_info (int tk, const char *real_name, ...)
finish_builtin_struct (pseudo_type, pseudo_name, fields, NULL_TREE);
CLASSTYPE_AS_BASE (pseudo_type) = pseudo_type;
- ti = VEC_index (tinfo_s, tinfo_descs, tk);
+ ti = &VEC_index (tinfo_s, tinfo_descs, tk);
ti->type = cp_build_qualified_type (pseudo_type, TYPE_QUAL_CONST);
ti->name = get_identifier (real_name);
ti->vtable = NULL_TREE;
@@ -1320,7 +1320,7 @@ get_pseudo_ti_index (tree type)
while (VEC_iterate (tinfo_s, tinfo_descs, len++, ti))
ti->type = ti->vtable = ti->name = NULL_TREE;
}
- else if (VEC_index (tinfo_s, tinfo_descs, ix)->type)
+ else if (VEC_index (tinfo_s, tinfo_descs, ix).type)
/* already created. */
break;
@@ -1334,7 +1334,7 @@ get_pseudo_ti_index (tree type)
array_domain = build_index_type (size_int (num_bases));
base_array =
build_array_type (VEC_index (tinfo_s, tinfo_descs,
- TK_BASE_TYPE)->type,
+ TK_BASE_TYPE).type,
array_domain);
push_abi_namespace ();
@@ -1386,7 +1386,7 @@ create_tinfo_types (void)
DECL_CHAIN (field) = fields;
fields = field;
- ti = VEC_index (tinfo_s, tinfo_descs, TK_TYPE_INFO_TYPE);
+ ti = &VEC_index (tinfo_s, tinfo_descs, TK_TYPE_INFO_TYPE);
ti->type = make_class_type (RECORD_TYPE);
ti->vtable = NULL_TREE;
ti->name = NULL_TREE;
@@ -1426,7 +1426,7 @@ create_tinfo_types (void)
DECL_CHAIN (field) = fields;
fields = field;
- ti = VEC_index (tinfo_s, tinfo_descs, TK_BASE_TYPE);
+ ti = &VEC_index (tinfo_s, tinfo_descs, TK_BASE_TYPE);
ti->type = make_class_type (RECORD_TYPE);
ti->vtable = NULL_TREE;
@@ -162,7 +162,7 @@ resume_deferring_access_checks (void)
{
if (!deferred_access_no_check)
VEC_last (deferred_access, deferred_access_stack)
- ->deferring_access_checks_kind = dk_deferred;
+ .deferring_access_checks_kind = dk_deferred;
}
/* Stop deferring access checks. */
@@ -172,7 +172,7 @@ stop_deferring_access_checks (void)
{
if (!deferred_access_no_check)
VEC_last (deferred_access, deferred_access_stack)
- ->deferring_access_checks_kind = dk_no_deferred;
+ .deferring_access_checks_kind = dk_no_deferred;
}
/* Discard the current deferred access checks and restore the
@@ -199,7 +199,7 @@ get_deferred_access_checks (void)
return NULL;
else
return (VEC_last (deferred_access, deferred_access_stack)
- ->deferred_access_checks);
+ .deferred_access_checks);
}
/* Take current deferred checks and combine with the
@@ -217,10 +217,10 @@ pop_to_parent_deferring_access_checks (void)
deferred_access *ptr;
checks = (VEC_last (deferred_access, deferred_access_stack)
- ->deferred_access_checks);
+ .deferred_access_checks);
VEC_pop (deferred_access, deferred_access_stack);
- ptr = VEC_last (deferred_access, deferred_access_stack);
+ ptr = &VEC_last (deferred_access, deferred_access_stack);
if (ptr->deferring_access_checks_kind == dk_no_deferred)
{
/* Check access. */
@@ -309,7 +309,7 @@ perform_or_defer_access_check (tree binfo, tree decl, tree diag_decl)
gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
- ptr = VEC_last (deferred_access, deferred_access_stack);
+ ptr = &VEC_last (deferred_access, deferred_access_stack);
/* If we are not supposed to defer access checks, just check now. */
if (ptr->deferring_access_checks_kind == dk_no_deferred)
@@ -5952,7 +5952,7 @@ build_constexpr_constructor_member_initializers (tree type, tree body)
if (VEC_length (constructor_elt, vec) > 0)
{
/* In a delegating constructor, return the target. */
- constructor_elt *ce = VEC_index (constructor_elt, vec, 0);
+ constructor_elt *ce = &VEC_index (constructor_elt, vec, 0);
if (ce->index == current_class_ptr)
{
body = ce->value;
@@ -6821,7 +6821,7 @@ cxx_eval_array_reference (const constexpr_call *call, tree t,
}
i = tree_low_cst (index, 0);
if (TREE_CODE (ary) == CONSTRUCTOR)
- return VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ary), i)->value;
+ return VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ary), i).value;
else if (elem_nchars == 1)
return build_int_cst (cv_unqualified (TREE_TYPE (TREE_TYPE (ary))),
TREE_STRING_POINTER (ary)[i]);
@@ -1179,7 +1179,7 @@ process_init_constructor_record (tree type, tree init,
if (idx < VEC_length (constructor_elt, CONSTRUCTOR_ELTS (init)))
{
- constructor_elt *ce = VEC_index (constructor_elt,
+ constructor_elt *ce = &VEC_index (constructor_elt,
CONSTRUCTOR_ELTS (init), idx);
if (ce->index)
{
@@ -1306,7 +1306,7 @@ process_init_constructor_union (tree type, tree init,
VEC_block_remove (constructor_elt, CONSTRUCTOR_ELTS (init), 1, len-1);
}
- ce = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (init), 0);
+ ce = &VEC_index (constructor_elt, CONSTRUCTOR_ELTS (init), 0);
/* If this element specifies a field, initialize via that field. */
if (ce->index)
@@ -4392,7 +4392,6 @@ df_bb_verify (basic_block bb)
if (!INSN_P (insn))
continue;
df_insn_refs_verify (&collection_rec, bb, insn, true);
- df_free_collection_rec (&collection_rec);
}
/* Do the artificial defs and uses. */
@@ -2536,7 +2536,7 @@ create_cfi_notes (void)
gcc_checking_assert (trace_work_list == NULL);
/* Always begin at the entry trace. */
- ti = VEC_index (dw_trace_info, trace_info, 0);
+ ti = &VEC_index (dw_trace_info, trace_info, 0);
scan_trace (ti);
while (!VEC_empty (dw_trace_info_ref, trace_work_list))
@@ -2583,7 +2583,7 @@ connect_traces (void)
/* Remove all unprocessed traces from the list. */
for (i = n - 1; i > 0; --i)
{
- ti = VEC_index (dw_trace_info, trace_info, i);
+ ti = &VEC_index (dw_trace_info, trace_info, i);
if (ti->beg_row == NULL)
{
VEC_ordered_remove (dw_trace_info, trace_info, i);
@@ -2595,13 +2595,13 @@ connect_traces (void)
/* Work from the end back to the beginning. This lets us easily insert
remember/restore_state notes in the correct order wrt other notes. */
- prev_ti = VEC_index (dw_trace_info, trace_info, n - 1);
+ prev_ti = &VEC_index (dw_trace_info, trace_info, n - 1);
for (i = n - 1; i > 0; --i)
{
dw_cfi_row *old_row;
ti = prev_ti;
- prev_ti = VEC_index (dw_trace_info, trace_info, i - 1);
+ prev_ti = &VEC_index (dw_trace_info, trace_info, i - 1);
add_cfi_insn = ti->head;
@@ -2672,7 +2672,7 @@ connect_traces (void)
for (i = 0; i < n; ++i)
{
- ti = VEC_index (dw_trace_info, trace_info, i);
+ ti = &VEC_index (dw_trace_info, trace_info, i);
if (ti->switch_sections)
prev_args_size = 0;
@@ -2870,8 +2870,8 @@ create_cie_data (void)
break;
case 1:
cie_return_save = ggc_alloc_reg_saved_in_data ();
- *cie_return_save = *VEC_index (reg_saved_in_data,
- cie_trace.regs_saved_in_regs, 0);
+ *cie_return_save = VEC_index (reg_saved_in_data,
+ cie_trace.regs_saved_in_regs, 0);
VEC_free (reg_saved_in_data, heap, cie_trace.regs_saved_in_regs);
break;
default:
@@ -5802,7 +5802,7 @@ same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
return 0;
FOR_EACH_VEC_ELT (dw_attr_node, die1->die_attr, ix, a1)
- if (!same_attr_p (a1, VEC_index (dw_attr_node, die2->die_attr, ix), mark))
+ if (!same_attr_p (a1, &VEC_index (dw_attr_node, die2->die_attr, ix), mark))
return 0;
c1 = die1->die_child;
@@ -7028,7 +7028,7 @@ build_abbrev_table (dw_die_ref die, htab_t extern_map)
FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, die_a)
{
- abbrev_a = VEC_index (dw_attr_node, abbrev->die_attr, ix);
+ abbrev_a = &VEC_index (dw_attr_node, abbrev->die_attr, ix);
if ((abbrev_a->dw_attr != die_a->dw_attr)
|| (value_format (abbrev_a) != value_format (die_a)))
{
@@ -20271,8 +20271,8 @@ optimize_macinfo_range (unsigned int idx, VEC (macinfo_entry, gc) *files,
unsigned int i, count, encoded_filename_len, linebuf_len;
void **slot;
- first = VEC_index (macinfo_entry, macinfo_table, idx);
- second = VEC_index (macinfo_entry, macinfo_table, idx + 1);
+ first = &VEC_index (macinfo_entry, macinfo_table, idx);
+ second = &VEC_index (macinfo_entry, macinfo_table, idx + 1);
/* Optimize only if there are at least two consecutive define/undef ops,
and either all of them are before first DW_MACINFO_start_file
@@ -20312,7 +20312,7 @@ optimize_macinfo_range (unsigned int idx, VEC (macinfo_entry, gc) *files,
if (VEC_empty (macinfo_entry, files))
base = "";
else
- base = lbasename (VEC_last (macinfo_entry, files)->info);
+ base = lbasename (VEC_last (macinfo_entry, files).info);
for (encoded_filename_len = 0, i = 0; base[i]; i++)
if (ISIDNUM (base[i]) || base[i] == '.')
encoded_filename_len++;
@@ -20343,7 +20343,7 @@ optimize_macinfo_range (unsigned int idx, VEC (macinfo_entry, gc) *files,
/* Construct a macinfo_entry for DW_MACRO_GNU_transparent_include
in the empty vector entry before the first define/undef. */
- inc = VEC_index (macinfo_entry, macinfo_table, idx - 1);
+ inc = &VEC_index (macinfo_entry, macinfo_table, idx - 1);
inc->code = DW_MACRO_GNU_transparent_include;
inc->lineno = 0;
inc->info = ggc_strdup (grp_name);
@@ -20436,7 +20436,7 @@ output_macinfo (void)
&& VEC_length (macinfo_entry, files) != 1
&& i > 0
&& i + 1 < length
- && VEC_index (macinfo_entry, macinfo_table, i - 1)->code == 0)
+ && VEC_index (macinfo_entry, macinfo_table, i - 1).code == 0)
{
unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
if (count)
@@ -21043,14 +21043,14 @@ static inline void
move_linkage_attr (dw_die_ref die)
{
unsigned ix = VEC_length (dw_attr_node, die->die_attr);
- dw_attr_node linkage = *VEC_index (dw_attr_node, die->die_attr, ix - 1);
+ dw_attr_node linkage = VEC_index (dw_attr_node, die->die_attr, ix - 1);
gcc_assert (linkage.dw_attr == DW_AT_linkage_name
|| linkage.dw_attr == DW_AT_MIPS_linkage_name);
while (--ix > 0)
{
- dw_attr_node *prev = VEC_index (dw_attr_node, die->die_attr, ix - 1);
+ dw_attr_node *prev = &VEC_index (dw_attr_node, die->die_attr, ix - 1);
if (prev->dw_attr == DW_AT_decl_line || prev->dw_attr == DW_AT_name)
break;
@@ -21962,8 +21962,8 @@ dwarf2out_finish (const char *filename)
for (i = 0; i < VEC_length (deferred_locations, deferred_locations_list); i++)
{
add_location_or_const_value_attribute (
- VEC_index (deferred_locations, deferred_locations_list, i)->die,
- VEC_index (deferred_locations, deferred_locations_list, i)->variable,
+ VEC_index (deferred_locations, deferred_locations_list, i).die,
+ VEC_index (deferred_locations, deferred_locations_list, i).variable,
false,
DW_AT_location);
}
@@ -306,8 +306,8 @@ init_eh_for_function (void)
cfun->eh = ggc_alloc_cleared_eh_status ();
/* Make sure zero'th entries are used. */
- VEC_safe_push (eh_region, gc, cfun->eh->region_array, NULL);
- VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, NULL);
+ VEC_safe_push (eh_region, gc, cfun->eh->region_array, (eh_region) NULL);
+ VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, (eh_landing_pad) NULL);
}
/* Routines to generate the exception tree somewhat directly.
@@ -808,7 +808,7 @@ add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
if (targetm.arm_eabi_unwinder)
VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
else
- VEC_safe_push (uchar, gc, cfun->eh->ehspec_data.other, 0);
+ VEC_safe_push (uchar, gc, cfun->eh->ehspec_data.other, (uchar) 0);
}
return n->filter;
@@ -2397,10 +2397,10 @@ add_call_site (rtx landing_pad, int action, int section)
record->action = action;
VEC_safe_push (call_site_record, gc,
- crtl->eh.call_site_record[section], record);
+ crtl->eh.call_site_record_v[section], record);
return call_site_base + VEC_length (call_site_record,
- crtl->eh.call_site_record[section]) - 1;
+ crtl->eh.call_site_record_v[section]) - 1;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
@@ -2548,10 +2548,10 @@ convert_to_eh_region_ranges (void)
else if (last_action != -3)
last_landing_pad = pc_rtx;
call_site_base += VEC_length (call_site_record,
- crtl->eh.call_site_record[cur_sec]);
+ crtl->eh.call_site_record_v[cur_sec]);
cur_sec++;
- gcc_assert (crtl->eh.call_site_record[cur_sec] == NULL);
- crtl->eh.call_site_record[cur_sec]
+ gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
+ crtl->eh.call_site_record_v[cur_sec]
= VEC_alloc (call_site_record, gc, 10);
}
@@ -2635,14 +2635,14 @@ push_sleb128 (VEC (uchar, gc) **data_area, int value)
static int
dw2_size_of_call_site_table (int section)
{
- int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
+ int n = VEC_length (call_site_record, crtl->eh.call_site_record_v[section]);
int size = n * (4 + 4 + 4);
int i;
for (i = 0; i < n; ++i)
{
struct call_site_record_d *cs =
- VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
+ VEC_index (call_site_record, crtl->eh.call_site_record_v[section], i);
size += size_of_uleb128 (cs->action);
}
@@ -2652,14 +2652,14 @@ dw2_size_of_call_site_table (int section)
static int
sjlj_size_of_call_site_table (void)
{
- int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
+ int n = VEC_length (call_site_record, crtl->eh.call_site_record_v[0]);
int size = 0;
int i;
for (i = 0; i < n; ++i)
{
struct call_site_record_d *cs =
- VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
+ VEC_index (call_site_record, crtl->eh.call_site_record_v[0], i);
size += size_of_uleb128 (INTVAL (cs->landing_pad));
size += size_of_uleb128 (cs->action);
}
@@ -2671,7 +2671,7 @@ sjlj_size_of_call_site_table (void)
static void
dw2_output_call_site_table (int cs_format, int section)
{
- int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
+ int n = VEC_length (call_site_record, crtl->eh.call_site_record_v[section]);
int i;
const char *begin;
@@ -2685,7 +2685,7 @@ dw2_output_call_site_table (int cs_format, int section)
for (i = 0; i < n; ++i)
{
struct call_site_record_d *cs =
- VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
+ VEC_index (call_site_record, crtl->eh.call_site_record_v[section], i);
char reg_start_lab[32];
char reg_end_lab[32];
char landing_pad_lab[32];
@@ -2733,13 +2733,13 @@ dw2_output_call_site_table (int cs_format, int section)
static void
sjlj_output_call_site_table (void)
{
- int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
+ int n = VEC_length (call_site_record, crtl->eh.call_site_record_v[0]);
int i;
for (i = 0; i < n; ++i)
{
struct call_site_record_d *cs =
- VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
+ VEC_index (call_site_record, crtl->eh.call_site_record_v[0], i);
dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
"region %d landing pad", i);
@@ -3048,7 +3048,7 @@ output_function_exception_table (const char *fnname)
targetm.asm_out.emit_except_table_label (asm_out_file);
output_one_function_exception_table (0);
- if (crtl->eh.call_site_record[1] != NULL)
+ if (crtl->eh.call_site_record_v[1] != NULL)
output_one_function_exception_table (1);
switch_to_section (current_function_section ());
@@ -14245,7 +14245,7 @@ fold (tree expr)
while (begin != end)
{
unsigned HOST_WIDE_INT middle = (begin + end) / 2;
- tree index = VEC_index (constructor_elt, elts, middle)->index;
+ tree index = VEC_index (constructor_elt, elts, middle).index;
if (TREE_CODE (index) == INTEGER_CST
&& tree_int_cst_lt (index, op1))
@@ -14260,7 +14260,7 @@ fold (tree expr)
&& tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
end = middle;
else
- return VEC_index (constructor_elt, elts, middle)->value;
+ return VEC_index (constructor_elt, elts, middle).value;
}
}
@@ -158,7 +158,7 @@ struct GTY(()) rtl_eh {
VEC(uchar,gc) *action_record_data;
- VEC(call_site_record,gc) *call_site_record[2];
+ VEC(call_site_record,gc) *call_site_record_v[2];
};
#define pending_stack_adjust (crtl->expr.x_pending_stack_adjust)
@@ -225,7 +225,7 @@ single_def_use_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
bitmap_copy (local_lr, &lr_bb_info->in);
/* Push a marker for the leave_block callback. */
- VEC_safe_push (df_ref, heap, reg_defs_stack, NULL);
+ VEC_safe_push (df_ref, heap, reg_defs_stack, (df_ref) NULL);
process_uses (df_get_artificial_uses (bb_index), DF_REF_AT_TOP);
process_defs (df_get_artificial_defs (bb_index), DF_REF_AT_TOP);
@@ -2517,7 +2517,7 @@ execute (void)
and record info about each one.
Also search for the programs that are to be run. */
- VEC_safe_push (const_char_p, heap, argbuf, 0);
+ VEC_safe_push (const_char_p, heap, argbuf, (const_char_p)0);
commands[0].prog = VEC_index (const_char_p, argbuf, 0); /* first command. */
commands[0].argv = VEC_address (const_char_p, argbuf);
@@ -5076,7 +5076,8 @@ store_alt_unit_usage (regexp_t regexp, regexp_t unit, int cycle,
length = (cycle + 1) * REGEXP_ONEOF (regexp)->regexps_num;
while (VEC_length (unit_usage_t, cycle_alt_unit_usages) < length)
- VEC_safe_push (unit_usage_t, heap, cycle_alt_unit_usages, 0);
+ VEC_safe_push (unit_usage_t, heap, cycle_alt_unit_usages,
+ (unit_usage_t) NULL);
index = cycle * REGEXP_ONEOF (regexp)->regexps_num + alt_num;
prev = NULL;
@@ -7673,7 +7674,8 @@ output_min_issue_delay_table (automaton_t automaton)
if (VEC_index (vect_el_t, min_issue_delay_vect, asn))
{
- VEC_replace (vect_el_t, min_issue_delay_vect, asn, 0);
+ VEC_replace (vect_el_t, min_issue_delay_vect, asn,
+ (vect_el_t) 0);
changed = 1;
}
@@ -7723,7 +7725,8 @@ output_min_issue_delay_table (automaton_t automaton)
if (automaton->max_min_delay < x)
automaton->max_min_delay = x;
if (x == -1)
- VEC_replace (vect_el_t, min_issue_delay_vect, np, 0);
+ VEC_replace (vect_el_t, min_issue_delay_vect, np,
+ (vect_el_t) 0);
}
}
@@ -7749,7 +7752,8 @@ output_min_issue_delay_table (automaton_t automaton)
= VEC_alloc (vect_el_t, heap, compressed_min_issue_delay_len);
for (i = 0; i < compressed_min_issue_delay_len; i++)
- VEC_quick_push (vect_el_t, compressed_min_issue_delay_vect, 0);
+ VEC_quick_push (vect_el_t, compressed_min_issue_delay_vect,
+ (vect_el_t) 0);
for (i = 0; i < min_issue_delay_len; i++)
{
@@ -7798,7 +7802,8 @@ output_dead_lock_vect (automaton_t automaton)
automaton->locked_states++;
}
else
- VEC_replace (vect_el_t, dead_lock_vect, s->order_state_num, 0);
+ VEC_replace (vect_el_t, dead_lock_vect, s->order_state_num,
+ (vect_el_t) 0);
}
if (automaton->locked_states == 0)
return;
@@ -7840,7 +7845,7 @@ output_reserved_units_table (automaton_t automaton)
reserved_units_table = VEC_alloc (vect_el_t, heap, reserved_units_size);
for (i = 0; i < reserved_units_size; i++)
- VEC_quick_push (vect_el_t, reserved_units_table, 0);
+ VEC_quick_push (vect_el_t, reserved_units_table, (vect_el_t) 0);
for (n = 0; n < VEC_length (state_t, output_states_vect); n++)
{
state_t s = VEC_index (state_t, output_states_vect, n);
@@ -201,7 +201,7 @@ VEC_safe_set_locstr (VEC(locstr,heap) **vp, unsigned int ix, char *str)
else
{
while (ix > VEC_length (locstr, *vp))
- VEC_safe_push (locstr, heap, *vp, 0);
+ VEC_safe_push (locstr, heap, *vp, (locstr) NULL);
VEC_safe_push (locstr, heap, *vp, str);
}
}
@@ -120,7 +120,7 @@ lower_function_body (void)
if (gimple_seq_may_fallthru (lowered_body)
&& (VEC_empty (return_statements_t, data.return_statements)
|| gimple_return_retval (VEC_last (return_statements_t,
- data.return_statements)->stmt) != NULL))
+ data.return_statements).stmt) != NULL))
{
x = gimple_build_return (NULL);
gimple_set_location (x, cfun->function_end_locus);
@@ -136,7 +136,7 @@ lower_function_body (void)
/* Unfortunately, we can't use VEC_pop because it returns void for
objects. */
- t = *VEC_last (return_statements_t, data.return_statements);
+ t = VEC_last (return_statements_t, data.return_statements);
VEC_truncate (return_statements_t,
data.return_statements,
VEC_length (return_statements_t,
@@ -776,7 +776,7 @@ lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
for (i = VEC_length (return_statements_t, data->return_statements) - 1;
i >= 0; i--)
{
- tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);
+ tmp_rs = VEC_index (return_statements_t, data->return_statements, i);
if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
{
@@ -2121,7 +2121,7 @@ gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
fallback_t fallback)
{
tree *p;
- VEC(tree,heap) *stack;
+ VEC(tree,heap) *expr_stack;
enum gimplify_status ret = GS_ALL_DONE, tret;
int i;
location_t loc = EXPR_LOCATION (*expr_p);
@@ -2129,7 +2129,7 @@ gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
/* Create a stack of the subexpressions so later we can walk them in
order from inner to outer. */
- stack = VEC_alloc (tree, heap, 10);
+ expr_stack = VEC_alloc (tree, heap, 10);
/* We can handle anything that get_inner_reference can deal with. */
for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
@@ -2149,13 +2149,13 @@ gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
else
break;
- VEC_safe_push (tree, heap, stack, *p);
+ VEC_safe_push (tree, heap, expr_stack, *p);
}
- gcc_assert (VEC_length (tree, stack));
+ gcc_assert (VEC_length (tree, expr_stack));
- /* Now STACK is a stack of pointers to all the refs we've walked through
- and P points to the innermost expression.
+ /* Now EXPR_STACK is a stack of pointers to all the refs we've
+ walked through and P points to the innermost expression.
Java requires that we elaborated nodes in source order. That
means we must gimplify the inner expression followed by each of
@@ -2166,9 +2166,9 @@ gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
So we do this in three steps. First we deal with the annotations
for any variables in the components, then we gimplify the base,
then we gimplify any indices, from left to right. */
- for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
+ for (i = VEC_length (tree, expr_stack) - 1; i >= 0; i--)
{
- tree t = VEC_index (tree, stack, i);
+ tree t = VEC_index (tree, expr_stack, i);
if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
{
@@ -2261,9 +2261,9 @@ gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
/* And finally, the indices and operands to BIT_FIELD_REF. During this
loop we also remove any useless conversions. */
- for (; VEC_length (tree, stack) > 0; )
+ for (; VEC_length (tree, expr_stack) > 0; )
{
- tree t = VEC_pop (tree, stack);
+ tree t = VEC_pop (tree, expr_stack);
if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
{
@@ -2300,7 +2300,7 @@ gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
canonicalize_component_ref (expr_p);
}
- VEC_free (tree, heap, stack);
+ VEC_free (tree, heap, expr_stack);
gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
@@ -3845,7 +3845,7 @@ optimize_compound_literals_in_ctor (tree orig_ctor)
for (idx = 0; idx < num; idx++)
{
- tree value = VEC_index (constructor_elt, elts, idx)->value;
+ tree value = VEC_index (constructor_elt, elts, idx).value;
tree newval = value;
if (TREE_CODE (value) == CONSTRUCTOR)
newval = optimize_compound_literals_in_ctor (value);
@@ -3869,7 +3869,7 @@ optimize_compound_literals_in_ctor (tree orig_ctor)
CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
elts = CONSTRUCTOR_ELTS (ctor);
}
- VEC_index (constructor_elt, elts, idx)->value = newval;
+ VEC_index (constructor_elt, elts, idx).value = newval;
}
return ctor;
}
@@ -4120,8 +4120,8 @@ gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
/* Extract the real and imaginary parts out of the ctor. */
gcc_assert (VEC_length (constructor_elt, elts) == 2);
- r = VEC_index (constructor_elt, elts, 0)->value;
- i = VEC_index (constructor_elt, elts, 1)->value;
+ r = VEC_index (constructor_elt, elts, 0).value;
+ i = VEC_index (constructor_elt, elts, 1).value;
if (r == NULL || i == NULL)
{
tree zero = build_zero_cst (TREE_TYPE (type));
@@ -296,9 +296,9 @@ add_clause (conditions conditions, struct predicate *p, clause_t clause)
condition *cc1;
if (!(clause & (1 << c1)))
continue;
- cc1 = VEC_index (condition,
- conditions,
- c1 - predicate_first_dynamic_condition);
+ cc1 = &VEC_index (condition,
+ conditions,
+ c1 - predicate_first_dynamic_condition);
/* We have no way to represent !CHANGED and !IS_NOT_CONSTANT
and thus there is no point for looking for them. */
if (cc1->code == CHANGED
@@ -307,12 +307,12 @@ add_clause (conditions conditions, struct predicate *p, clause_t clause)
for (c2 = c1 + 1; c2 <= NUM_CONDITIONS; c2++)
if (clause & (1 << c2))
{
- condition *cc1 = VEC_index (condition,
- conditions,
- c1 - predicate_first_dynamic_condition);
- condition *cc2 = VEC_index (condition,
- conditions,
- c2 - predicate_first_dynamic_condition);
+ condition *cc1 = &VEC_index (condition,
+ conditions,
+ c1 - predicate_first_dynamic_condition);
+ condition *cc2 = &VEC_index (condition,
+ conditions,
+ c2 - predicate_first_dynamic_condition);
if (cc1->operand_num == cc2->operand_num
&& cc1->val == cc2->val
&& cc2->code != IS_NOT_CONSTANT
@@ -477,7 +477,7 @@ predicate_probability (conditions conds,
{
if (i2 >= predicate_first_dynamic_condition)
{
- condition *c = VEC_index
+ condition *c = &VEC_index
(condition, conds,
i2 - predicate_first_dynamic_condition);
if (c->code == CHANGED
@@ -487,7 +487,7 @@ predicate_probability (conditions conds,
{
int iprob = VEC_index (inline_param_summary_t,
inline_param_summary,
- c->operand_num)->change_prob;
+ c->operand_num).change_prob;
this_prob = MAX (this_prob, iprob);
}
else
@@ -517,8 +517,8 @@ dump_condition (FILE *f, conditions conditions, int cond)
fprintf (f, "not inlined");
else
{
- c = VEC_index (condition, conditions,
- cond - predicate_first_dynamic_condition);
+ c = &VEC_index (condition, conditions,
+ cond - predicate_first_dynamic_condition);
fprintf (f, "op%i", c->operand_num);
if (c->code == IS_NOT_CONSTANT)
{
@@ -610,7 +610,7 @@ account_size_time (struct inline_summary *summary, int size, int time,
{
i = 0;
found = true;
- e = VEC_index (size_time_entry, summary->entry, 0);
+ e = &VEC_index (size_time_entry, summary->entry, 0);
gcc_assert (!e->predicate.clause[0]);
}
if (dump_file && (dump_flags & TDF_DETAILS) && (time || size))
@@ -760,7 +760,7 @@ evaluate_properties_for_edge (struct cgraph_edge *e, bool inline_p,
else if (inline_p
&& !VEC_index (inline_param_summary_t,
es->param,
- i)->change_prob)
+ i).change_prob)
VEC_replace (tree, known_vals, i, error_mark_node);
}
}
@@ -1135,7 +1135,7 @@ dump_inline_edge_summary (FILE * f, int indent, struct cgraph_node *node,
i++)
{
int prob = VEC_index (inline_param_summary_t,
- es->param, i)->change_prob;
+ es->param, i).change_prob;
if (!prob)
fprintf (f, "%*s op%i is compile time invariant\n",
@@ -1712,8 +1712,8 @@ will_be_nonconstant_predicate (struct ipa_node_params *info,
return p;
/* If we know when operand is constant,
we still can say something useful. */
- if (!true_predicate_p (VEC_index (predicate_t, nonconstant_names,
- SSA_NAME_VERSION (use))))
+ if (!true_predicate_p (&VEC_index (predicate_t, nonconstant_names,
+ SSA_NAME_VERSION (use))))
continue;
return p;
}
@@ -1735,14 +1735,14 @@ will_be_nonconstant_predicate (struct ipa_node_params *info,
ipa_get_param_decl_index (info, parm),
CHANGED, NULL);
else
- p = *VEC_index (predicate_t, nonconstant_names,
- SSA_NAME_VERSION (use));
+ p = VEC_index (predicate_t, nonconstant_names,
+ SSA_NAME_VERSION (use));
op_non_const = or_predicates (summary->conds, &p, &op_non_const);
}
if (gimple_code (stmt) == GIMPLE_ASSIGN
&& TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
VEC_replace (predicate_t, nonconstant_names,
- SSA_NAME_VERSION (gimple_assign_lhs (stmt)), &op_non_const);
+ SSA_NAME_VERSION (gimple_assign_lhs (stmt)), op_non_const);
return op_non_const;
}
@@ -1957,7 +1957,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
struct predicate false_p = false_predicate ();
VEC_replace (predicate_t, nonconstant_names,
SSA_NAME_VERSION (gimple_call_lhs (stmt)),
- &false_p);
+ false_p);
}
if (ipa_node_params_vector)
{
@@ -1972,7 +1972,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
int prob = param_change_prob (stmt, i);
gcc_assert (prob >= 0 && prob <= REG_BR_PROB_BASE);
VEC_index (inline_param_summary_t,
- es->param, i)->change_prob = prob;
+ es->param, i).change_prob = prob;
}
}
@@ -2431,8 +2431,8 @@ remap_predicate (struct inline_summary *info,
{
struct condition *c;
- c = VEC_index (condition, callee_info->conds,
- cond - predicate_first_dynamic_condition);
+ c = &VEC_index (condition, callee_info->conds,
+ cond - predicate_first_dynamic_condition);
/* See if we can remap condition operand to caller's operand.
Otherwise give up. */
if (!operand_map
@@ -2519,11 +2519,11 @@ remap_edge_change_prob (struct cgraph_edge *inlined_edge,
inlined_es->param)))
{
int prob1 = VEC_index (inline_param_summary_t,
- es->param, i)->change_prob;
+ es->param, i).change_prob;
int prob2 = VEC_index
(inline_param_summary_t,
inlined_es->param,
- jfunc->value.pass_through.formal_id)->change_prob;
+ jfunc->value.pass_through.formal_id).change_prob;
int prob = ((prob1 * prob2 + REG_BR_PROB_BASE / 2)
/ REG_BR_PROB_BASE);
@@ -2531,7 +2531,7 @@ remap_edge_change_prob (struct cgraph_edge *inlined_edge,
prob = 1;
VEC_index (inline_param_summary_t,
- es->param, i)->change_prob = prob;
+ es->param, i).change_prob = prob;
}
}
}
@@ -2743,12 +2743,12 @@ do_estimate_edge_time (struct cgraph_edge *edge)
<= edge->uid)
VEC_safe_grow_cleared (edge_growth_cache_entry, heap, edge_growth_cache,
cgraph_edge_max_uid);
- VEC_index (edge_growth_cache_entry, edge_growth_cache, edge->uid)->time
+ VEC_index (edge_growth_cache_entry, edge_growth_cache, edge->uid).time
= ret + (ret >= 0);
ret_size = size - es->call_stmt_size;
gcc_checking_assert (es->call_stmt_size);
- VEC_index (edge_growth_cache_entry, edge_growth_cache, edge->uid)->size
+ VEC_index (edge_growth_cache_entry, edge_growth_cache, edge->uid).size
= ret_size + (ret_size >= 0);
}
return ret;
@@ -2774,7 +2774,7 @@ do_estimate_edge_growth (struct cgraph_edge *edge)
do_estimate_edge_time (edge);
size = VEC_index (edge_growth_cache_entry,
edge_growth_cache,
- edge->uid)->size;
+ edge->uid).size;
gcc_checking_assert (size);
return size - (size > 0);
}
@@ -3009,7 +3009,7 @@ read_inline_edge_summary (struct lto_input_block *ib, struct cgraph_edge *e)
{
VEC_safe_grow_cleared (inline_param_summary_t, heap, es->param, length);
for (i = 0; i < length; i++)
- VEC_index (inline_param_summary_t, es->param, i)->change_prob
+ VEC_index (inline_param_summary_t, es->param, i).change_prob
= streamer_read_uhwi (ib);
}
}
@@ -3163,7 +3163,7 @@ write_inline_edge_summary (struct output_block *ob, struct cgraph_edge *e)
streamer_write_uhwi (ob, VEC_length (inline_param_summary_t, es->param));
for (i = 0; i < (int)VEC_length (inline_param_summary_t, es->param); i++)
streamer_write_uhwi (ob, VEC_index (inline_param_summary_t,
- es->param, i)->change_prob);
+ es->param, i).change_prob);
}
@@ -1288,7 +1288,7 @@ inline_small_functions (void)
{
struct cgraph_node *node;
struct cgraph_edge *edge;
- fibheap_t heap = fibheap_new ();
+ fibheap_t edge_heap = fibheap_new ();
bitmap updated_nodes = BITMAP_ALLOC (NULL);
int min_size, max_size;
VEC (cgraph_edge_p, heap) *new_indirect_edges = NULL;
@@ -1345,7 +1345,7 @@ inline_small_functions (void)
&& edge->inline_failed)
{
gcc_assert (!edge->aux);
- update_edge_key (heap, edge);
+ update_edge_key (edge_heap, edge);
}
}
@@ -1353,16 +1353,16 @@ inline_small_functions (void)
|| !max_count
|| (profile_info && flag_branch_probabilities));
- while (!fibheap_empty (heap))
+ while (!fibheap_empty (edge_heap))
{
int old_size = overall_size;
struct cgraph_node *where, *callee;
- int badness = fibheap_min_key (heap);
+ int badness = fibheap_min_key (edge_heap);
int current_badness;
int cached_badness;
int growth;
- edge = (struct cgraph_edge *) fibheap_extract_min (heap);
+ edge = (struct cgraph_edge *) fibheap_extract_min (edge_heap);
gcc_assert (edge->aux);
edge->aux = NULL;
if (!edge->inline_failed)
@@ -1383,7 +1383,7 @@ inline_small_functions (void)
gcc_assert (current_badness >= badness);
if (current_badness != badness)
{
- edge->aux = fibheap_insert (heap, current_badness, edge);
+ edge->aux = fibheap_insert (edge_heap, current_badness, edge);
continue;
}
@@ -1448,8 +1448,8 @@ inline_small_functions (void)
/* Recursive inliner inlines all recursive calls of the function
at once. Consequently we need to update all callee keys. */
if (flag_indirect_inlining)
- add_new_edges_to_heap (heap, new_indirect_edges);
- update_callee_keys (heap, where, updated_nodes);
+ add_new_edges_to_heap (edge_heap, new_indirect_edges);
+ update_callee_keys (edge_heap, where, updated_nodes);
}
else
{
@@ -1483,12 +1483,12 @@ inline_small_functions (void)
gcc_checking_assert (!callee->global.inlined_to);
inline_call (edge, true, &new_indirect_edges, &overall_size);
if (flag_indirect_inlining)
- add_new_edges_to_heap (heap, new_indirect_edges);
+ add_new_edges_to_heap (edge_heap, new_indirect_edges);
reset_edge_caches (edge->callee);
reset_node_growth_cache (callee);
- update_callee_keys (heap, edge->callee, updated_nodes);
+ update_callee_keys (edge_heap, edge->callee, updated_nodes);
}
where = edge->caller;
if (where->global.inlined_to)
@@ -1500,7 +1500,7 @@ inline_small_functions (void)
inlined into (since it's body size changed) and for the functions
called by function we inlined (since number of it inlinable callers
might change). */
- update_caller_keys (heap, where, updated_nodes, NULL);
+ update_caller_keys (edge_heap, where, updated_nodes, NULL);
bitmap_clear (updated_nodes);
if (dump_file)
@@ -1526,7 +1526,7 @@ inline_small_functions (void)
free_growth_caches ();
if (new_indirect_edges)
VEC_free (cgraph_edge_p, heap, new_indirect_edges);
- fibheap_delete (heap);
+ fibheap_delete (edge_heap);
if (dump_file)
fprintf (dump_file,
"Unit growth for small function inlining: %i->%i (%i%%)\n",
@@ -190,13 +190,13 @@ extern int nfunctions_inlined;
static inline struct inline_summary *
inline_summary (struct cgraph_node *node)
{
- return VEC_index (inline_summary_t, inline_summary_vec, node->uid);
+ return &VEC_index (inline_summary_t, inline_summary_vec, node->uid);
}
static inline struct inline_edge_summary *
inline_edge_summary (struct cgraph_edge *edge)
{
- return VEC_index (inline_edge_summary_t,
+ return &VEC_index (inline_edge_summary_t,
inline_edge_summary_vec, edge->uid);
}
@@ -225,7 +225,7 @@ estimate_edge_growth (struct cgraph_edge *edge)
if ((int)VEC_length (edge_growth_cache_entry, edge_growth_cache) <= edge->uid
|| !(ret = VEC_index (edge_growth_cache_entry,
edge_growth_cache,
- edge->uid)->size))
+ edge->uid).size))
return do_estimate_edge_growth (edge);
return ret - (ret > 0);
}
@@ -241,7 +241,7 @@ estimate_edge_time (struct cgraph_edge *edge)
if ((int)VEC_length (edge_growth_cache_entry, edge_growth_cache) <= edge->uid
|| !(ret = VEC_index (edge_growth_cache_entry,
edge_growth_cache,
- edge->uid)->time))
+ edge->uid).time))
return do_estimate_edge_time (edge);
return ret - (ret > 0);
}
@@ -264,6 +264,6 @@ reset_edge_growth_cache (struct cgraph_edge *edge)
if ((int)VEC_length (edge_growth_cache_entry, edge_growth_cache) > edge->uid)
{
struct edge_growth_cache_entry zero = {0, 0};
- VEC_replace (edge_growth_cache_entry, edge_growth_cache, edge->uid, &zero);
+ VEC_replace (edge_growth_cache_entry, edge_growth_cache, edge->uid, zero);
}
}
@@ -97,7 +97,7 @@ ipa_populate_param_decls (struct cgraph_node *node,
for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
{
VEC_index (ipa_param_descriptor_t,
- info->descriptors, param_num)->decl = parm;
+ info->descriptors, param_num).decl = parm;
param_num++;
}
}
@@ -2298,7 +2298,7 @@ ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
struct ipa_parm_adjustment *adj;
gcc_assert (link);
- adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
parm = VEC_index (tree, oparms, adj->base_index);
adj->base = parm;
@@ -2366,8 +2366,8 @@ ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
When we are asked to remove it, we need to build new FUNCTION_TYPE
instead. */
if (TREE_CODE (orig_type) != METHOD_TYPE
- || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
- && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
+ || (VEC_index (ipa_parm_adjustment_t, adjustments, 0).copy_param
+ && VEC_index (ipa_parm_adjustment_t, adjustments, 0).base_index == 0))
{
new_type = build_distinct_type_copy (orig_type);
TYPE_ARG_TYPES (new_type) = new_reversed;
@@ -2434,7 +2434,7 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
{
struct ipa_parm_adjustment *adj;
- adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
if (adj->copy_param)
{
@@ -2617,7 +2617,7 @@ index_in_adjustments_multiple_times_p (int base_index,
for (i = 0; i < len; i++)
{
struct ipa_parm_adjustment *adj;
- adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
if (adj->base_index == base_index)
{
@@ -2648,7 +2648,7 @@ ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
for (i = 0; i < inlen; i++)
{
struct ipa_parm_adjustment *n;
- n = VEC_index (ipa_parm_adjustment_t, inner, i);
+ n = &VEC_index (ipa_parm_adjustment_t, inner, i);
if (n->remove_param)
removals++;
@@ -2660,10 +2660,10 @@ ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
for (i = 0; i < outlen; i++)
{
struct ipa_parm_adjustment *r;
- struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
- outer, i);
- struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
- out->base_index);
+ struct ipa_parm_adjustment *out = &VEC_index (ipa_parm_adjustment_t,
+ outer, i);
+ struct ipa_parm_adjustment *in = &VEC_index (ipa_parm_adjustment_t, tmp,
+ out->base_index);
gcc_assert (!in->remove_param);
if (out->remove_param)
@@ -2696,8 +2696,8 @@ ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
for (i = 0; i < inlen; i++)
{
- struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
- inner, i);
+ struct ipa_parm_adjustment *n = &VEC_index (ipa_parm_adjustment_t,
+ inner, i);
if (n->remove_param)
VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
@@ -2722,7 +2722,7 @@ ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
for (i = 0; i < len; i++)
{
struct ipa_parm_adjustment *adj;
- adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
if (!first)
fprintf (file, " ");
@@ -212,7 +212,7 @@ ipa_get_param_count (struct ipa_node_params *info)
static inline tree
ipa_get_param (struct ipa_node_params *info, int i)
{
- return VEC_index (ipa_param_descriptor_t, info->descriptors, i)->decl;
+ return VEC_index (ipa_param_descriptor_t, info->descriptors, i).decl;
}
/* Set the used flag corresponding to the Ith formal parameter of the function
@@ -221,7 +221,7 @@ ipa_get_param (struct ipa_node_params *info, int i)
static inline void
ipa_set_param_used (struct ipa_node_params *info, int i, bool val)
{
- VEC_index (ipa_param_descriptor_t, info->descriptors, i)->used = val;
+ VEC_index (ipa_param_descriptor_t, info->descriptors, i).used = val;
}
/* Return the used flag corresponding to the Ith formal parameter of the
@@ -230,7 +230,7 @@ ipa_set_param_used (struct ipa_node_params *info, int i, bool val)
static inline bool
ipa_is_param_used (struct ipa_node_params *info, int i)
{
- return VEC_index (ipa_param_descriptor_t, info->descriptors, i)->used;
+ return VEC_index (ipa_param_descriptor_t, info->descriptors, i).used;
}
/* ipa_edge_args stores information related to a callsite and particularly its
@@ -259,7 +259,7 @@ ipa_get_cs_argument_count (struct ipa_edge_args *args)
static inline struct ipa_jump_func *
ipa_get_ith_jump_func (struct ipa_edge_args *args, int i)
{
- return VEC_index (ipa_jump_func_t, args->jump_functions, i);
+ return &VEC_index (ipa_jump_func_t, args->jump_functions, i);
}
/* Vectors need to have typedefs of structures. */
@@ -278,10 +278,10 @@ extern GTY(()) VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
/* Return the associated parameter/argument info corresponding to the given
node/edge. */
-#define IPA_NODE_REF(NODE) (VEC_index (ipa_node_params_t, \
- ipa_node_params_vector, (NODE)->uid))
-#define IPA_EDGE_REF(EDGE) (VEC_index (ipa_edge_args_t, \
- ipa_edge_args_vector, (EDGE)->uid))
+#define IPA_NODE_REF(NODE) (&VEC_index (ipa_node_params_t, \
+ ipa_node_params_vector, (NODE)->uid))
+#define IPA_EDGE_REF(EDGE) (&VEC_index (ipa_edge_args_t, \
+ ipa_edge_args_vector, (EDGE)->uid))
/* This macro checks validity of index returned by
ipa_get_param_decl_index function. */
#define IS_VALID_JUMP_FUNC_INDEX(I) ((I) != -1)
@@ -73,7 +73,7 @@ ipa_ref_list_first_reference (struct ipa_ref_list *list)
{
if (!VEC_length (ipa_ref_t, list->references))
return NULL;
- return VEC_index (ipa_ref_t, list->references, 0);
+ return &VEC_index (ipa_ref_t, list->references, 0);
}
/* Return first referring ref in LIST or NULL if empty. */
@@ -49,7 +49,7 @@ ipa_record_reference (symtab_node referring_node,
old_references = list->references;
VEC_safe_grow (ipa_ref_t, gc, list->references,
VEC_length (ipa_ref_t, list->references) + 1);
- ref = VEC_last (ipa_ref_t, list->references);
+ ref = &VEC_last (ipa_ref_t, list->references);
list2 = &referred_node->symbol.ref_list;
VEC_safe_push (ipa_ref_ptr, heap, list2->referring, ref);
@@ -93,7 +93,7 @@ ipa_remove_reference (struct ipa_ref *ref)
}
VEC_pop (ipa_ref_ptr, list->referring);
- last = VEC_last (ipa_ref_t, list2->references);
+ last = &VEC_last (ipa_ref_t, list2->references);
if (ref != last)
{
*ref = *last;
@@ -111,7 +111,7 @@ void
ipa_remove_all_references (struct ipa_ref_list *list)
{
while (VEC_length (ipa_ref_t, list->references))
- ipa_remove_reference (VEC_last (ipa_ref_t, list->references));
+ ipa_remove_reference (&VEC_last (ipa_ref_t, list->references));
VEC_free (ipa_ref_t, gc, list->references);
list->references = NULL;
}
@@ -917,7 +917,7 @@ find_split_points (int overall_time, int overall_size)
while (!VEC_empty (stack_entry, stack))
{
- stack_entry *entry = VEC_last (stack_entry, stack);
+ stack_entry *entry = &VEC_last (stack_entry, stack);
/* We are walking an acyclic graph, so edge_num counts
succ and pred edges together. However when considering
@@ -984,9 +984,9 @@ find_split_points (int overall_time, int overall_size)
new_entry.bb = dest;
new_entry.edge_num = 0;
new_entry.overall_time
- = VEC_index (bb_info, bb_info_vec, dest->index)->time;
+ = VEC_index (bb_info, bb_info_vec, dest->index).time;
new_entry.overall_size
- = VEC_index (bb_info, bb_info_vec, dest->index)->size;
+ = VEC_index (bb_info, bb_info_vec, dest->index).size;
new_entry.earliest = INT_MAX;
new_entry.set_ssa_names = BITMAP_ALLOC (NULL);
new_entry.used_ssa_names = BITMAP_ALLOC (NULL);
@@ -1006,8 +1006,8 @@ find_split_points (int overall_time, int overall_size)
and merge stuff we accumulate during the walk. */
else if (entry->bb != ENTRY_BLOCK_PTR)
{
- stack_entry *prev = VEC_index (stack_entry, stack,
- VEC_length (stack_entry, stack) - 2);
+ stack_entry *prev = &VEC_index (stack_entry, stack,
+ VEC_length (stack_entry, stack) - 2);
entry->bb->aux = (void *)(intptr_t)-1;
prev->can_split &= entry->can_split;
@@ -1510,8 +1510,8 @@ execute_split_functions (void)
}
overall_time += time;
overall_size += size;
- VEC_index (bb_info, bb_info_vec, bb->index)->time = time;
- VEC_index (bb_info, bb_info_vec, bb->index)->size = size;
+ VEC_index (bb_info, bb_info_vec, bb->index).time = time;
+ VEC_index (bb_info, bb_info_vec, bb->index).size = size;
}
find_split_points (overall_time, overall_size);
if (best_split_point.split_bbs)
@@ -233,6 +233,6 @@ uses_jv_markobj_p (tree dtable)
this function is only used with flag_reduced_reflection. No
point in asserting unless we hit the bad case. */
gcc_assert (!flag_reduced_reflection || TARGET_VTABLE_USES_DESCRIPTORS == 0);
- v = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (dtable), 3)->value;
+ v = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (dtable), 3).value;
return (PROCEDURE_OBJECT_DESCRIPTOR == TREE_INT_CST_LOW (v));
}
@@ -1534,7 +1534,7 @@ make_method_value (tree mdecl)
v = VEC_alloc (constructor_elt, gc, length);
VEC_safe_grow_cleared (constructor_elt, gc, v, length);
- e = VEC_index (constructor_elt, v, idx--);
+ e = &VEC_index (constructor_elt, v, idx--);
e->value = null_pointer_node;
FOR_EACH_VEC_ELT (tree, DECL_FUNCTION_THROWS (mdecl), ix, t)
@@ -1543,7 +1543,7 @@ make_method_value (tree mdecl)
tree utf8
= build_utf8_ref (unmangle_classname (IDENTIFIER_POINTER (sig),
IDENTIFIER_LENGTH (sig)));
- e = VEC_index (constructor_elt, v, idx--);
+ e = &VEC_index (constructor_elt, v, idx--);
e->value = utf8;
}
gcc_assert (idx == -1);
@@ -1622,7 +1622,7 @@ get_dispatch_table (tree type, tree this_class_addr)
arraysize += 2;
VEC_safe_grow_cleared (constructor_elt, gc, v, arraysize);
- e = VEC_index (constructor_elt, v, arraysize - 1);
+ e = &VEC_index (constructor_elt, v, arraysize - 1);
#define CONSTRUCTOR_PREPEND_VALUE(E, V) E->value = V, E--
for (i = nvirtuals; --i >= 0; )
@@ -3008,7 +3008,7 @@ emit_catch_table (tree this_class)
int n_catch_classes;
constructor_elt *e;
/* Fill in the dummy entry that make_class created. */
- e = VEC_index (constructor_elt, TYPE_CATCH_CLASSES (this_class), 0);
+ e = &VEC_index (constructor_elt, TYPE_CATCH_CLASSES (this_class), 0);
e->value = make_catch_class_record (null_pointer_node, null_pointer_node);
CONSTRUCTOR_APPEND_ELT (TYPE_CATCH_CLASSES (this_class), NULL_TREE,
make_catch_class_record (null_pointer_node,
@@ -514,8 +514,8 @@ build_constants_constructor (void)
int c = outgoing_cpool->count;
VEC_safe_grow_cleared (constructor_elt, gc, tags, c);
VEC_safe_grow_cleared (constructor_elt, gc, data, c);
- t = VEC_index (constructor_elt, tags, c-1);
- d = VEC_index (constructor_elt, data, c-1);
+ t = &VEC_index (constructor_elt, tags, c-1);
+ d = &VEC_index (constructor_elt, data, c-1);
}
#define CONSTRUCTOR_PREPEND_VALUE(E, V) E->value = V, E--
@@ -1431,7 +1431,7 @@ extern tree *type_map;
#define PUSH_SUPER_VALUE(V, VALUE) \
do \
{ \
- constructor_elt *_elt___ = VEC_last (constructor_elt, V); \
+ constructor_elt *_elt___ = &VEC_last (constructor_elt, V); \
tree _next___ = DECL_CHAIN (_elt___->index); \
gcc_assert (!DECL_NAME (_elt___->index)); \
_elt___->value = VALUE; \
@@ -1445,7 +1445,7 @@ extern tree *type_map;
#define PUSH_FIELD_VALUE(V, NAME, VALUE) \
do \
{ \
- constructor_elt *_elt___ = VEC_last (constructor_elt, V); \
+ constructor_elt *_elt___ = &VEC_last (constructor_elt, V); \
tree _next___ = DECL_CHAIN (_elt___->index); \
gcc_assert (strcmp (IDENTIFIER_POINTER (DECL_NAME (_elt___->index)), \
NAME) == 0); \
@@ -232,7 +232,7 @@ static void remove_node_from_ps (partial_schedule_ptr, ps_insn_ptr);
#define NODE_ASAP(node) ((node)->aux.count)
-#define SCHED_PARAMS(x) VEC_index (node_sched_params, node_sched_param_vec, x)
+#define SCHED_PARAMS(x) (&VEC_index (node_sched_params, node_sched_param_vec, x))
#define SCHED_TIME(x) (SCHED_PARAMS (x)->time)
#define SCHED_ROW(x) (SCHED_PARAMS (x)->row)
#define SCHED_STAGE(x) (SCHED_PARAMS (x)->stage)
@@ -308,7 +308,7 @@ static struct ps_reg_move_info *
ps_reg_move (partial_schedule_ptr ps, int id)
{
gcc_checking_assert (id >= ps->g->num_nodes);
- return VEC_index (ps_reg_move_info, ps->reg_moves, id - ps->g->num_nodes);
+ return &VEC_index (ps_reg_move_info, ps->reg_moves, id - ps->g->num_nodes);
}
/* Return the rtl instruction that is being scheduled by partial schedule
@@ -806,7 +806,7 @@ add_removable_extension (const_rtx expr, rtx insn,
different extension. FIXME: this obviously can be improved. */
for (def = defs; def; def = def->next)
if ((idx = def_map[INSN_UID(DF_REF_INSN (def->ref))])
- && (cand = VEC_index (ext_cand, *insn_list, idx - 1))
+ && (cand = &VEC_index (ext_cand, *insn_list, idx - 1))
&& (cand->code != code || cand->mode != mode))
{
if (dump_file)
@@ -204,7 +204,7 @@ typedef struct stack_def
int top; /* index to top stack element */
HARD_REG_SET reg_set; /* set of live registers */
unsigned char reg[REG_STACK_SIZE];/* register - stack mapping */
-} *stack;
+} *stack_ptr;
/* This is used to carry information about basic blocks. It is
attached to the AUX field of the standard CFG block. */
@@ -249,7 +249,7 @@ static rtx not_a_num;
/* Forward declarations */
static int stack_regs_mentioned_p (const_rtx pat);
-static void pop_stack (stack, int);
+static void pop_stack (stack_ptr, int);
static rtx *get_true_reg (rtx *);
static int check_asm_stack_operands (rtx);
@@ -257,19 +257,19 @@ static void get_asm_operands_in_out (rtx, int *, int *);
static rtx stack_result (tree);
static void replace_reg (rtx *, int);
static void remove_regno_note (rtx, enum reg_note, unsigned int);
-static int get_hard_regnum (stack, rtx);
-static rtx emit_pop_insn (rtx, stack, rtx, enum emit_where);
-static void swap_to_top(rtx, stack, rtx, rtx);
-static bool move_for_stack_reg (rtx, stack, rtx);
-static bool move_nan_for_stack_reg (rtx, stack, rtx);
+static int get_hard_regnum (stack_ptr, rtx);
+static rtx emit_pop_insn (rtx, stack_ptr, rtx, enum emit_where);
+static void swap_to_top(rtx, stack_ptr, rtx, rtx);
+static bool move_for_stack_reg (rtx, stack_ptr, rtx);
+static bool move_nan_for_stack_reg (rtx, stack_ptr, rtx);
static int swap_rtx_condition_1 (rtx);
static int swap_rtx_condition (rtx);
-static void compare_for_stack_reg (rtx, stack, rtx);
-static bool subst_stack_regs_pat (rtx, stack, rtx);
-static void subst_asm_stack_regs (rtx, stack);
-static bool subst_stack_regs (rtx, stack);
-static void change_stack (rtx, stack, stack, enum emit_where);
-static void print_stack (FILE *, stack);
+static void compare_for_stack_reg (rtx, stack_ptr, rtx);
+static bool subst_stack_regs_pat (rtx, stack_ptr, rtx);
+static void subst_asm_stack_regs (rtx, stack_ptr);
+static bool subst_stack_regs (rtx, stack_ptr);
+static void change_stack (rtx, stack_ptr, stack_ptr, enum emit_where);
+static void print_stack (FILE *, stack_ptr);
static rtx next_flags_user (rtx);
/* Return nonzero if any stack register is mentioned somewhere within PAT. */
@@ -357,7 +357,7 @@ next_flags_user (rtx insn)
/* Reorganize the stack into ascending numbers, before this insn. */
static void
-straighten_stack (rtx insn, stack regstack)
+straighten_stack (rtx insn, stack_ptr regstack)
{
struct stack_def temp_stack;
int top;
@@ -380,7 +380,7 @@ straighten_stack (rtx insn, stack regstack)
/* Pop a register from the stack. */
static void
-pop_stack (stack regstack, int regno)
+pop_stack (stack_ptr regstack, int regno)
{
int top = regstack->top;
@@ -724,7 +724,7 @@ remove_regno_note (rtx insn, enum reg_note note, unsigned int regno)
returned if the register is not found. */
static int
-get_hard_regnum (stack regstack, rtx reg)
+get_hard_regnum (stack_ptr regstack, rtx reg)
{
int i;
@@ -745,7 +745,7 @@ get_hard_regnum (stack regstack, rtx reg)
cases the movdf pattern to pop. */
static rtx
-emit_pop_insn (rtx insn, stack regstack, rtx reg, enum emit_where where)
+emit_pop_insn (rtx insn, stack_ptr regstack, rtx reg, enum emit_where where)
{
rtx pop_insn, pop_rtx;
int hard_regno;
@@ -796,7 +796,7 @@ emit_pop_insn (rtx insn, stack regstack, rtx reg, enum emit_where where)
If REG is already at the top of the stack, no insn is emitted. */
static void
-emit_swap_insn (rtx insn, stack regstack, rtx reg)
+emit_swap_insn (rtx insn, stack_ptr regstack, rtx reg)
{
int hard_regno;
rtx swap_rtx;
@@ -903,7 +903,7 @@ emit_swap_insn (rtx insn, stack regstack, rtx reg)
is emitted. */
static void
-swap_to_top (rtx insn, stack regstack, rtx src1, rtx src2)
+swap_to_top (rtx insn, stack_ptr regstack, rtx src1, rtx src2)
{
struct stack_def temp_stack;
int regno, j, k, temp;
@@ -944,7 +944,7 @@ swap_to_top (rtx insn, stack regstack, rtx src1, rtx src2)
was deleted in the process. */
static bool
-move_for_stack_reg (rtx insn, stack regstack, rtx pat)
+move_for_stack_reg (rtx insn, stack_ptr regstack, rtx pat)
{
rtx *psrc = get_true_reg (&SET_SRC (pat));
rtx *pdest = get_true_reg (&SET_DEST (pat));
@@ -1095,7 +1095,7 @@ move_for_stack_reg (rtx insn, stack regstack, rtx pat)
a NaN into DEST, then invokes move_for_stack_reg. */
static bool
-move_nan_for_stack_reg (rtx insn, stack regstack, rtx dest)
+move_nan_for_stack_reg (rtx insn, stack_ptr regstack, rtx dest)
{
rtx pat;
@@ -1234,7 +1234,7 @@ swap_rtx_condition (rtx insn)
set up. */
static void
-compare_for_stack_reg (rtx insn, stack regstack, rtx pat_src)
+compare_for_stack_reg (rtx insn, stack_ptr regstack, rtx pat_src)
{
rtx *src1, *src2;
rtx src1_note, src2_note;
@@ -1323,7 +1323,7 @@ compare_for_stack_reg (rtx insn, stack regstack, rtx pat_src)
static int
subst_stack_regs_in_debug_insn (rtx *loc, void *data)
{
- stack regstack = (stack)data;
+ stack_ptr regstack = (stack_ptr)data;
int hard_regno;
if (!STACK_REG_P (*loc))
@@ -1364,7 +1364,7 @@ subst_all_stack_regs_in_debug_insn (rtx insn, struct stack_def *regstack)
was deleted in the process. */
static bool
-subst_stack_regs_pat (rtx insn, stack regstack, rtx pat)
+subst_stack_regs_pat (rtx insn, stack_ptr regstack, rtx pat)
{
rtx *dest, *src;
bool control_flow_insn_deleted = false;
@@ -2012,7 +2012,7 @@ subst_stack_regs_pat (rtx insn, stack regstack, rtx pat)
requirements, since record_asm_stack_regs removes any problem asm. */
static void
-subst_asm_stack_regs (rtx insn, stack regstack)
+subst_asm_stack_regs (rtx insn, stack_ptr regstack)
{
rtx body = PATTERN (insn);
int alt;
@@ -2295,7 +2295,7 @@ subst_asm_stack_regs (rtx insn, stack regstack)
a control flow insn was deleted in the process. */
static bool
-subst_stack_regs (rtx insn, stack regstack)
+subst_stack_regs (rtx insn, stack_ptr regstack)
{
rtx *note_link, note;
bool control_flow_insn_deleted = false;
@@ -2407,7 +2407,7 @@ subst_stack_regs (rtx insn, stack regstack)
is no longer needed once this has executed. */
static void
-change_stack (rtx insn, stack old, stack new_stack, enum emit_where where)
+change_stack (rtx insn, stack_ptr old, stack_ptr new_stack, enum emit_where where)
{
int reg;
int update_end = 0;
@@ -2613,7 +2613,7 @@ change_stack (rtx insn, stack old, stack new_stack, enum emit_where where)
/* Print stack configuration. */
static void
-print_stack (FILE *file, stack s)
+print_stack (FILE *file, stack_ptr s)
{
if (! file)
return;
@@ -2689,7 +2689,7 @@ static void
convert_regs_exit (void)
{
int value_reg_low, value_reg_high;
- stack output_stack;
+ stack_ptr output_stack;
rtx retvalue;
retvalue = stack_result (current_function_decl);
@@ -2722,8 +2722,8 @@ convert_regs_exit (void)
static void
propagate_stack (edge e)
{
- stack src_stack = &BLOCK_INFO (e->src)->stack_out;
- stack dest_stack = &BLOCK_INFO (e->dest)->stack_in;
+ stack_ptr src_stack = &BLOCK_INFO (e->src)->stack_out;
+ stack_ptr dest_stack = &BLOCK_INFO (e->dest)->stack_in;
int reg;
/* Preserve the order of the original stack, but check whether
@@ -2749,8 +2749,8 @@ static bool
compensate_edge (edge e)
{
basic_block source = e->src, target = e->dest;
- stack target_stack = &BLOCK_INFO (target)->stack_in;
- stack source_stack = &BLOCK_INFO (source)->stack_out;
+ stack_ptr target_stack = &BLOCK_INFO (target)->stack_in;
+ stack_ptr source_stack = &BLOCK_INFO (source)->stack_out;
struct stack_def regstack;
int reg;
@@ -729,8 +729,8 @@ regrename_analyze (bitmap bb_mask)
rtx insn;
FOR_BB_INSNS (bb1, insn)
{
- insn_rr_info *p = VEC_index (insn_rr_info, insn_rr,
- INSN_UID (insn));
+ insn_rr_info *p = &VEC_index (insn_rr_info, insn_rr,
+ INSN_UID (insn));
p->op_info = NULL;
}
}
@@ -1584,7 +1584,7 @@ build_def_use (basic_block bb)
if (insn_rr != NULL)
{
- insn_info = VEC_index (insn_rr_info, insn_rr, INSN_UID (insn));
+ insn_info = &VEC_index (insn_rr_info, insn_rr, INSN_UID (insn));
insn_info->op_info = XOBNEWVEC (&rename_obstack, operand_rr_info,
recog_data.n_operands);
memset (insn_info->op_info, 0,
@@ -243,19 +243,19 @@ typedef struct reg_equivs
} reg_equivs_t;
#define reg_equiv_constant(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT))->constant
+ VEC_index (reg_equivs_t, reg_equivs, (ELT)).constant
#define reg_equiv_invariant(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT))->invariant
+ VEC_index (reg_equivs_t, reg_equivs, (ELT)).invariant
#define reg_equiv_memory_loc(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT))->memory_loc
+ VEC_index (reg_equivs_t, reg_equivs, (ELT)).memory_loc
#define reg_equiv_address(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT))->address
+ VEC_index (reg_equivs_t, reg_equivs, (ELT)).address
#define reg_equiv_mem(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT))->mem
+ VEC_index (reg_equivs_t, reg_equivs, (ELT)).mem
#define reg_equiv_alt_mem_list(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT))->alt_mem_list
+ VEC_index (reg_equivs_t, reg_equivs, (ELT)).alt_mem_list
#define reg_equiv_init(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT))->init
+ VEC_index (reg_equivs_t, reg_equivs, (ELT)).init
DEF_VEC_O(reg_equivs_t);
DEF_VEC_ALLOC_O(reg_equivs_t, gc);
@@ -664,7 +664,8 @@ grow_reg_equivs (void)
for (i = old_size; i < max_regno; i++)
{
VEC_quick_insert (reg_equivs_t, reg_equivs, i, 0);
- memset (VEC_index (reg_equivs_t, reg_equivs, i), 0, sizeof (reg_equivs_t));
+ memset (&VEC_index (reg_equivs_t, reg_equivs, i), 0,
+ sizeof (reg_equivs_t));
}
}
@@ -865,7 +865,7 @@ DEF_VEC_ALLOC_O (haifa_insn_data_def, heap);
extern VEC(haifa_insn_data_def, heap) *h_i_d;
-#define HID(INSN) (VEC_index (haifa_insn_data_def, h_i_d, INSN_UID (INSN)))
+#define HID(INSN) (&VEC_index (haifa_insn_data_def, h_i_d, INSN_UID (INSN)))
/* Accessor macros for h_i_d. There are more in haifa-sched.c and
sched-rgn.c. */
@@ -887,7 +887,7 @@ DEF_VEC_ALLOC_O (haifa_deps_insn_data_def, heap);
extern VEC(haifa_deps_insn_data_def, heap) *h_d_i_d;
-#define HDID(INSN) (VEC_index (haifa_deps_insn_data_def, h_d_i_d, \
+#define HDID(INSN) (&VEC_index (haifa_deps_insn_data_def, h_d_i_d, \
INSN_LUID (INSN)))
#define INSN_DEP_COUNT(INSN) (HDID (INSN)->dep_count)
#define HAS_INTERNAL_DEP(INSN) (HDID (INSN)->has_internal_dep)
@@ -901,7 +901,7 @@ extern VEC(haifa_deps_insn_data_def, heap) *h_d_i_d;
#define INSN_COND_DEPS(INSN) (HDID (INSN)->cond_deps)
#define CANT_MOVE(INSN) (HDID (INSN)->cant_move)
#define CANT_MOVE_BY_LUID(LUID) (VEC_index (haifa_deps_insn_data_def, h_d_i_d, \
- LUID)->cant_move)
+ LUID).cant_move)
#define INSN_PRIORITY(INSN) (HID (INSN)->priority)
@@ -1520,7 +1520,7 @@ insert_in_history_vect (VEC (expr_history_def, heap) **pvect,
if (res)
{
- expr_history_def *phist = VEC_index (expr_history_def, vect, ind);
+ expr_history_def *phist = &VEC_index (expr_history_def, vect, ind);
/* It is possible that speculation types of expressions that were
propagated through different paths will be different here. In this
@@ -4153,7 +4153,7 @@ finish_insns (void)
removed during the scheduling. */
for (i = 0; i < VEC_length (sel_insn_data_def, s_i_d); i++)
{
- sel_insn_data_def *sid_entry = VEC_index (sel_insn_data_def, s_i_d, i);
+ sel_insn_data_def *sid_entry = &VEC_index (sel_insn_data_def, s_i_d, i);
if (sid_entry->live)
return_regset_to_pool (sid_entry->live);
@@ -765,8 +765,8 @@ DEF_VEC_ALLOC_O (sel_insn_data_def, heap);
extern VEC (sel_insn_data_def, heap) *s_i_d;
/* Accessor macros for s_i_d. */
-#define SID(INSN) (VEC_index (sel_insn_data_def, s_i_d, INSN_LUID (INSN)))
-#define SID_BY_UID(UID) (VEC_index (sel_insn_data_def, s_i_d, LUID_BY_UID (UID)))
+#define SID(INSN) (&VEC_index (sel_insn_data_def, s_i_d, INSN_LUID (INSN)))
+#define SID_BY_UID(UID) (&VEC_index (sel_insn_data_def, s_i_d, LUID_BY_UID (UID)))
extern sel_insn_data_def insn_sid (insn_t);
@@ -897,7 +897,7 @@ extern void sel_finish_global_bb_info (void);
/* Get data for BB. */
#define SEL_GLOBAL_BB_INFO(BB) \
- (VEC_index (sel_global_bb_info_def, sel_global_bb_info, (BB)->index))
+ (&VEC_index (sel_global_bb_info_def, sel_global_bb_info, (BB)->index))
/* Access macros. */
#define BB_LV_SET(BB) (SEL_GLOBAL_BB_INFO (BB)->lv_set)
@@ -927,8 +927,8 @@ DEF_VEC_ALLOC_O (sel_region_bb_info_def, heap);
extern VEC (sel_region_bb_info_def, heap) *sel_region_bb_info;
/* Get data for BB. */
-#define SEL_REGION_BB_INFO(BB) (VEC_index (sel_region_bb_info_def, \
- sel_region_bb_info, (BB)->index))
+#define SEL_REGION_BB_INFO(BB) (&VEC_index (sel_region_bb_info_def, \
+ sel_region_bb_info, (BB)->index))
/* Get BB's note_list.
A note_list is a list of various notes that was scattered across BB
@@ -1941,9 +1941,9 @@ undo_transformations (av_set_t *av_ptr, rtx insn)
{
expr_history_def *phist;
- phist = VEC_index (expr_history_def,
- EXPR_HISTORY_OF_CHANGES (expr),
- index);
+ phist = &VEC_index (expr_history_def,
+ EXPR_HISTORY_OF_CHANGES (expr),
+ index);
switch (phist->type)
{
@@ -380,7 +380,7 @@ gen_conditions_for_domain (tree arg, inp_domain domain,
{
/* Now push a separator. */
if (domain.has_lb)
- VEC_quick_push (gimple, conds, NULL);
+ VEC_quick_push (gimple, conds, (gimple)NULL);
gen_one_condition (arg, domain.ub,
(domain.is_ub_inclusive
@@ -503,7 +503,7 @@ gen_conditions_for_pow_int_base (tree base, tree expn,
type is integer. */
/* Push a separator. */
- VEC_quick_push (gimple, conds, NULL);
+ VEC_quick_push (gimple, conds, (gimple)NULL);
temp = create_tmp_var (int_type, "DCE_COND1");
cst0 = build_int_cst (int_type, 0);
@@ -1048,7 +1048,7 @@ mark_phi_for_rewrite (basic_block bb, gimple phi)
bitmap_set_bit (blocks_with_phis_to_rewrite, idx);
VEC_reserve (gimple_vec, heap, phis_to_rewrite, last_basic_block + 1);
for (i = VEC_length (gimple_vec, phis_to_rewrite); i <= idx; i++)
- VEC_quick_push (gimple_vec, phis_to_rewrite, NULL);
+ VEC_quick_push (gimple_vec, phis_to_rewrite, (gimple_vec)NULL);
phis = VEC_index (gimple_vec, phis_to_rewrite, idx);
if (!phis)
@@ -3986,7 +3986,7 @@ splice_all_param_accesses (VEC (access_p, heap) **representatives)
result = UNUSED_PARAMS;
}
else
- VEC_quick_push (access_p, *representatives, NULL);
+ VEC_quick_push (access_p, *representatives, (access_p) NULL);
}
if (result == NO_GOOD_ACCESS)
@@ -4196,7 +4196,7 @@ get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
struct ipa_parm_adjustment *adj;
- adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
if (!adj->copy_param && adj->base == base)
return adj;
}
@@ -4301,7 +4301,7 @@ sra_ipa_modify_expr (tree *expr, bool convert,
for (i = 0; i < len; i++)
{
- adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
if (adj->base == base &&
(adj->offset == offset || adj->remove_param))
@@ -4508,7 +4508,7 @@ sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
tree name, vexpr, copy = NULL_TREE;
use_operand_p use_p;
- adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
if (adj->copy_param || !is_gimple_reg (adj->base))
continue;
name = gimple_default_def (cfun, adj->base);
@@ -217,7 +217,8 @@ addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
templ_index + 1);
/* Reuse the templates for addresses, so that we do not waste memory. */
- templ = VEC_index (mem_addr_template, mem_addr_template_list, templ_index);
+ templ = &VEC_index (mem_addr_template, mem_addr_template_list,
+ templ_index);
if (!templ->ref)
{
sym = (addr->symbol ?
@@ -1738,7 +1738,8 @@ dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
/* Push a marker on the stacks of local information so that we know how
far to unwind when we finalize this block. */
- VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
+ VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack,
+ (expr_hash_elt_t)NULL);
VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
record_equivalences_from_incoming_edge (bb);
@@ -1749,7 +1750,8 @@ dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
/* Create equivalences from redundant PHIs. PHIs are only truly
redundant when they exist in the same block, so push another
marker and unwind right afterwards. */
- VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
+ VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack,
+ (expr_hash_elt_t)NULL);
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
eliminate_redundant_computations (&gsi);
remove_local_expressions_from_table ();
@@ -1804,7 +1806,8 @@ dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
/* Push a marker onto the available expression stack so that we
unwind any expressions related to the TRUE arm before processing
the false arm below. */
- VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
+ VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack,
+ (expr_hash_elt_t)NULL);
VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
edge_info = (struct edge_info *) true_edge->aux;
@@ -3228,7 +3228,7 @@ multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, enum machine_mode mode,
TODO -- there must be some better way. This all is quite crude. */
-typedef struct
+typedef struct address_cost_data_s
{
HOST_WIDE_INT min_offset, max_offset;
unsigned costs[2][2][2][2];
@@ -1627,12 +1627,12 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
if (double_int_fits_in_shwi_p (off))
newop.off = off.low;
}
- VEC_replace (vn_reference_op_s, newoperands, j, &newop);
+ VEC_replace (vn_reference_op_s, newoperands, j, newop);
/* If it transforms from an SSA_NAME to an address, fold with
a preceding indirect reference. */
if (j > 0 && op[0] && TREE_CODE (op[0]) == ADDR_EXPR
&& VEC_index (vn_reference_op_s,
- newoperands, j - 1)->opcode == MEM_REF)
+ newoperands, j - 1).opcode == MEM_REF)
vn_reference_fold_indirect (&newoperands, &j);
}
if (i != VEC_length (vn_reference_op_s, operands))
@@ -2621,8 +2621,8 @@ create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
unsigned int *operand, gimple_seq *stmts,
gimple domstmt)
{
- vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
- *operand);
+ vn_reference_op_t currop = &VEC_index (vn_reference_op_s, ref->operands,
+ *operand);
tree genop;
++*operand;
switch (currop->opcode)
@@ -2699,8 +2699,8 @@ create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
{
pre_expr op0expr, op1expr;
tree genop0 = NULL_TREE, genop1 = NULL_TREE;
- vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
- ++*operand);
+ vn_reference_op_t nextop = &VEC_index (vn_reference_op_s, ref->operands,
+ ++*operand);
tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
stmts, domstmt);
if (!baseop)
@@ -4822,7 +4822,7 @@ init_pre (bool do_fre)
next_expression_id = 1;
expressions = NULL;
- VEC_safe_push (pre_expr, heap, expressions, NULL);
+ VEC_safe_push (pre_expr, heap, expressions, (pre_expr)NULL);
value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
get_max_value_id() + 1);
@@ -969,7 +969,7 @@ static VEC (oecount, heap) *cvec;
static hashval_t
oecount_hash (const void *p)
{
- const oecount *c = VEC_index (oecount, cvec, (size_t)p - 42);
+ const oecount *c = &VEC_index (oecount, cvec, (size_t)p - 42);
return htab_hash_pointer (c->op) ^ (hashval_t)c->oecode;
}
@@ -978,8 +978,8 @@ oecount_hash (const void *p)
static int
oecount_eq (const void *p1, const void *p2)
{
- const oecount *c1 = VEC_index (oecount, cvec, (size_t)p1 - 42);
- const oecount *c2 = VEC_index (oecount, cvec, (size_t)p2 - 42);
+ const oecount *c1 = &VEC_index (oecount, cvec, (size_t)p1 - 42);
+ const oecount *c2 = &VEC_index (oecount, cvec, (size_t)p2 - 42);
return (c1->oecode == c2->oecode
&& c1->op == c2->op);
}
@@ -1362,7 +1362,7 @@ undistribute_ops_list (enum tree_code opcode,
else
{
VEC_pop (oecount, cvec);
- VEC_index (oecount, cvec, (size_t)*slot - 42)->cnt++;
+ VEC_index (oecount, cvec, (size_t)*slot - 42).cnt++;
}
}
}
@@ -1389,7 +1389,7 @@ undistribute_ops_list (enum tree_code opcode,
candidates2 = sbitmap_alloc (length);
while (!VEC_empty (oecount, cvec))
{
- oecount *c = VEC_last (oecount, cvec);
+ oecount *c = &VEC_last (oecount, cvec);
if (c->cnt < 2)
break;
@@ -3223,7 +3223,7 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops,
fputs ("Multiplying by cached product ", dump_file);
for (elt = j; elt < vec_len; elt++)
{
- rf = VEC_index (repeat_factor, repeat_factor_vec, elt);
+ rf = &VEC_index (repeat_factor, repeat_factor_vec, elt);
print_generic_expr (dump_file, rf->factor, 0);
if (elt < vec_len - 1)
fputs (" * ", dump_file);
@@ -3249,7 +3249,7 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops,
dump_file);
for (elt = j; elt < vec_len; elt++)
{
- rf = VEC_index (repeat_factor, repeat_factor_vec, elt);
+ rf = &VEC_index (repeat_factor, repeat_factor_vec, elt);
print_generic_expr (dump_file, rf->factor, 0);
if (elt < vec_len - 1)
fputs (" * ", dump_file);
@@ -3283,7 +3283,7 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops,
fputs ("Building __builtin_pow call for (", dump_file);
for (elt = j; elt < vec_len; elt++)
{
- rf = VEC_index (repeat_factor, repeat_factor_vec, elt);
+ rf = &VEC_index (repeat_factor, repeat_factor_vec, elt);
print_generic_expr (dump_file, rf->factor, 0);
if (elt < vec_len - 1)
fputs (" * ", dump_file);
@@ -3308,8 +3308,8 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops,
{
tree op1, op2;
- rf1 = VEC_index (repeat_factor, repeat_factor_vec, ii);
- rf2 = VEC_index (repeat_factor, repeat_factor_vec, ii + 1);
+ rf1 = &VEC_index (repeat_factor, repeat_factor_vec, ii);
+ rf2 = &VEC_index (repeat_factor, repeat_factor_vec, ii + 1);
/* Init the last factor's representative to be itself. */
if (!rf2->repr)
@@ -3333,7 +3333,7 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops,
/* Form a call to __builtin_powi for the maximum product
just formed, raised to the power obtained earlier. */
- rf1 = VEC_index (repeat_factor, repeat_factor_vec, j);
+ rf1 = &VEC_index (repeat_factor, repeat_factor_vec, j);
iter_result = get_reassoc_pow_ssa_name (target, type);
pow_stmt = gimple_build_call (powi_fndecl, 2, rf1->repr,
build_int_cst (integer_type_node,
@@ -3366,7 +3366,7 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops,
unsigned k = power;
unsigned n;
- rf1 = VEC_index (repeat_factor, repeat_factor_vec, i);
+ rf1 = &VEC_index (repeat_factor, repeat_factor_vec, i);
rf1->count -= power;
FOR_EACH_VEC_ELT_REVERSE (operand_entry_t, *ops, n, oe)
@@ -777,7 +777,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
alias_set_type base_alias_set = -1;
/* First get the final access size from just the outermost expression. */
- op = VEC_index (vn_reference_op_s, ops, 0);
+ op = &VEC_index (vn_reference_op_s, ops, 0);
if (op->opcode == COMPONENT_REF)
size_tree = DECL_SIZE (op->op0);
else if (op->opcode == BIT_FIELD_REF)
@@ -817,7 +817,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
&& op->op0
&& DECL_P (TREE_OPERAND (op->op0, 0)))
{
- vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
+ vn_reference_op_t pop = &VEC_index (vn_reference_op_s, ops, i-1);
base = TREE_OPERAND (op->op0, 0);
if (pop->off == -1)
{
@@ -992,8 +992,8 @@ vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
unsigned int *i_p)
{
unsigned int i = *i_p;
- vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
- vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
+ vn_reference_op_t op = &VEC_index (vn_reference_op_s, *ops, i);
+ vn_reference_op_t mem_op = &VEC_index (vn_reference_op_s, *ops, i - 1);
tree addr_base;
HOST_WIDE_INT addr_offset;
@@ -1024,8 +1024,8 @@ vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
unsigned int *i_p)
{
unsigned int i = *i_p;
- vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
- vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
+ vn_reference_op_t op = &VEC_index (vn_reference_op_s, *ops, i);
+ vn_reference_op_t mem_op = &VEC_index (vn_reference_op_s, *ops, i - 1);
gimple def_stmt;
enum tree_code code;
double_int off;
@@ -1102,7 +1102,7 @@ fully_constant_vn_reference_p (vn_reference_t ref)
/* Try to simplify the translated expression if it is
a call to a builtin function with at most two arguments. */
- op = VEC_index (vn_reference_op_s, operands, 0);
+ op = &VEC_index (vn_reference_op_s, operands, 0);
if (op->opcode == CALL_EXPR
&& TREE_CODE (op->op0) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
@@ -1112,9 +1112,9 @@ fully_constant_vn_reference_p (vn_reference_t ref)
{
vn_reference_op_t arg0, arg1 = NULL;
bool anyconst = false;
- arg0 = VEC_index (vn_reference_op_s, operands, 1);
+ arg0 = &VEC_index (vn_reference_op_s, operands, 1);
if (VEC_length (vn_reference_op_s, operands) > 2)
- arg1 = VEC_index (vn_reference_op_s, operands, 2);
+ arg1 = &VEC_index (vn_reference_op_s, operands, 2);
if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
|| (arg0->opcode == ADDR_EXPR
&& is_gimple_min_invariant (arg0->op0)))
@@ -1146,7 +1146,7 @@ fully_constant_vn_reference_p (vn_reference_t ref)
&& VEC_length (vn_reference_op_s, operands) == 2)
{
vn_reference_op_t arg0;
- arg0 = VEC_index (vn_reference_op_s, operands, 1);
+ arg0 = &VEC_index (vn_reference_op_s, operands, 1);
if (arg0->opcode == STRING_CST
&& (TYPE_MODE (op->type)
== TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
@@ -1214,12 +1214,12 @@ valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
&& vro->op0
&& TREE_CODE (vro->op0) == ADDR_EXPR
&& VEC_index (vn_reference_op_s,
- orig, i - 1)->opcode == MEM_REF)
+ orig, i - 1).opcode == MEM_REF)
vn_reference_fold_indirect (&orig, &i);
else if (i > 0
&& vro->opcode == SSA_NAME
&& VEC_index (vn_reference_op_s,
- orig, i - 1)->opcode == MEM_REF)
+ orig, i - 1).opcode == MEM_REF)
vn_reference_maybe_forwprop_address (&orig, &i);
/* If it transforms a non-constant ARRAY_REF into a constant
one, adjust the constant offset. */
@@ -1612,9 +1612,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
i = VEC_length (vn_reference_op_s, vr->operands) - 1;
j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
while (j >= 0 && i >= 0
- && vn_reference_op_eq (VEC_index (vn_reference_op_s,
- vr->operands, i),
- VEC_index (vn_reference_op_s, lhs_ops, j)))
+ && vn_reference_op_eq (&VEC_index (vn_reference_op_s,
+ vr->operands, i),
+ &VEC_index (vn_reference_op_s, lhs_ops, j)))
{
i--;
j--;
@@ -1627,10 +1627,10 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
don't care here - further lookups with the rewritten operands
will simply fail if we messed up types too badly. */
if (j == 0 && i >= 0
- && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
- && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
- && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
- == VEC_index (vn_reference_op_s, vr->operands, i)->off))
+ && VEC_index (vn_reference_op_s, lhs_ops, 0).opcode == MEM_REF
+ && VEC_index (vn_reference_op_s, lhs_ops, 0).off != -1
+ && (VEC_index (vn_reference_op_s, lhs_ops, 0).off
+ == VEC_index (vn_reference_op_s, vr->operands, i).off))
i--, j--;
/* i now points to the first additional op.
@@ -1657,7 +1657,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
VEC_truncate (vn_reference_op_s, vr->operands,
i + 1 + VEC_length (vn_reference_op_s, rhs));
FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
- VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
+ VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, *vro);
VEC_free (vn_reference_op_s, heap, rhs);
vr->operands = valueize_refs (vr->operands);
vr->hashcode = vn_reference_compute_hash (vr);
@@ -1795,12 +1795,12 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
op.opcode = MEM_REF;
op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
op.off = at - lhs_offset + rhs_offset;
- VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
+ VEC_replace (vn_reference_op_s, vr->operands, 0, op);
op.type = TREE_TYPE (rhs);
op.opcode = TREE_CODE (rhs);
op.op0 = rhs;
op.off = -1;
- VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
+ VEC_replace (vn_reference_op_s, vr->operands, 1, op);
vr->hashcode = vn_reference_compute_hash (vr);
/* Adjust *ref from the new operands. */
@@ -3715,7 +3715,7 @@ start_over:
/* Restore the last use walker and continue walking there. */
use = name;
name = VEC_pop (tree, namevec);
- memcpy (&iter, VEC_last (ssa_op_iter, itervec),
+ memcpy (&iter, &VEC_last (ssa_op_iter, itervec),
sizeof (ssa_op_iter));
VEC_pop (ssa_op_iter, itervec);
goto continue_walking;
@@ -2918,7 +2918,7 @@ get_constraint_for_ptr_offset (tree ptr, tree offset,
for (j = 0; j < n; j++)
{
varinfo_t curr;
- c = *VEC_index (ce_s, *results, j);
+ c = VEC_index (ce_s, *results, j);
curr = get_varinfo (c.var);
if (c.type == ADDRESSOF
@@ -2980,7 +2980,7 @@ get_constraint_for_ptr_offset (tree ptr, tree offset,
else
c.offset = rhsoffset;
- VEC_replace (ce_s, *results, j, &c);
+ VEC_replace (ce_s, *results, j, c);
}
}
@@ -3049,7 +3049,7 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
adding the required subset of sub-fields below. */
get_constraint_for_1 (t, results, true, lhs_p);
gcc_assert (VEC_length (ce_s, *results) == 1);
- result = VEC_last (ce_s, *results);
+ result = &VEC_last (ce_s, *results);
if (result->type == SCALAR
&& get_varinfo (result->var)->is_full_var)
@@ -3275,13 +3275,13 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
if (address_p)
return;
- cs = *VEC_last (ce_s, *results);
+ cs = VEC_last (ce_s, *results);
if (cs.type == DEREF
&& type_can_have_subvars (TREE_TYPE (t)))
{
/* For dereferences this means we have to defer it
to solving time. */
- VEC_last (ce_s, *results)->offset = UNKNOWN_OFFSET;
+ VEC_last (ce_s, *results).offset = UNKNOWN_OFFSET;
return;
}
if (cs.type != SCALAR)
@@ -3442,8 +3442,8 @@ do_structure_copy (tree lhsop, tree rhsop)
get_constraint_for (lhsop, &lhsc);
get_constraint_for_rhs (rhsop, &rhsc);
- lhsp = VEC_index (ce_s, lhsc, 0);
- rhsp = VEC_index (ce_s, rhsc, 0);
+ lhsp = &VEC_index (ce_s, lhsc, 0);
+ rhsp = &VEC_index (ce_s, rhsc, 0);
if (lhsp->type == DEREF
|| (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
|| rhsp->type == DEREF)
@@ -3472,7 +3472,7 @@ do_structure_copy (tree lhsop, tree rhsop)
for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
{
varinfo_t lhsv, rhsv;
- rhsp = VEC_index (ce_s, rhsc, k);
+ rhsp = &VEC_index (ce_s, rhsc, k);
lhsv = get_varinfo (lhsp->var);
rhsv = get_varinfo (rhsp->var);
if (lhsv->may_have_pointers
@@ -4368,7 +4368,7 @@ find_func_aliases_for_call (gimple t)
lhs = get_function_part_constraint (fi, fi_parm_base + j);
while (VEC_length (ce_s, rhsc) != 0)
{
- rhsp = VEC_last (ce_s, rhsc);
+ rhsp = &VEC_last (ce_s, rhsc);
process_constraint (new_constraint (lhs, *rhsp));
VEC_pop (ce_s, rhsc);
}
@@ -4390,7 +4390,7 @@ find_func_aliases_for_call (gimple t)
VEC(ce_s, heap) *tem = NULL;
VEC_safe_push (ce_s, heap, tem, &rhs);
do_deref (&tem);
- rhs = *VEC_index (ce_s, tem, 0);
+ rhs = VEC_index (ce_s, tem, 0);
VEC_free(ce_s, heap, tem);
}
FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
@@ -4462,7 +4462,7 @@ find_func_aliases (gimple origt)
struct constraint_expr *c2;
while (VEC_length (ce_s, rhsc) > 0)
{
- c2 = VEC_last (ce_s, rhsc);
+ c2 = &VEC_last (ce_s, rhsc);
process_constraint (new_constraint (*c, *c2));
VEC_pop (ce_s, rhsc);
}
@@ -5149,7 +5149,7 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
bool must_have_pointers_p;
if (!VEC_empty (fieldoff_s, *fieldstack))
- pair = VEC_last (fieldoff_s, *fieldstack);
+ pair = &VEC_last (fieldoff_s, *fieldstack);
/* If there isn't anything at offset zero, create sth. */
if (!pair
@@ -190,7 +190,7 @@ adjust_vec_debug_stmts (void)
while (!VEC_empty (adjust_info, adjust_vec))
{
- adjust_debug_stmts_now (VEC_last (adjust_info, adjust_vec));
+ adjust_debug_stmts_now (&VEC_last (adjust_info, adjust_vec));
VEC_pop (adjust_info, adjust_vec);
}
@@ -2566,4 +2566,3 @@ vect_loop_versioning (loop_vec_info loop_vinfo,
GSI_SAME_STMT);
}
}
-
@@ -1094,7 +1094,7 @@ vect_slp_rearrange_stmts (slp_tree node, unsigned int group_size,
tmp_stmts = VEC_alloc (gimple, heap, group_size);
for (i = 0; i < group_size; i++)
- VEC_safe_push (gimple, heap, tmp_stmts, NULL);
+ VEC_safe_push (gimple, heap, tmp_stmts, (gimple)NULL);
FOR_EACH_VEC_ELT (gimple, SLP_TREE_SCALAR_STMTS (node), i, stmt)
{
@@ -2593,7 +2593,7 @@ vect_create_mask_and_perm (gimple stmt, gimple next_scalar_stmt,
stmts later. */
for (i = VEC_length (gimple, SLP_TREE_VEC_STMTS (node));
i < (int) SLP_TREE_NUMBER_OF_VEC_STMTS (node); i++)
- VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (node), NULL);
+ VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (node), (gimple)NULL);
perm_dest = vect_create_destination_var (gimple_assign_lhs (stmt), vectype);
for (i = 0; i < ncopies; i++)
@@ -8069,7 +8069,7 @@ vrp_fold_stmt (gimple_stmt_iterator *si)
A NULL entry is used to mark the end of pairs which need to be
restored. */
-static VEC(tree,heap) *stack;
+static VEC(tree,heap) *equiv_stack;
/* A trivial wrapper so that we can present the generic jump threading
code with a simple API for simplifying statements. STMT is the
@@ -8137,7 +8137,7 @@ identify_jump_threads (void)
/* Allocate our unwinder stack to unwind any temporary equivalences
that might be recorded. */
- stack = VEC_alloc (tree, heap, 20);
+ equiv_stack = VEC_alloc (tree, heap, 20);
/* To avoid lots of silly node creation, we create a single
conditional and just modify it in-place when attempting to
@@ -8191,7 +8191,7 @@ identify_jump_threads (void)
if (e->flags & (EDGE_DFS_BACK | EDGE_COMPLEX))
continue;
- thread_across_edge (dummy, e, true, &stack,
+ thread_across_edge (dummy, e, true, &equiv_stack,
simplify_stmt_for_jump_threading);
}
}
@@ -8212,7 +8212,7 @@ static void
finalize_jump_threads (void)
{
thread_through_all_blocks (false);
- VEC_free (tree, heap, stack);
+ VEC_free (tree, heap, equiv_stack);
}
@@ -6718,8 +6718,8 @@ simple_cst_equal (const_tree t1, const_tree t2)
for (idx = 0; idx < VEC_length (constructor_elt, v1); ++idx)
/* ??? Should we handle also fields here? */
- if (!simple_cst_equal (VEC_index (constructor_elt, v1, idx)->value,
- VEC_index (constructor_elt, v2, idx)->value))
+ if (!simple_cst_equal (VEC_index (constructor_elt, v1, idx).value,
+ VEC_index (constructor_elt, v2, idx).value))
return false;
return true;
}
@@ -1475,7 +1475,7 @@ struct GTY(()) tree_vec {
/* In a CONSTRUCTOR node. */
#define CONSTRUCTOR_ELTS(NODE) (CONSTRUCTOR_CHECK (NODE)->constructor.elts)
#define CONSTRUCTOR_ELT(NODE,IDX) \
- (VEC_index (constructor_elt, CONSTRUCTOR_ELTS (NODE), IDX))
+ (&VEC_index (constructor_elt, CONSTRUCTOR_ELTS (NODE), IDX))
#define CONSTRUCTOR_NELTS(NODE) \
(VEC_length (constructor_elt, CONSTRUCTOR_ELTS (NODE)))
@@ -1485,7 +1485,7 @@ struct GTY(()) tree_vec {
#define FOR_EACH_CONSTRUCTOR_VALUE(V, IX, VAL) \
for (IX = 0; (IX >= VEC_length (constructor_elt, V)) \
? false \
- : ((VAL = VEC_index (constructor_elt, V, IX)->value), \
+ : ((VAL = VEC_index (constructor_elt, V, IX).value), \
true); \
(IX)++)
@@ -1495,8 +1495,8 @@ struct GTY(()) tree_vec {
#define FOR_EACH_CONSTRUCTOR_ELT(V, IX, INDEX, VAL) \
for (IX = 0; (IX >= VEC_length (constructor_elt, V)) \
? false \
- : (((void) (VAL = VEC_index (constructor_elt, V, IX)->value)), \
- (INDEX = VEC_index (constructor_elt, V, IX)->index), \
+ : (((void) (VAL = VEC_index (constructor_elt, V, IX).value)), \
+ (INDEX = VEC_index (constructor_elt, V, IX).index), \
true); \
(IX)++)
@@ -7553,7 +7553,7 @@ loc_exp_dep_clear (variable var)
{
while (!VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)))
{
- loc_exp_dep *led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
+ loc_exp_dep *led = &VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
if (led->next)
led->next->pprev = led->pprev;
if (led->pprev)
@@ -7593,7 +7593,7 @@ loc_exp_insert_dep (variable var, rtx x, htab_t vars)
return;
VEC_quick_push (loc_exp_dep, VAR_LOC_DEP_VEC (var), NULL);
- led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
+ led = &VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
led->dv = var->dv;
led->value = x;
@@ -2908,8 +2908,8 @@ compare_constant (const tree t1, const tree t2)
for (idx = 0; idx < VEC_length (constructor_elt, v1); ++idx)
{
- constructor_elt *c1 = VEC_index (constructor_elt, v1, idx);
- constructor_elt *c2 = VEC_index (constructor_elt, v2, idx);
+ constructor_elt *c1 = &VEC_index (constructor_elt, v1, idx);
+ constructor_elt *c2 = &VEC_index (constructor_elt, v2, idx);
/* Check that each value is the same... */
if (!compare_constant (c1->value, c2->value))